Dataset columns (type and value/length statistics):

column         type            stats
lang           stringclasses   1 value
license        stringclasses   13 values
stderr         stringlengths   0 - 350
commit         stringlengths   40 - 40
returncode     int64           0 - 128
repos          stringlengths   7 - 45.1k
new_contents   stringlengths   0 - 1.87M
new_file       stringlengths   6 - 292
old_contents   stringlengths   0 - 1.87M
message        stringlengths   6 - 9.26k
old_file       stringlengths   6 - 292
subject        stringlengths   0 - 4.45k
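Every record below follows this column layout; the two large unlabeled blocks inside each record appear to be new_contents (shown first) and old_contents (shown second), and stderr does not appear, presumably because it is empty for these examples. As a rough, hypothetical sketch only (the class name, the field comments, and the choice of String for every text column are assumptions for illustration, not part of the dataset), one row could be held in Java like this:

// Hypothetical value class for one row of this commit dataset.
// Field names mirror the columns listed above; everything else is assumed.
final class CommitRow {
    final String lang;         // programming language, e.g. "Java"
    final String license;      // license identifier, e.g. "mpl-2.0"
    final String stderr;       // tool output, may be empty
    final String commit;       // 40-character commit hash
    final long   returncode;   // observed range 0 - 128
    final String repos;        // comma-separated list of repository forks
    final String newContents;  // file contents after the commit
    final String newFile;      // file path after the commit
    final String oldContents;  // file contents before the commit
    final String message;      // full commit message
    final String oldFile;      // file path before the commit
    final String subject;      // short commit subject line

    CommitRow(String lang, String license, String stderr, String commit,
              long returncode, String repos, String newContents, String newFile,
              String oldContents, String message, String oldFile, String subject) {
        this.lang = lang;
        this.license = license;
        this.stderr = stderr;
        this.commit = commit;
        this.returncode = returncode;
        this.repos = repos;
        this.newContents = newContents;
        this.newFile = newFile;
        this.oldContents = oldContents;
        this.message = message;
        this.oldFile = oldFile;
        this.subject = subject;
    }
}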
lang: Java
license: mpl-2.0
commit: d37fcd49c9ce7e9f148fa51edef3b0ca38c71710
returncode: 0
repos:
esarbanis/strabon,esarbanis/strabon,esarbanis/strabon,esarbanis/strabon,esarbanis/strabon
/** * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Copyright (C) 2010, 2011, 2012, Pyravlos Team * * http://www.strabon.di.uoa.gr/ */ package org.openrdf.query.resultio.sparqlhtml; import java.io.IOException; import java.io.OutputStream; import java.net.URLEncoder; import java.util.List; import org.openrdf.model.BNode; import org.openrdf.query.Binding; import org.openrdf.query.BindingSet; import org.openrdf.query.TupleQueryResultHandlerException; import org.openrdf.query.resultio.TupleQueryResultFormat; import org.openrdf.query.resultio.TupleQueryResultWriter; import org.openrdf.query.resultio.stSPARQLQueryResultFormat; import org.openrdf.query.resultio.sparqlxml.stSPARQLXMLWriter; import org.openrdf.model.Value; import org.openrdf.model.URI; /** * @author Charalampos Nikolaou <charnik@di.uoa.gr> * */ public class stSPARQLResultsHTMLWriter implements TupleQueryResultWriter { public static final String TABLE = "TABLE"; public static final String TABLE_ROW_TAG = "TR"; public static final String TABLE_HEADER_TAG = "TH"; public static final String TABLE_DATA_TAG = "TD"; public static final String LINK = "A"; public static final String LINK_REF = "HREF"; public static final String STYLE = "class"; public static final String ID = "id"; public static final String LINK_ID = "uri"; public static final String TABLE_HEADER_CLASS = "query_results_header"; public static final String TABLE_DATA_CLASS = "query_results_data"; public static final String TABLE_CLASS = "query_results_table"; public static final String MORE_LINK = "comment more"; /** * The underlying XML formatter. */ private stSPARQLXMLWriter xmlWriter; /** * The ordered list of binding names of the result. 
*/ private List<String> bindingNames; public stSPARQLResultsHTMLWriter(OutputStream out) { this(new stSPARQLXMLWriter(out)); } public stSPARQLResultsHTMLWriter(stSPARQLXMLWriter writer) { xmlWriter = writer; xmlWriter.setPrettyPrint(true); } @Override public void startQueryResult(List<String> bindingNames) throws TupleQueryResultHandlerException { try { // keep the order of binding names this.bindingNames = bindingNames; // set style for table xmlWriter.setAttribute(STYLE, TABLE_CLASS); // write start of table xmlWriter.startTag(TABLE); // write Table header containing the bindings xmlWriter.startTag(TABLE_ROW_TAG); for (String bindingName: bindingNames) { // set style for header xmlWriter.setAttribute(STYLE, TABLE_HEADER_CLASS); xmlWriter.textElement(TABLE_HEADER_TAG, bindingName); } xmlWriter.endTag(TABLE_ROW_TAG); } catch (IOException e) { throw new TupleQueryResultHandlerException(e); } } @Override public void endQueryResult() throws TupleQueryResultHandlerException { try { // write end of table xmlWriter.endTag(TABLE); // needed to flush data xmlWriter.endDocument(); } catch (IOException e) { throw new TupleQueryResultHandlerException(e); } } @Override public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { try { StringBuilder value = new StringBuilder(); Value boundValue = null; xmlWriter.startTag(TABLE_ROW_TAG); for (String bindingName : bindingNames) { Binding binding = bindingSet.getBinding(bindingName); if(binding != null) { boundValue = binding.getValue(); value.append(boundValue.stringValue()); if(boundValue instanceof BNode) { value.insert(0, "_:"); } // If the value is a uri, make it link if(boundValue instanceof URI) { xmlWriter.setAttribute(STYLE, TABLE_DATA_CLASS); xmlWriter.startTag(TABLE_DATA_TAG); // select all the triples that contain the boundValue String query= "select * " + "where " + "{ " + "?subject ?predicate ?object . "+ "FILTER((?subject = <"+ boundValue.toString()+ ">) || "+ "(?predicate = <"+ boundValue.toString()+ ">) || "+ "(?object = <"+ boundValue.toString()+ ">)) " + "}"; String href = "Browse?view=HTML&query="+URLEncoder.encode(query, "UTF-8")+"&format=HTML&resource="+boundValue.toString(); xmlWriter.setAttribute(LINK_REF, href); xmlWriter.startTag(LINK); xmlWriter.text(boundValue.toString()); xmlWriter.endTag(LINK); } else { xmlWriter.setAttribute(STYLE, TABLE_DATA_CLASS+" "+MORE_LINK); xmlWriter.startTag(TABLE_DATA_TAG); xmlWriter.text(boundValue.toString()); } xmlWriter.endTag(TABLE_DATA_TAG); } else { xmlWriter.setAttribute(STYLE, TABLE_DATA_CLASS); xmlWriter.startTag(TABLE_DATA_TAG); xmlWriter.endTag(TABLE_DATA_TAG); } value.setLength(0); } xmlWriter.endTag(TABLE_ROW_TAG); } catch (IOException e) { throw new TupleQueryResultHandlerException(e); } } @Override public TupleQueryResultFormat getTupleQueryResultFormat() { return stSPARQLQueryResultFormat.HTML; } }
new_file: resultio-spatial/sparqlhtml/src/main/java/org/openrdf/query/resultio/sparqlhtml/stSPARQLResultsHTMLWriter.java
/** * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Copyright (C) 2010, 2011, 2012, Pyravlos Team * * http://www.strabon.di.uoa.gr/ */ package org.openrdf.query.resultio.sparqlhtml; import java.io.IOException; import java.io.OutputStream; import java.net.URLEncoder; import java.util.List; import org.openrdf.model.BNode; import org.openrdf.query.Binding; import org.openrdf.query.BindingSet; import org.openrdf.query.TupleQueryResultHandlerException; import org.openrdf.query.resultio.TupleQueryResultFormat; import org.openrdf.query.resultio.TupleQueryResultWriter; import org.openrdf.query.resultio.stSPARQLQueryResultFormat; import org.openrdf.query.resultio.sparqlxml.stSPARQLXMLWriter; import org.openrdf.model.Value; import org.openrdf.model.URI; /** * @author Charalampos Nikolaou <charnik@di.uoa.gr> * */ public class stSPARQLResultsHTMLWriter implements TupleQueryResultWriter { public static final String TABLE = "TABLE"; public static final String TABLE_ROW_TAG = "TR"; public static final String TABLE_HEADER_TAG = "TH"; public static final String TABLE_DATA_TAG = "TD"; public static final String LINK = "A"; public static final String LINK_REF = "HREF"; public static final String STYLE = "class"; public static final String ID = "id"; public static final String LINK_ID = "uri"; public static final String TABLE_HEADER_CLASS = "query_results_header"; public static final String TABLE_DATA_CLASS = "query_results_data"; public static final String TABLE_CLASS = "query_results_table"; public static final String MORE_LINK = "comment more"; /** * The underlying XML formatter. */ private stSPARQLXMLWriter xmlWriter; /** * The ordered list of binding names of the result. 
*/ private List<String> bindingNames; public stSPARQLResultsHTMLWriter(OutputStream out) { this(new stSPARQLXMLWriter(out)); } public stSPARQLResultsHTMLWriter(stSPARQLXMLWriter writer) { xmlWriter = writer; xmlWriter.setPrettyPrint(true); } @Override public void startQueryResult(List<String> bindingNames) throws TupleQueryResultHandlerException { try { // keep the order of binding names this.bindingNames = bindingNames; // set style for table xmlWriter.setAttribute(STYLE, TABLE_CLASS); // write start of table xmlWriter.startTag(TABLE); // write Table header containing the bindings xmlWriter.startTag(TABLE_ROW_TAG); for (String bindingName: bindingNames) { // set style for header xmlWriter.setAttribute(STYLE, TABLE_HEADER_CLASS); xmlWriter.textElement(TABLE_HEADER_TAG, bindingName); } xmlWriter.endTag(TABLE_ROW_TAG); } catch (IOException e) { throw new TupleQueryResultHandlerException(e); } } @Override public void endQueryResult() throws TupleQueryResultHandlerException { try { // write end of table xmlWriter.endTag(TABLE); // needed to flush data xmlWriter.endDocument(); } catch (IOException e) { throw new TupleQueryResultHandlerException(e); } } @Override public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException { try { StringBuilder value = new StringBuilder(); Value boundValue = null; xmlWriter.startTag(TABLE_ROW_TAG); for (String bindingName : bindingNames) { Binding binding = bindingSet.getBinding(bindingName); if(binding != null) { boundValue = binding.getValue(); value.append(boundValue.stringValue()); if(boundValue instanceof BNode) { value.insert(0, "_:"); } // If the value is a uri, make it link if(boundValue instanceof URI) { xmlWriter.setAttribute(STYLE, TABLE_DATA_CLASS); xmlWriter.startTag(TABLE_DATA_TAG); // select all the triples that contain the boundValue String query= "select * " + "where " + "{ " + "?subject ?predicate ?object . "+ "FILTER((?subject = <"+ boundValue.toString()+ ">) || "+ "(?predicate = <"+ boundValue.toString()+ ">) || "+ "(?object = <"+ boundValue.toString()+ ">)) " + "}"; String href = "Browse?view=HTML&query="+URLEncoder.encode(query, "UTF-8")+"&format=HTML&resource="+boundValue.toString(); xmlWriter.setAttribute(LINK_REF, href); xmlWriter.startTag(LINK); xmlWriter.text(boundValue.toString()); xmlWriter.endTag(LINK); } else { xmlWriter.setAttribute(STYLE, TABLE_DATA_CLASS+" "+MORE_LINK); xmlWriter.startTag(TABLE_DATA_TAG); xmlWriter.text(boundValue.toString()); } xmlWriter.endTag(TABLE_DATA_TAG); } value.setLength(0); } xmlWriter.endTag(TABLE_ROW_TAG); } catch (IOException e) { throw new TupleQueryResultHandlerException(e); } } @Override public TupleQueryResultFormat getTupleQueryResultFormat() { return stSPARQLQueryResultFormat.HTML; } }
message: Added a TD tag that was missing when a variable was not bound.
old_file: resultio-spatial/sparqlhtml/src/main/java/org/openrdf/query/resultio/sparqlhtml/stSPARQLResultsHTMLWriter.java
subject: Added a TD tag that was missing when a variable was not bound.
lang: Java
license: agpl-3.0
commit: 96f8e2b8cb11e046fa2ad84af4b2f9fac25a9de4
returncode: 0
repos:
automenta/java_dann
/****************************************************************************** * * * Copyright: (c) Syncleus, Inc. * * * * You may redistribute and modify this source code under the terms and * * conditions of the Open Source Community License - Type C version 1.0 * * or any later version as published by Syncleus, Inc. at www.syncleus.com. * * There should be a copy of the license included with this file. If a copy * * of the license is not included you are granted no right to distribute or * * otherwise use this file except through a legal and valid license. You * * should also contact Syncleus, Inc. at the information below if you cannot * * find a license: * * * * Syncleus, Inc. * * 2604 South 12th Street * * Philadelphia, PA 19148 * * * ******************************************************************************/ package com.syncleus.dann.genetics.wavelets; import com.syncleus.dann.math.*; import java.util.*; public class ExpressionFunction { private static Random random = new Random(); HashSet<ReceptorKey> receptors; ArrayList<WaveMultidimensionalMathFunction> waves; WaveletMathFunction wavelet; public ExpressionFunction(ExpressionFunction copy) { this.receptors = new HashSet<ReceptorKey>(copy.receptors); this.waves = new ArrayList<WaveMultidimensionalMathFunction>(copy.waves); this.wavelet = copy.wavelet; } public ExpressionFunction(ReceptorKey initialInput) { this.waves = new ArrayList<WaveMultidimensionalMathFunction>(); this.receptors = new HashSet<ReceptorKey>(); this.receptors.add(initialInput); WaveMultidimensionalMathFunction initialWave = generateNewWave(); this.waves.add(initialWave); } private ExpressionFunction() { } public WaveletMathFunction getWaveletMathFunction() { return this.wavelet; } public int getWaveCount() { this.reconstructWavelet(); return this.wavelet.getWaveCount(); } public Set<ReceptorKey> getReceptors() { return Collections.unmodifiableSet(this.receptors); } public WaveletMathFunction getWavelet() { this.reconstructWavelet(); return this.wavelet.clone(); } public boolean receives(SignalKey signal) { for(ReceptorKey receptor : this.receptors) { if( receptor.binds(signal) ) return true; } return false; } public double calculate(Set<SignalKeyConcentration> signalConcentrations) { this.reconstructWavelet(); for(ReceptorKey receptor : this.receptors) { double concentration = 0.0; //calculate concentration for the current receptor for(SignalKeyConcentration signalConcentration : signalConcentrations) { if( receptor.binds(signalConcentration.getSignal())) concentration += signalConcentration.getConcentration(); } this.wavelet.setParameter(String.valueOf(receptor.hashCode()), concentration); } return this.wavelet.calculate(); } @Override public ExpressionFunction clone() { return new ExpressionFunction(this); } private void reconstructWavelet() { String[] receptorNames = new String[this.receptors.size()]; int receptorNamesIndex = 0; for(ReceptorKey receptor : this.receptors) receptorNames[receptorNamesIndex++] = String.valueOf(receptor.hashCode()); this.wavelet = new WaveletMathFunction(receptorNames); for(WaveMultidimensionalMathFunction wave : this.waves) { this.wavelet.addWave(wave); } } /** * Internally mutates.<br/> * <br/> * may change in any of the following ways:<br/> * <ul> * <li>add a new bound wave</li> * <li>copy an existing wave and mutates it adding the new mutated wave</li> * <li>delete an existing wave</li> * <li>removing a signal</li> * <li>Do nothing</li> * </ul> * * @return New mutated wavelet */ public ExpressionFunction mutate(double 
deviation) { ExpressionFunction copy = this.clone(); while(random.nextFloat() < 0.1) { //add a mutated copy of an existing wave if(random.nextDouble() < 0.1) { //Signal newSignal = this.getRandomSignal(); //return this.mutate(newSignal); copy.waves.add(this.generateRandomWave()); } //make a random new wave if(random.nextDouble() < 0.1) { copy.waves.add(this.generateNewWave()); } //delete a random wave if(random.nextDouble() < 0.1) { //only delete if there will be atleast one wave left if(this.waves.size() > 1) { WaveMultidimensionalMathFunction deleteWave = copy.waves.get(random.nextInt(copy.waves.size())); copy.waves.remove(deleteWave); } } //delete a signal if(random.nextDouble() < 0.1) { //only delet eif there will be atleast one signal left if(this.receptors.size() > 1) { ReceptorKey[] receptorArray = new ReceptorKey[copy.receptors.size()]; copy.receptors.toArray(receptorArray); ReceptorKey deleteReceptor = receptorArray[random.nextInt(receptorArray.length)]; copy.receptors.remove(deleteReceptor); ReceptorKey[] copyReceptors = new ReceptorKey[copy.receptors.size()]; copy.receptors.toArray(copyReceptors); String[] dimensionNames = new String[copyReceptors.length]; int dimensionNamesIndex = 0; for(ReceptorKey copyReceptor : copyReceptors) dimensionNames[dimensionNamesIndex++] = String.valueOf(copyReceptor.hashCode()); copy.waves.clear(); for(WaveMultidimensionalMathFunction wave:this.waves) { WaveMultidimensionalMathFunction newWave = new WaveMultidimensionalMathFunction(dimensionNames); newWave.setAmplitude(wave.getAmplitude()); newWave.setDistribution(wave.getDistribution()); newWave.setForm(wave.getForm()); newWave.setFrequency(wave.getFrequency()); newWave.setPhase(wave.getPhase()); for(String dimension:dimensionNames) { newWave.setCenter(dimension, wave.getCenter(dimension)); newWave.setDimension(dimension, wave.getDimension(dimension)); } copy.waves.add(newWave); } } } } return copy; } /** * Mutates by incorperating a new signal into the mutated result.<br/> * <br/> * May mutate by:<br/> * <ul> * <li>adding the new signal</li> * <ul> * @param newReceptor The new receptor to possibly incorperate into mutation * @param deviation random deviation for mutation. 
* @return New mutated wavelet */ public ExpressionFunction mutate(double deviation, ReceptorKey newReceptor) { ExpressionFunction copy = this.clone(); copy.receptors.add(newReceptor); if(copy.receptors.size() > this.receptors.size()) { copy.waves.clear(); for(WaveMultidimensionalMathFunction wave:this.waves) { String[] names = new String[wave.getDimensionNames().length + 1]; int index = 0; for(String dimensionName:wave.getDimensionNames()) names[index++] = dimensionName; names[index++] = String.valueOf(newReceptor.hashCode()); WaveMultidimensionalMathFunction newWave = new WaveMultidimensionalMathFunction(names); newWave.setAmplitude(wave.getAmplitude()); newWave.setDistribution(wave.getDistribution()); newWave.setForm(wave.getForm()); newWave.setFrequency(wave.getFrequency()); newWave.setPhase(wave.getPhase()); for(String dimension:wave.getDimensionNames()) { newWave.setCenter(dimension, wave.getCenter(dimension)); newWave.setDimension(dimension, wave.getDimension(dimension)); } copy.waves.add(newWave); } } return copy.mutate(1.0); } private WaveMultidimensionalMathFunction generateNewWave() { String[] dimensionNames = new String[this.receptors.size()]; int index = 0; for(ReceptorKey receptor : this.receptors) dimensionNames[index++] = String.valueOf(receptor.hashCode()); WaveMultidimensionalMathFunction newWave = new WaveMultidimensionalMathFunction(dimensionNames); newWave.setFrequency(random.nextGaussian() * 0.001); newWave.setPhase(random.nextGaussian() * 10); newWave.setAmplitude(random.nextGaussian()); newWave.setForm(Math.abs(random.nextGaussian())); if(newWave.getForm() <= 0.0) { newWave.setForm(newWave.getForm() + ((1 + random.nextGaussian()) * 10)); } for(String dimensionName:dimensionNames) { newWave.setCenter(dimensionName, newWave.getCenter(dimensionName) + ((random.nextFloat() * 2 - 1) * 100)); } newWave.setDistribution(random.nextFloat() * 100); return newWave; } private WaveMultidimensionalMathFunction generateRandomWave() { if(this.waves.size() > 0) { WaveMultidimensionalMathFunction[] wavesArray = new WaveMultidimensionalMathFunction[this.waves.size()]; this.waves.toArray(wavesArray); WaveMultidimensionalMathFunction randomWave = wavesArray[random.nextInt(wavesArray.length)]; WaveMultidimensionalMathFunction newWave = randomWave.clone(); if(random.nextDouble() <= 1.0) newWave.setFrequency(newWave.getFrequency() + ((random.nextFloat() * 2 - 1) * 0.01)); if(random.nextDouble() <= 1.0) newWave.setPhase(newWave.getPhase() + ((random.nextFloat() * 2 - 1) * 10)); if(random.nextDouble() <= 1.0) newWave.setAmplitude(newWave.getAmplitude() + ((random.nextFloat() * 2 - 1) * 10)); if(random.nextDouble() <= 1.0) newWave.setForm(newWave.getForm() + (random.nextFloat() * 0.01)); if(random.nextDouble() <= 1.0) newWave.setDistribution(newWave.getDistribution() + ((random.nextFloat() * 2 - 1) * 100)); if(random.nextDouble() <= 1.0) { String[] dimensionNames = newWave.getDimensionNames(); for(String dimensionName:dimensionNames) newWave.setCenter(dimensionName, newWave.getCenter(dimensionName) + ((random.nextFloat() * 2 - 1) * 100)); } return newWave; } return null; } @Override public String toString() { this.reconstructWavelet(); return this.wavelet.toString(); } }
new_file: src/com/syncleus/dann/genetics/wavelets/ExpressionFunction.java
/****************************************************************************** * * * Copyright: (c) Syncleus, Inc. * * * * You may redistribute and modify this source code under the terms and * * conditions of the Open Source Community License - Type C version 1.0 * * or any later version as published by Syncleus, Inc. at www.syncleus.com. * * There should be a copy of the license included with this file. If a copy * * of the license is not included you are granted no right to distribute or * * otherwise use this file except through a legal and valid license. You * * should also contact Syncleus, Inc. at the information below if you cannot * * find a license: * * * * Syncleus, Inc. * * 2604 South 12th Street * * Philadelphia, PA 19148 * * * ******************************************************************************/ package com.syncleus.dann.genetics.wavelets; import com.syncleus.dann.math.*; import java.util.*; public class ExpressionFunction { private static Random random = new Random(); HashSet<ReceptorKey> receptors; ArrayList<WaveMultidimensionalMathFunction> waves; WaveletMathFunction wavelet; public ExpressionFunction(ExpressionFunction copy) { this.receptors = new HashSet<ReceptorKey>(copy.receptors); this.waves = new ArrayList<WaveMultidimensionalMathFunction>(copy.waves); this.wavelet = copy.wavelet; } public ExpressionFunction(ReceptorKey initialInput) { this.waves = new ArrayList<WaveMultidimensionalMathFunction>(); this.receptors = new HashSet<ReceptorKey>(); this.receptors.add(initialInput); WaveMultidimensionalMathFunction initialWave = generateNewWave(); this.waves.add(initialWave); } private ExpressionFunction() { } public WaveletMathFunction getWaveletMathFunction() { return this.wavelet; } public int getWaveCount() { this.reconstructWavelet(); return this.wavelet.getWaveCount(); } public Set<ReceptorKey> getReceptors() { return Collections.unmodifiableSet(this.receptors); } public WaveletMathFunction getWavelet() { this.reconstructWavelet(); return this.wavelet.clone(); } public boolean receives(SignalKey signal) { for(ReceptorKey receptor : this.receptors) { if( receptor.binds(signal) ) return true; } return false; } public double calculate(Set<SignalKeyConcentration> signalConcentrations) { this.reconstructWavelet(); for(ReceptorKey receptor : this.receptors) { double concentration = 0.0; //calculate concentration for the current receptor for(SignalKeyConcentration signalConcentration : signalConcentrations) { if( receptor.binds(signalConcentration.getSignal())) concentration += signalConcentration.getConcentration(); } this.wavelet.setParameter(String.valueOf(receptor.hashCode()), concentration); } return this.wavelet.calculate(); } @Override public ExpressionFunction clone() { return new ExpressionFunction(this); } private void reconstructWavelet() { String[] receptorNames = new String[this.receptors.size()]; int receptorNamesIndex = 0; for(ReceptorKey receptor : this.receptors) receptorNames[receptorNamesIndex++] = String.valueOf(receptor.hashCode()); this.wavelet = new WaveletMathFunction(receptorNames); for(WaveMultidimensionalMathFunction wave : this.waves) { this.wavelet.addWave(wave); } } /** * Internally mutates.<br/> * <br/> * may change in any of the following ways:<br/> * <ul> * <li>add a new bound wave</li> * <li>copy an existing wave and mutates it adding the new mutated wave</li> * <li>delete an existing wave</li> * <li>removing a signal</li> * <li>Do nothing</li> * </ul> * * @return New mutated wavelet */ public ExpressionFunction mutate(double 
deviation) { ExpressionFunction copy = this.clone(); while(random.nextFloat() < 0.1) { //add a mutated copy of an existing wave if(random.nextDouble() < 0.1) { //Signal newSignal = this.getRandomSignal(); //return this.mutate(newSignal); copy.waves.add(this.generateRandomWave()); } //make a random new wave if(random.nextDouble() < 0.1) { copy.waves.add(this.generateNewWave()); } //delete a random wave if(random.nextDouble() < 0.1) { //only delete if there will be atleast one wave left if(this.waves.size() > 1) { WaveMultidimensionalMathFunction deleteWave = copy.waves.get(random.nextInt(copy.waves.size())); copy.waves.remove(deleteWave); } } //delete a signal if(random.nextDouble() < 0.1) { //only delet eif there will be atleast one signal left if(this.receptors.size() > 1) { ReceptorKey[] receptorArray = new ReceptorKey[copy.receptors.size()]; copy.receptors.toArray(receptorArray); ReceptorKey deleteReceptor = receptorArray[random.nextInt(receptorArray.length)]; copy.receptors.remove(deleteReceptor); ReceptorKey[] copyReceptors = new ReceptorKey[copy.receptors.size()]; copy.receptors.toArray(copyReceptors); String[] dimensionNames = new String[copyReceptors.length]; int dimensionNamesIndex = 0; for(ReceptorKey copyReceptor : copyReceptors) { dimensionNames[dimensionNamesIndex++] = String.valueOf(copyReceptor.hashCode()); } copy.waves.clear(); for(WaveMultidimensionalMathFunction wave:this.waves) { WaveMultidimensionalMathFunction newWave = new WaveMultidimensionalMathFunction(dimensionNames); newWave.setAmplitude(wave.getAmplitude()); newWave.setDistribution(wave.getDistribution()); newWave.setForm(wave.getForm()); newWave.setFrequency(wave.getFrequency()); newWave.setPhase(wave.getPhase()); for(String dimension:dimensionNames) { newWave.setCenter(dimension, wave.getCenter(dimension)); newWave.setDimension(dimension, wave.getDimension(dimension)); } copy.waves.add(newWave); } } } } return copy; } /** * Mutates by incorperating a new signal into the mutated result.<br/> * <br/> * May mutate by:<br/> * <ul> * <li>adding the new signal</li> * <ul> * @param newReceptor The new receptor to possibly incorperate into mutation * @param deviation random deviation for mutation. 
* @return New mutated wavelet */ public ExpressionFunction mutate(double deviation, ReceptorKey newReceptor) { ExpressionFunction copy = this.clone(); copy.receptors.add(newReceptor); if(copy.receptors.size() > this.receptors.size()) { copy.waves.clear(); for(WaveMultidimensionalMathFunction wave:this.waves) { String[] names = new String[wave.getDimensionNames().length + 1]; int index = 0; for(String dimensionName:wave.getDimensionNames()) { names[index++] = dimensionName; } names[index++] = String.valueOf(newReceptor.hashCode()); WaveMultidimensionalMathFunction newWave = new WaveMultidimensionalMathFunction(names); newWave.setAmplitude(wave.getAmplitude()); newWave.setDistribution(wave.getDistribution()); newWave.setForm(wave.getForm()); newWave.setFrequency(wave.getFrequency()); newWave.setPhase(wave.getPhase()); for(String dimension:wave.getDimensionNames()) { newWave.setCenter(dimension, wave.getCenter(dimension)); newWave.setDimension(dimension, wave.getDimension(dimension)); } copy.waves.add(newWave); } } return copy.mutate(1.0); } private WaveMultidimensionalMathFunction generateNewWave() { String[] dimensionNames = new String[this.receptors.size()]; int index = 0; for(ReceptorKey receptor : this.receptors) dimensionNames[index++] = String.valueOf(receptor.hashCode()); WaveMultidimensionalMathFunction newWave = new WaveMultidimensionalMathFunction(dimensionNames); newWave.setFrequency(random.nextGaussian() * 0.001); newWave.setPhase(random.nextGaussian() * 10); newWave.setAmplitude(random.nextGaussian()); newWave.setForm(Math.abs(random.nextGaussian())); if(newWave.getForm() <= 0.0) { newWave.setForm(newWave.getForm() + ((1 + random.nextGaussian()) * 10)); } for(String dimensionName:dimensionNames) { newWave.setCenter(dimensionName, newWave.getCenter(dimensionName) + ((random.nextFloat() * 2 - 1) * 100)); } newWave.setDistribution(random.nextFloat() * 100); return newWave; } private WaveMultidimensionalMathFunction generateRandomWave() { if(this.waves.size() > 0) { WaveMultidimensionalMathFunction[] wavesArray = new WaveMultidimensionalMathFunction[this.waves.size()]; this.waves.toArray(wavesArray); WaveMultidimensionalMathFunction randomWave = wavesArray[random.nextInt(wavesArray.length)]; WaveMultidimensionalMathFunction newWave = randomWave.clone(); if(random.nextDouble() <= 1.0) newWave.setFrequency(newWave.getFrequency() + ((random.nextFloat() * 2 - 1) * 0.01)); if(random.nextDouble() <= 1.0) newWave.setPhase(newWave.getPhase() + ((random.nextFloat() * 2 - 1) * 10)); if(random.nextDouble() <= 1.0) newWave.setAmplitude(newWave.getAmplitude() + ((random.nextFloat() * 2 - 1) * 10)); if(random.nextDouble() <= 1.0) newWave.setForm(newWave.getForm() + (random.nextFloat() * 0.01)); if(random.nextDouble() <= 1.0) newWave.setDistribution(newWave.getDistribution() + ((random.nextFloat() * 2 - 1) * 100)); if(random.nextDouble() <= 1.0) { String[] dimensionNames = newWave.getDimensionNames(); for(String dimensionName:dimensionNames) newWave.setCenter(dimensionName, newWave.getCenter(dimensionName) + ((random.nextFloat() * 2 - 1) * 100)); } return newWave; } return null; } @Override public String toString() { this.reconstructWavelet(); return this.wavelet.toString(); } }
message: Cleaned up some of the code formatting. git-svn-id: d0182e8a596e776e5c783845197fda39501da556@316 6ae8b97b-f314-0410-8212-aecf10b92ded
old_file: src/com/syncleus/dann/genetics/wavelets/ExpressionFunction.java
subject: Cleaned up some of the code formatting.
lang: Java
license: agpl-3.0
commit: 86da653730109cb528476dd34010dc6ddf09545d
returncode: 0
repos:
brsimioni/rstudio,maligulzar/Rstudio-instrumented,jar1karp/rstudio,suribes/rstudio,piersharding/rstudio,githubfun/rstudio,tbarrongh/rstudio,jrnold/rstudio,edrogers/rstudio,suribes/rstudio,githubfun/rstudio,JanMarvin/rstudio,edrogers/rstudio,tbarrongh/rstudio,nvoron23/rstudio,sfloresm/rstudio,maligulzar/Rstudio-instrumented,jar1karp/rstudio,edrogers/rstudio,sfloresm/rstudio,jzhu8803/rstudio,sfloresm/rstudio,githubfun/rstudio,brsimioni/rstudio,maligulzar/Rstudio-instrumented,edrogers/rstudio,more1/rstudio,githubfun/rstudio,jar1karp/rstudio,more1/rstudio,thklaus/rstudio,more1/rstudio,jzhu8803/rstudio,JanMarvin/rstudio,piersharding/rstudio,brsimioni/rstudio,edrogers/rstudio,jzhu8803/rstudio,piersharding/rstudio,jzhu8803/rstudio,jar1karp/rstudio,edrogers/rstudio,pssguy/rstudio,sfloresm/rstudio,tbarrongh/rstudio,jar1karp/rstudio,more1/rstudio,thklaus/rstudio,jrnold/rstudio,JanMarvin/rstudio,jzhu8803/rstudio,jar1karp/rstudio,piersharding/rstudio,suribes/rstudio,brsimioni/rstudio,jar1karp/rstudio,piersharding/rstudio,suribes/rstudio,jrnold/rstudio,piersharding/rstudio,githubfun/rstudio,tbarrongh/rstudio,maligulzar/Rstudio-instrumented,nvoron23/rstudio,john-r-mcpherson/rstudio,more1/rstudio,piersharding/rstudio,suribes/rstudio,more1/rstudio,maligulzar/Rstudio-instrumented,thklaus/rstudio,githubfun/rstudio,vbelakov/rstudio,john-r-mcpherson/rstudio,jzhu8803/rstudio,vbelakov/rstudio,vbelakov/rstudio,thklaus/rstudio,brsimioni/rstudio,vbelakov/rstudio,piersharding/rstudio,vbelakov/rstudio,JanMarvin/rstudio,tbarrongh/rstudio,pssguy/rstudio,nvoron23/rstudio,nvoron23/rstudio,pssguy/rstudio,jar1karp/rstudio,maligulzar/Rstudio-instrumented,suribes/rstudio,vbelakov/rstudio,sfloresm/rstudio,maligulzar/Rstudio-instrumented,githubfun/rstudio,sfloresm/rstudio,john-r-mcpherson/rstudio,jrnold/rstudio,sfloresm/rstudio,jar1karp/rstudio,edrogers/rstudio,jrnold/rstudio,thklaus/rstudio,pssguy/rstudio,tbarrongh/rstudio,thklaus/rstudio,nvoron23/rstudio,john-r-mcpherson/rstudio,suribes/rstudio,more1/rstudio,john-r-mcpherson/rstudio,JanMarvin/rstudio,pssguy/rstudio,JanMarvin/rstudio,brsimioni/rstudio,nvoron23/rstudio,nvoron23/rstudio,jrnold/rstudio,pssguy/rstudio,pssguy/rstudio,tbarrongh/rstudio,JanMarvin/rstudio,edrogers/rstudio,jrnold/rstudio,piersharding/rstudio,vbelakov/rstudio,john-r-mcpherson/rstudio,JanMarvin/rstudio,jrnold/rstudio,sfloresm/rstudio,brsimioni/rstudio,john-r-mcpherson/rstudio,pssguy/rstudio,tbarrongh/rstudio,john-r-mcpherson/rstudio,JanMarvin/rstudio,githubfun/rstudio,jzhu8803/rstudio,more1/rstudio,brsimioni/rstudio,thklaus/rstudio,maligulzar/Rstudio-instrumented,thklaus/rstudio,jrnold/rstudio,maligulzar/Rstudio-instrumented,vbelakov/rstudio,suribes/rstudio,jzhu8803/rstudio
/* * HelpStrategy.java * * Copyright (C) 2009-12 by RStudio, Inc. * * Unless you have received this program directly from RStudio pursuant * to the terms of a commercial license agreement with RStudio, then * this program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. * */ package org.rstudio.studio.client.workbench.views.console.shell.assist; import java.util.HashMap; import org.rstudio.core.client.Debug; import org.rstudio.studio.client.RStudioGinjector; import org.rstudio.studio.client.common.codetools.CodeToolsServerOperations; import org.rstudio.studio.client.common.codetools.RCompletionType; import org.rstudio.studio.client.server.ServerError; import org.rstudio.studio.client.server.ServerRequestCallback; import org.rstudio.studio.client.workbench.views.console.shell.assist.CompletionRequester.QualifiedName; import org.rstudio.studio.client.workbench.views.help.model.HelpInfo; import org.rstudio.studio.client.workbench.views.help.model.HelpInfo.ParsedInfo; import com.google.inject.Inject; import com.google.inject.Singleton; @Singleton public class HelpStrategy { final CodeToolsServerOperations server_; @Inject public HelpStrategy(CodeToolsServerOperations server) { server_ = server; cache_ = new HashMap<QualifiedName, ParsedInfo>(); } public void showHelpTopic(final QualifiedName selectedItem) { switch (selectedItem.type) { case RCompletionType.PACKAGE: server_.showHelpTopic(selectedItem.pkgName + "-package", null); break; default: server_.showHelpTopic(selectedItem.pkgName, null); break; } } public void showHelp(final QualifiedName item, final CompletionPopupDisplay display) { switch (item.type) { case RCompletionType.PACKAGE: showPackageHelp(item, display); break; case RCompletionType.ARGUMENTS: showParameterHelp(item, display); break; default: showFunctionHelp(item, display); break; } } public void clearCache() { cache_.clear(); } private void showFunctionHelp(final QualifiedName selectedItem, final CompletionPopupDisplay display) { ParsedInfo cachedHelp = cache_.get(selectedItem); if (cachedHelp != null) { display.displayFunctionHelp(cachedHelp); return; } server_.getHelp(selectedItem.name, selectedItem.pkgName, 0, new ServerRequestCallback<HelpInfo>() { @Override public void onError(ServerError error) { Debug.logError(error); RStudioGinjector.INSTANCE.getGlobalDisplay().showErrorMessage( "Error Retrieving Help", error.getUserMessage()); display.clearHelp(false) ; } public void onResponseReceived(HelpInfo result) { if (result != null) { HelpInfo.ParsedInfo help = result.parse(selectedItem.name) ; if (help.hasInfo()) { cache_.put(selectedItem, help); display.displayFunctionHelp(help) ; return; } } display.setHelpVisible(false); display.clearHelp(false) ; } }) ; } private void showParameterHelp(final QualifiedName selectedItem, final CompletionPopupDisplay display) { final String name = selectedItem.name.replaceAll("\\s*=\\s*$", ""); ParsedInfo cachedHelp = cache_.get(selectedItem); if (cachedHelp != null) { doShowParameterHelp(cachedHelp, name, display); return; } server_.getHelp(selectedItem.pkgName, null, 0, new ServerRequestCallback<HelpInfo>() { @Override public void onError(ServerError error) { display.clearHelp(false) ; } @Override public void onResponseReceived(HelpInfo response) { 
if (response != null) { ParsedInfo info = response.parse(selectedItem.pkgName); cache_.put(selectedItem, info); doShowParameterHelp(info, name, display); } else { display.clearHelp(false); } } }) ; } private void doShowParameterHelp(final ParsedInfo info, final String parameter, final CompletionPopupDisplay display) { String desc = info.getArgs().get(parameter) ; if (desc == null) { display.setHelpVisible(false); display.clearHelp(false) ; } else { display.displayParameterHelp(info, parameter) ; } } private void showPackageHelp(final QualifiedName selectedItem, final CompletionPopupDisplay display) { ParsedInfo cachedHelp = cache_.get(selectedItem); if (cachedHelp != null) { doShowPackageHelp(cachedHelp, display); return; } final String packageName = selectedItem.name; server_.getHelp(packageName, null, 0, new ServerRequestCallback<HelpInfo>() { @Override public void onError(ServerError error) { display.clearHelp(false) ; } @Override public void onResponseReceived(HelpInfo response) { if (response != null) { ParsedInfo info = response.parse(packageName); cache_.put(selectedItem, info); doShowPackageHelp(info, display); } else { display.clearHelp(false); } } }) ; } private void doShowPackageHelp(final ParsedInfo info, final CompletionPopupDisplay display) { display.displayPackageHelp(info) ; } HashMap<QualifiedName, ParsedInfo> cache_; }
new_file: src/gwt/src/org/rstudio/studio/client/workbench/views/console/shell/assist/HelpStrategy.java
/* * HelpStrategy.java * * Copyright (C) 2009-12 by RStudio, Inc. * * Unless you have received this program directly from RStudio pursuant * to the terms of a commercial license agreement with RStudio, then * this program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. * */ package org.rstudio.studio.client.workbench.views.console.shell.assist; import java.util.HashMap; import org.rstudio.core.client.Debug; import org.rstudio.studio.client.RStudioGinjector; import org.rstudio.studio.client.common.codetools.CodeToolsServerOperations; import org.rstudio.studio.client.common.codetools.RCompletionType; import org.rstudio.studio.client.server.ServerError; import org.rstudio.studio.client.server.ServerRequestCallback; import org.rstudio.studio.client.workbench.views.console.shell.assist.CompletionRequester.QualifiedName; import org.rstudio.studio.client.workbench.views.help.model.HelpInfo; import org.rstudio.studio.client.workbench.views.help.model.HelpInfo.ParsedInfo; import com.google.inject.Inject; import com.google.inject.Singleton; @Singleton public class HelpStrategy { final CodeToolsServerOperations server_; @Inject public HelpStrategy(CodeToolsServerOperations server) { server_ = server; cache_ = new HashMap<QualifiedName, ParsedInfo>(); } public void showHelpTopic(final QualifiedName selectedItem) { switch (selectedItem.type) { case RCompletionType.PACKAGE: server_.showHelpTopic(selectedItem.pkgName + "-package", null); break; default: server_.showHelpTopic(selectedItem.pkgName, null); break; } } public void showHelp(final QualifiedName item, final CompletionPopupDisplay display) { switch (item.type) { case RCompletionType.PACKAGE: showPackageHelp(item, display); break; case RCompletionType.ARGUMENTS: showParameterHelp(item, display); break; default: showFunctionHelp(item, display); break; } } public void clearCache() { cache_.clear(); } private void showFunctionHelp(final QualifiedName selectedItem, final CompletionPopupDisplay display) { ParsedInfo cachedHelp = cache_.get(selectedItem); if (cachedHelp != null) { display.displayFunctionHelp(cachedHelp); return; } server_.getHelp(selectedItem.name, selectedItem.pkgName, 0, new ServerRequestCallback<HelpInfo>() { @Override public void onError(ServerError error) { Debug.logError(error); RStudioGinjector.INSTANCE.getGlobalDisplay().showErrorMessage( "Error Retrieving Help", error.getUserMessage()); display.clearHelp(false) ; } public void onResponseReceived(HelpInfo result) { if (result != null) { HelpInfo.ParsedInfo help = result.parse(selectedItem.name) ; if (help.hasInfo()) { cache_.put(selectedItem, help); display.displayFunctionHelp(help) ; return; } } display.setHelpVisible(false); display.clearHelp(false) ; } }) ; } private void showParameterHelp(final QualifiedName selectedItem, final CompletionPopupDisplay display) { ParsedInfo cachedHelp = cache_.get(selectedItem); if (cachedHelp != null) { doShowParameterHelp(cachedHelp, selectedItem.name, display); return; } final String name = selectedItem.name.replaceAll("\\s*=\\s*$", ""); server_.getHelp(selectedItem.pkgName, null, 0, new ServerRequestCallback<HelpInfo>() { @Override public void onError(ServerError error) { display.clearHelp(false) ; } @Override public void onResponseReceived(HelpInfo 
response) { if (response != null) { ParsedInfo info = response.parse(selectedItem.pkgName); cache_.put(selectedItem, info); doShowParameterHelp(info, name, display); } else { display.clearHelp(false); } } }) ; } private void doShowParameterHelp(final ParsedInfo info, final String parameter, final CompletionPopupDisplay display) { String desc = info.getArgs().get(parameter) ; if (desc == null) { display.setHelpVisible(false); display.clearHelp(false) ; } else { display.displayParameterHelp(info, parameter) ; } } private void showPackageHelp(final QualifiedName selectedItem, final CompletionPopupDisplay display) { ParsedInfo cachedHelp = cache_.get(selectedItem); if (cachedHelp != null) { doShowPackageHelp(cachedHelp, display); return; } final String packageName = selectedItem.name; server_.getHelp(packageName, null, 0, new ServerRequestCallback<HelpInfo>() { @Override public void onError(ServerError error) { display.clearHelp(false) ; } @Override public void onResponseReceived(HelpInfo response) { if (response != null) { ParsedInfo info = response.parse(packageName); cache_.put(selectedItem, info); doShowPackageHelp(info, display); } else { display.clearHelp(false); } } }) ; } private void doShowPackageHelp(final ParsedInfo info, final CompletionPopupDisplay display) { display.displayPackageHelp(info) ; } HashMap<QualifiedName, ParsedInfo> cache_; }
message: use same name in parameter help
old_file: src/gwt/src/org/rstudio/studio/client/workbench/views/console/shell/assist/HelpStrategy.java
subject: use same name in parameter help
lang: Java
license: agpl-3.0
commit: f18c8b10e363e5e7b309c13a72f9831796986887
returncode: 0
repos:
aihua/opennms,rdkgit/opennms,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,aihua/opennms,rdkgit/opennms,tdefilip/opennms,tdefilip/opennms,rdkgit/opennms,rdkgit/opennms,rdkgit/opennms,roskens/opennms-pre-github,aihua/opennms,aihua/opennms,tdefilip/opennms,aihua/opennms,roskens/opennms-pre-github,aihua/opennms,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,roskens/opennms-pre-github,rdkgit/opennms,tdefilip/opennms,tdefilip/opennms,aihua/opennms,rdkgit/opennms,roskens/opennms-pre-github,aihua/opennms,tdefilip/opennms,roskens/opennms-pre-github,aihua/opennms,tdefilip/opennms,tdefilip/opennms
// // This file is part of the OpenNMS(R) Application. // // OpenNMS(R) is Copyright (C) 2006 The OpenNMS Group, Inc. All rights reserved. // OpenNMS(R) is a derivative work, containing both original code, included code and modified // code that was published under the GNU General Public License. Copyrights for modified // and included code are below. // // OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. // // Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved. // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // For more information contact: // OpenNMS Licensing <license@opennms.org> // http://www.opennms.org/ // http://www.opennms.com/ // package org.opennms.netmgt.dao.jdbc.pollresult; import java.sql.Types; import javax.sql.DataSource; import org.opennms.netmgt.model.PollResult; import org.springframework.jdbc.core.SqlParameter; import org.springframework.jdbc.object.SqlUpdate; public class PollResultSaveOrUpdate extends SqlUpdate { public PollResultSaveOrUpdate(DataSource ds, String updateStmt) { setDataSource(ds); setSql(updateStmt); // assumes that the update and insert statements have the same parms in the same order declareParameter(new SqlParameter(Types.INTEGER)); //pollid (demandPollId) declareParameter(new SqlParameter(Types.INTEGER)); //nodeid declareParameter(new SqlParameter(Types.VARCHAR)); //ipaddr declareParameter(new SqlParameter(Types.INTEGER)); //ifIndex declareParameter(new SqlParameter(Types.INTEGER)); //serviceId declareParameter(new SqlParameter(Types.INTEGER)); //statusCode declareParameter(new SqlParameter(Types.VARCHAR)); //statusName declareParameter(new SqlParameter(Types.VARCHAR)); //statusReason declareParameter(new SqlParameter(Types.INTEGER)); //id compile(); } public int persist(PollResult result) { Object[] parms = new Object[] { result.getDemandPoll().getId(), result.getMonitoredService().getNodeId(), result.getMonitoredService().getIpAddress(), result.getMonitoredService().getIfIndex(), result.getMonitoredService().getServiceId(), result.getStatus().getStatusCode(), result.getStatus().getStatusName(), result.getStatus().getReason(), result.getId() }; return update(parms); } }
new_file: opennms-dao/src/main/java/org/opennms/netmgt/dao/jdbc/pollresult/PollResultSaveOrUpdate.java
// // This file is part of the OpenNMS(R) Application. // // OpenNMS(R) is Copyright (C) 2006 The OpenNMS Group, Inc. All rights reserved. // OpenNMS(R) is a derivative work, containing both original code, included code and modified // code that was published under the GNU General Public License. Copyrights for modified // and included code are below. // // OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. // // Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved. // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // For more information contact: // OpenNMS Licensing <license@opennms.org> // http://www.opennms.org/ // http://www.opennms.com/ // package org.opennms.netmgt.dao.jdbc.pollresult; import java.sql.Types; import javax.sql.DataSource; import org.opennms.netmgt.model.OnmsOutage; import org.opennms.netmgt.model.PollResult; import org.springframework.jdbc.core.SqlParameter; import org.springframework.jdbc.object.SqlUpdate; public class PollResultSaveOrUpdate extends SqlUpdate { public PollResultSaveOrUpdate(DataSource ds, String updateStmt) { setDataSource(ds); setSql(updateStmt); // assumes that the update and insert statements have the same parms in the same order declareParameter(new SqlParameter(Types.INTEGER)); //pollid (demandPollId) declareParameter(new SqlParameter(Types.INTEGER)); //nodeid declareParameter(new SqlParameter(Types.VARCHAR)); //ipaddr declareParameter(new SqlParameter(Types.INTEGER)); //ifIndex declareParameter(new SqlParameter(Types.INTEGER)); //serviceId declareParameter(new SqlParameter(Types.INTEGER)); //statusCode declareParameter(new SqlParameter(Types.VARCHAR)); //statusName declareParameter(new SqlParameter(Types.VARCHAR)); //statusReason declareParameter(new SqlParameter(Types.INTEGER)); //id compile(); } public int persist(PollResult result) { Object[] parms = new Object[] { result.getDemandPoll().getId(), result.getMonitoredService().getNodeId(), result.getMonitoredService().getIpAddress(), result.getMonitoredService().getIfIndex(), result.getMonitoredService().getServiceId(), result.getStatus().getStatusCode(), result.getStatus().getStatusName(), result.getStatus().getReason(), result.getId() }; return update(parms); } }
message: organize imports
old_file: opennms-dao/src/main/java/org/opennms/netmgt/dao/jdbc/pollresult/PollResultSaveOrUpdate.java
subject: organize imports
lang: Java
license: lgpl-2.1
commit: 1fbf1ef78e9e0682da9714a8a479eda3a2821437
returncode: 0
repos:
99sono/wildfly,rhusar/wildfly,jstourac/wildfly,jstourac/wildfly,pferraro/wildfly,wildfly/wildfly,xasx/wildfly,iweiss/wildfly,jstourac/wildfly,tomazzupan/wildfly,iweiss/wildfly,rhusar/wildfly,xasx/wildfly,tomazzupan/wildfly,pferraro/wildfly,wildfly/wildfly,jstourac/wildfly,tadamski/wildfly,99sono/wildfly,99sono/wildfly,tomazzupan/wildfly,iweiss/wildfly,pferraro/wildfly,golovnin/wildfly,tadamski/wildfly,iweiss/wildfly,rhusar/wildfly,golovnin/wildfly,xasx/wildfly,pferraro/wildfly,golovnin/wildfly,tadamski/wildfly,wildfly/wildfly,wildfly/wildfly,rhusar/wildfly
/* * JBoss, Home of Professional Open Source. * Copyright 2012, Red Hat Middleware LLC, and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.test.integration.ws.authentication; import java.net.URL; import javax.xml.namespace.QName; import javax.xml.ws.Service; import javax.xml.ws.WebServiceException; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.container.test.api.RunAsClient; import org.jboss.arquillian.junit.Arquillian; import org.jboss.arquillian.test.api.ArquillianResource; import org.jboss.as.arquillian.api.ServerSetup; import org.jboss.as.test.integration.ejb.security.EjbSecurityDomainSetup; import org.jboss.as.test.integration.security.common.Utils; import org.jboss.security.Base64Encoder; import org.jboss.shrinkwrap.api.Archive; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.JavaArchive; import org.junit.runner.RunWith; import org.junit.Assert; import org.junit.Test; /** * Tests for secured access to WSDL for EJB endpoint * * @author Rostislav Svoboda */ @ServerSetup({EjbSecurityDomainSetup.class}) @RunWith(Arquillian.class) @RunAsClient public class EJBEndpointSecuredWSDLAccessTestCase { @ArquillianResource URL baseUrl; @Deployment(testable = false) public static Archive<?> deployment() { JavaArchive jar = ShrinkWrap.create(JavaArchive.class, "jaxws-authentication-ejb3-for-wsdl.jar") .addAsResource(EJBEndpointSecuredWSDLAccessTestCase.class.getPackage(), "users.properties", "users.properties") .addAsResource(EJBEndpointSecuredWSDLAccessTestCase.class.getPackage(), "roles.properties", "roles.properties") .addClasses(EJBEndpointIface.class, EJBEndpointSecuredWSDLAccess.class); return jar; } @Test public void createService() throws Exception { QName serviceName = new QName("http://jbossws.org/authenticationForWSDL", "EJB3ServiceForWSDL"); URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); try { Service service = Service.create(wsdlURL, serviceName); EJBEndpointIface proxy = service.getPort(EJBEndpointIface.class); Assert.fail("Proxy shouldn't be created because WSDL access should be secured"); } catch (WebServiceException e) { // failure is expected } } @Test public void accessWSDLWithValidUsernameAndPassord() throws Exception { URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); String encoding = Base64Encoder.encode("user1:password1"); DefaultHttpClient httpclient = new DefaultHttpClient(); HttpGet httpget = new 
HttpGet(wsdlURL.toString()); httpget.setHeader("Authorization", "Basic " + encoding); HttpResponse response = httpclient.execute(httpget); String text = Utils.getContent(response); Assert.assertTrue("Response doesn't contain wsdl file", text.contains("wsdl:binding")); } @Test public void accessWSDLWithValidUsernameAndPassordButInvalidRole() throws Exception { URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); String encoding = Base64Encoder.encode("user2:password2"); DefaultHttpClient httpclient = new DefaultHttpClient(); HttpGet httpget = new HttpGet(wsdlURL.toString()); httpget.setHeader("Authorization", "Basic " + encoding); HttpResponse response = httpclient.execute(httpget); Assert.assertEquals(403, response.getStatusLine().getStatusCode()); Utils.getContent(response); //Assert.assertTrue("Response doesn't contain access denied message", text.contains("Access to the requested resource has been denied")); } @Test public void accessWSDLWithInvalidUsernameAndPassord() throws Exception { URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); String encoding = Base64Encoder.encode("user1:password-XZY"); DefaultHttpClient httpclient = new DefaultHttpClient(); HttpGet httpget = new HttpGet(wsdlURL.toString()); httpget.setHeader("Authorization", "Basic " + encoding); HttpResponse response = httpclient.execute(httpget); Assert.assertEquals(401, response.getStatusLine().getStatusCode()); Utils.getContent(response); //Assert.assertTrue("Response doesn't contain expected message.", text.contains("This request requires HTTP authentication")); } @Test public void accessWSDLWithoutUsernameAndPassord() throws Exception { URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); DefaultHttpClient httpclient = new DefaultHttpClient(); HttpGet httpget = new HttpGet(wsdlURL.toString()); HttpResponse response = httpclient.execute(httpget); Assert.assertEquals(401, response.getStatusLine().getStatusCode()); Utils.getContent(response); //Assert.assertTrue("Response doesn't contain expected message.", text.contains("This request requires HTTP authentication")); } }
new_file: testsuite/integration/basic/src/test/java/org/jboss/as/test/integration/ws/authentication/EJBEndpointSecuredWSDLAccessTestCase.java
/* * JBoss, Home of Professional Open Source. * Copyright 2012, Red Hat Middleware LLC, and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.test.integration.ws.authentication; import java.net.URL; import javax.xml.namespace.QName; import javax.xml.ws.Service; import javax.xml.ws.WebServiceException; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.container.test.api.RunAsClient; import org.jboss.arquillian.junit.Arquillian; import org.jboss.arquillian.test.api.ArquillianResource; import org.jboss.as.arquillian.api.ServerSetup; import org.jboss.as.test.integration.ejb.security.EjbSecurityDomainSetup; import org.jboss.as.test.integration.security.common.Utils; import org.jboss.security.Base64Encoder; import org.jboss.shrinkwrap.api.Archive; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.JavaArchive; import org.junit.runner.RunWith; import org.junit.Assert; import org.junit.Test; /** * Tests for secured access to WSDL for EJB endpoint * * @author Rostislav Svoboda */ @ServerSetup({EjbSecurityDomainSetup.class}) @RunWith(Arquillian.class) @RunAsClient public class EJBEndpointSecuredWSDLAccessTestCase { @ArquillianResource URL baseUrl; @Deployment(testable = false) public static Archive<?> deployment() { JavaArchive jar = ShrinkWrap.create(JavaArchive.class, "jaxws-authentication-ejb3-for-wsdl.jar") .addAsResource(EJBEndpointSecuredWSDLAccessTestCase.class.getPackage(), "users.properties", "users.properties") .addAsResource(EJBEndpointSecuredWSDLAccessTestCase.class.getPackage(), "roles.properties", "roles.properties") .addClasses(EJBEndpointIface.class, EJBEndpointSecuredWSDLAccess.class); return jar; } @Test public void createService() throws Exception { QName serviceName = new QName("http://jbossws.org/authenticationForWSDL", "EJB3ServiceForWSDL"); URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); try { Service service = Service.create(wsdlURL, serviceName); EJBEndpointIface proxy = service.getPort(EJBEndpointIface.class); Assert.fail("Proxy shouldn't be created because WSDL access should be secured"); } catch (WebServiceException e) { // failure is expected } } @Test public void accessWSDLWithValidUsernameAndPassord() throws Exception { URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); String encoding = Base64Encoder.encode("user1:password1"); DefaultHttpClient httpclient = new DefaultHttpClient(); HttpGet httpget = new 
HttpGet(wsdlURL.toString()); httpget.setHeader("Authorization", "Basic " + encoding); HttpResponse response = httpclient.execute(httpget); String text = Utils.getContent(response); Assert.assertTrue("Response doesn't contain wsdl file", text.contains("wsdl:binding")); } @Test public void accessWSDLWithValidUsernameAndPassordButInvalidRole() throws Exception { URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); String encoding = Base64Encoder.encode("user2:password2"); DefaultHttpClient httpclient = new DefaultHttpClient(); HttpGet httpget = new HttpGet(wsdlURL.toString()); httpget.setHeader("Authorization", "Basic " + encoding); HttpResponse response = httpclient.execute(httpget); String text = Utils.getContent(response); Assert.assertTrue("Response doesn't contain access denied message", text.contains("Access to the requested resource has been denied")); } @Test public void accessWSDLWithInvalidUsernameAndPassord() throws Exception { URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); String encoding = Base64Encoder.encode("user1:password-XZY"); DefaultHttpClient httpclient = new DefaultHttpClient(); HttpGet httpget = new HttpGet(wsdlURL.toString()); httpget.setHeader("Authorization", "Basic " + encoding); HttpResponse response = httpclient.execute(httpget); String text = Utils.getContent(response); Assert.assertTrue("Response doesn't contain expected message.", text.contains("This request requires HTTP authentication")); } @Test public void accessWSDLWithoutUsernameAndPassord() throws Exception { URL wsdlURL = new URL(baseUrl, "/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl"); DefaultHttpClient httpclient = new DefaultHttpClient(); HttpGet httpget = new HttpGet(wsdlURL.toString()); HttpResponse response = httpclient.execute(httpget); String text = Utils.getContent(response); Assert.assertTrue("Response doesn't contain expected message.", text.contains("This request requires HTTP authentication")); } }
Change test to not rely on text in the error pages
testsuite/integration/basic/src/test/java/org/jboss/as/test/integration/ws/authentication/EJBEndpointSecuredWSDLAccessTestCase.java
Change test to not rely on text in the error pages
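The assertions in the test record above match on error-page wording ("This request requires HTTP authentication", "Access to the requested resource has been denied"), which is exactly what the commit message wants to move away from. Below is a minimal sketch, not the project's actual test, of checking HTTP status codes instead; the endpoint URL, credentials and expected codes (200/401/403) are assumptions, and it uses the same Apache HttpClient 4.x calls seen above plus java.util.Base64 from Java 8.

// Hedged sketch only: assert on status codes rather than on error-page text.
// URL, users and expected codes are placeholders, not taken from the record.
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class SecuredWsdlStatusSketch {

    private static int get(DefaultHttpClient client, String url, String userAndPassword) throws Exception {
        HttpGet request = new HttpGet(url);
        if (userAndPassword != null) {
            String token = Base64.getEncoder()
                    .encodeToString(userAndPassword.getBytes(StandardCharsets.UTF_8));
            request.setHeader("Authorization", "Basic " + token);
        }
        HttpResponse response = client.execute(request);
        EntityUtils.consume(response.getEntity()); // release the connection for the next call
        return response.getStatusLine().getStatusCode();
    }

    public static void main(String[] args) throws Exception {
        // Placeholder endpoint; the real test derives it from the injected Arquillian baseUrl.
        String wsdl = "http://localhost:8080/jaxws-authentication-ejb3-for-wsdl/EJB3ServiceForWSDL?wsdl";
        DefaultHttpClient client = new DefaultHttpClient();

        System.out.println("valid user:     " + get(client, wsdl, "user1:password1"));     // expect 200
        System.out.println("wrong role:     " + get(client, wsdl, "user2:password2"));     // expect 403
        System.out.println("bad password:   " + get(client, wsdl, "user1:password-XZY"));  // expect 401
        System.out.println("no credentials: " + get(client, wsdl, null));                  // expect 401
    }
}

In the Arquillian test itself the checks would be JUnit assertions rather than prints, but the point of the commit message holds either way: status codes stay stable when the container's error pages change.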
Java
apache-2.0
55bde5cfff9e0bc17b8651a30075fdb3616eab48
0
thelastpickle/metrics,rexren/metrics,fcrepo4-archive/metrics,mveitas/metrics,signalfx/metrics,mtakaki/metrics,slachiewicz/metrics,mnuessler/metrics,egymgmbh/metrics,ohr/metrics,AltitudeDigital/metrics,gburton1/metrics,mt0803/metrics,slovdahl/metrics,wfxiang08/metrics,dropwizard/metrics,dropwizard/metrics,ind9/metrics,infusionsoft/yammer-metrics,jplock/metrics,randomstatistic/metrics,kevintvh/metrics,Banno/metrics,cirrus-dev/metrics,chenxianghua2014/metrics,gburton1/metrics,mspiegel/metrics,tempredirect/metrics,bentatham/metrics,jasw/metrics,valery1707/dropwizard-metrics,unitsofmeasurement/metrics,dropwizard/metrics,ChetnaChaudhari/metrics,mveitas/metrics,fcrepo4-archive/metrics,mattnelson/metrics,timezra/metrics,infusionsoft/yammer-metrics
package com.yammer.metrics.jersey; import com.sun.jersey.api.core.HttpContext; import com.sun.jersey.api.model.AbstractResourceMethod; import com.sun.jersey.spi.container.ResourceMethodDispatchProvider; import com.sun.jersey.spi.dispatch.RequestDispatcher; import com.yammer.metrics.Meter; import com.yammer.metrics.MetricRegistry; import com.yammer.metrics.Timer; import com.yammer.metrics.annotation.ExceptionMetered; import com.yammer.metrics.annotation.Metered; import com.yammer.metrics.annotation.Timed; import static com.yammer.metrics.MetricRegistry.name; class InstrumentedResourceMethodDispatchProvider implements ResourceMethodDispatchProvider { private static class TimedRequestDispatcher implements RequestDispatcher { private final RequestDispatcher underlying; private final Timer timer; private TimedRequestDispatcher(RequestDispatcher underlying, Timer timer) { this.underlying = underlying; this.timer = timer; } @Override public void dispatch(Object resource, HttpContext httpContext) { final Timer.Context context = timer.time(); try { underlying.dispatch(resource, httpContext); } finally { context.stop(); } } } private static class MeteredRequestDispatcher implements RequestDispatcher { private final RequestDispatcher underlying; private final Meter meter; private MeteredRequestDispatcher(RequestDispatcher underlying, Meter meter) { this.underlying = underlying; this.meter = meter; } @Override public void dispatch(Object resource, HttpContext httpContext) { meter.mark(); underlying.dispatch(resource, httpContext); } } private static class ExceptionMeteredRequestDispatcher implements RequestDispatcher { private final RequestDispatcher underlying; private final Meter meter; private final Class<? extends Throwable> exceptionClass; private ExceptionMeteredRequestDispatcher(RequestDispatcher underlying, Meter meter, Class<? extends Throwable> exceptionClass) { this.underlying = underlying; this.meter = meter; this.exceptionClass = exceptionClass; } @Override public void dispatch(Object resource, HttpContext httpContext) { try { underlying.dispatch(resource, httpContext); } catch (Error e) { throw e; } catch (Throwable e) { if (exceptionClass.isAssignableFrom(e.getClass()) || (e.getCause() != null && exceptionClass.isAssignableFrom(e.getCause().getClass()))) { meter.mark(); } InstrumentedResourceMethodDispatchProvider.<RuntimeException>throwUnchecked(e); } } } /* * A dirty hack to allow us to throw exceptions of any type without bringing down the unsafe * thunder. 
*/ @SuppressWarnings("unchecked") private static <T extends Exception> void throwUnchecked(Throwable e) throws T { throw (T) e; } private final ResourceMethodDispatchProvider provider; private final MetricRegistry registry; public InstrumentedResourceMethodDispatchProvider(ResourceMethodDispatchProvider provider, MetricRegistry registry) { this.provider = provider; this.registry = registry; } @Override public RequestDispatcher create(AbstractResourceMethod method) { RequestDispatcher dispatcher = provider.create(method); if (dispatcher == null) { return null; } if (method.getMethod().isAnnotationPresent(Timed.class)) { final Timed annotation = method.getMethod().getAnnotation(Timed.class); final String name = chooseName(annotation.name(), annotation.absolute(), method); final Timer timer = registry.timer(name); dispatcher = new TimedRequestDispatcher(dispatcher, timer); } if (method.getMethod().isAnnotationPresent(Metered.class)) { final Metered annotation = method.getMethod().getAnnotation(Metered.class); final String name = chooseName(annotation.name(), annotation.absolute(), method); final Meter meter = registry.meter(name); dispatcher = new MeteredRequestDispatcher(dispatcher, meter); } if (method.getMethod().isAnnotationPresent(ExceptionMetered.class)) { final ExceptionMetered annotation = method.getMethod() .getAnnotation(ExceptionMetered.class); final String name = chooseName(annotation.name(), annotation.absolute(), method, ExceptionMetered.DEFAULT_NAME_SUFFIX); final Meter meter = registry.meter(name); dispatcher = new ExceptionMeteredRequestDispatcher(dispatcher, meter, annotation.cause()); } return dispatcher; } private String chooseName(String explicitName, boolean absolute, AbstractResourceMethod method, String... suffixes) { if (explicitName != null && !explicitName.isEmpty()) { if (absolute) { return explicitName; } return name(method.getDeclaringResource().getResourceClass(), explicitName); } return name(name(method.getDeclaringResource().getResourceClass(), method.getMethod().getName()), suffixes); } }
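InstrumentedResourceMethodDispatchProvider above instruments Jersey resource methods by wrapping the RequestDispatcher Jersey would otherwise use: the wrapper times, meters, or exception-meters the call and then delegates. The sketch below uses invented names and plain JDK types standing in for Jersey's RequestDispatcher and metrics-core's Timer, and shows only that wrap-and-delegate shape.

// Hedged illustration of the decorator structure in TimedRequestDispatcher above;
// nothing here depends on Jersey or metrics-core.
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

public class TimedDecoratorSketch {

    /** Stand-in for Jersey's RequestDispatcher: one dispatch-like operation. */
    interface Dispatcher {
        void dispatch(String resource);
    }

    /** Decorator that measures each call and always delegates. */
    static final class TimedDispatcher implements Dispatcher {
        private final Dispatcher underlying;
        private final AtomicLong totalNanos = new AtomicLong();

        TimedDispatcher(Dispatcher underlying) {
            this.underlying = underlying;
        }

        @Override
        public void dispatch(String resource) {
            long start = System.nanoTime();
            try {
                underlying.dispatch(resource);                        // delegate to the wrapped dispatcher
            } finally {
                totalNanos.addAndGet(System.nanoTime() - start);      // record even if dispatch threw
            }
        }

        long totalMillis() {
            return TimeUnit.NANOSECONDS.toMillis(totalNanos.get());
        }
    }

    public static void main(String[] args) {
        TimedDispatcher timed = new TimedDispatcher(resource -> {
            try {
                Thread.sleep(50); // pretend to do resource-method work
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });
        timed.dispatch("/example");
        System.out.println("dispatch took ~" + timed.totalMillis() + " ms");
    }
}

In the record above, metrics-core's Timer.Context does the equivalent of this nanoTime bookkeeping, and the try/finally guarantees a sample is recorded even when the wrapped dispatch throws.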
metrics-jersey/src/main/java/com/yammer/metrics/jersey/InstrumentedResourceMethodDispatchProvider.java
package com.yammer.metrics.jersey; import com.sun.jersey.api.core.HttpContext; import com.sun.jersey.api.model.AbstractResourceMethod; import com.sun.jersey.spi.container.ResourceMethodDispatchProvider; import com.sun.jersey.spi.dispatch.RequestDispatcher; import com.yammer.metrics.Meter; import com.yammer.metrics.MetricRegistry; import com.yammer.metrics.Timer; import com.yammer.metrics.annotation.ExceptionMetered; import com.yammer.metrics.annotation.Metered; import com.yammer.metrics.annotation.Timed; import static com.yammer.metrics.MetricRegistry.name; class InstrumentedResourceMethodDispatchProvider implements ResourceMethodDispatchProvider { private static class TimedRequestDispatcher implements RequestDispatcher { private final RequestDispatcher underlying; private final Timer timer; private TimedRequestDispatcher(RequestDispatcher underlying, Timer timer) { this.underlying = underlying; this.timer = timer; } @Override public void dispatch(Object resource, HttpContext httpContext) { final Timer.Context context = timer.time(); try { underlying.dispatch(resource, httpContext); } finally { context.stop(); } } } private static class MeteredRequestDispatcher implements RequestDispatcher { private final RequestDispatcher underlying; private final Meter meter; private MeteredRequestDispatcher(RequestDispatcher underlying, Meter meter) { this.underlying = underlying; this.meter = meter; } @Override public void dispatch(Object resource, HttpContext httpContext) { meter.mark(); underlying.dispatch(resource, httpContext); } } private static class ExceptionMeteredRequestDispatcher implements RequestDispatcher { private final RequestDispatcher underlying; private final Meter meter; private final Class<? extends Throwable> exceptionClass; private ExceptionMeteredRequestDispatcher(RequestDispatcher underlying, Meter meter, Class<? extends Throwable> exceptionClass) { this.underlying = underlying; this.meter = meter; this.exceptionClass = exceptionClass; } @Override public void dispatch(Object resource, HttpContext httpContext) { try { underlying.dispatch(resource, httpContext); } catch (Throwable e) { if (exceptionClass.isAssignableFrom(e.getClass()) || (e.getCause() != null && exceptionClass.isAssignableFrom(e.getCause().getClass()))) { meter.mark(); } InstrumentedResourceMethodDispatchProvider.<RuntimeException>throwUnchecked(e); } } } /* * A dirty hack to allow us to throw exceptions of any type without bringing down the unsafe * thunder. 
*/ @SuppressWarnings("unchecked") private static <T extends Exception> void throwUnchecked(Throwable e) throws T { throw (T) e; } private final ResourceMethodDispatchProvider provider; private final MetricRegistry registry; public InstrumentedResourceMethodDispatchProvider(ResourceMethodDispatchProvider provider, MetricRegistry registry) { this.provider = provider; this.registry = registry; } @Override public RequestDispatcher create(AbstractResourceMethod method) { RequestDispatcher dispatcher = provider.create(method); if (dispatcher == null) { return null; } if (method.getMethod().isAnnotationPresent(Timed.class)) { final Timed annotation = method.getMethod().getAnnotation(Timed.class); final String name = chooseName(annotation.name(), annotation.absolute(), method); final Timer timer = registry.timer(name); dispatcher = new TimedRequestDispatcher(dispatcher, timer); } if (method.getMethod().isAnnotationPresent(Metered.class)) { final Metered annotation = method.getMethod().getAnnotation(Metered.class); final String name = chooseName(annotation.name(), annotation.absolute(), method); final Meter meter = registry.meter(name); dispatcher = new MeteredRequestDispatcher(dispatcher, meter); } if (method.getMethod().isAnnotationPresent(ExceptionMetered.class)) { final ExceptionMetered annotation = method.getMethod() .getAnnotation(ExceptionMetered.class); final String name = chooseName(annotation.name(), annotation.absolute(), method, ExceptionMetered.DEFAULT_NAME_SUFFIX); final Meter meter = registry.meter(name); dispatcher = new ExceptionMeteredRequestDispatcher(dispatcher, meter, annotation.cause()); } return dispatcher; } private String chooseName(String explicitName, boolean absolute, AbstractResourceMethod method, String... suffixes) { if (explicitName != null && !explicitName.isEmpty()) { if (absolute) { return explicitName; } return name(method.getDeclaringResource().getResourceClass(), explicitName); } return name(name(method.getDeclaringResource().getResourceClass(), method.getMethod().getName()), suffixes); } }
Make sure InstrumentedResourceMethodDispatchProvider doesn't suppress errors.
metrics-jersey/src/main/java/com/yammer/metrics/jersey/InstrumentedResourceMethodDispatchProvider.java
Make sure InstrumentedResourceMethodDispatchProvider doesn't suppress errors.
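The only difference between the new and old contents above is the extra catch (Error e) { throw e; } in ExceptionMeteredRequestDispatcher, so JVM-level Errors are rethrown untouched instead of being metered and funneled through throwUnchecked. A hedged, self-contained sketch of the two idioms involved follows: Error pass-through ahead of a Throwable catch, and the unchecked-rethrow cast. Class and method names are invented for the sketch.

// Hedged sketch of "don't suppress errors": Errors bypass the counter entirely,
// ordinary exceptions are counted and rethrown via the generic-cast trick.
public class ErrorPassThroughSketch {

    /** Generic-cast trick: rethrows any Throwable without declaring it (same idea as throwUnchecked). */
    @SuppressWarnings("unchecked")
    private static <T extends Throwable> void sneakyThrow(Throwable t) throws T {
        throw (T) t;
    }

    static int exceptionCount = 0;

    static void run(Runnable task) {
        try {
            task.run();
        } catch (Error e) {
            throw e;                 // never meter or wrap JVM errors
        } catch (Throwable e) {
            exceptionCount++;        // only ordinary exceptions are counted
            ErrorPassThroughSketch.<RuntimeException>sneakyThrow(e);
        }
    }

    public static void main(String[] args) {
        try {
            run(() -> { throw new IllegalStateException("boom"); });
        } catch (IllegalStateException expected) {
            System.out.println("counted exceptions: " + exceptionCount); // 1
        }

        try {
            run(() -> { throw new AssertionError("fatal"); });
        } catch (AssertionError expected) {
            System.out.println("counted exceptions: " + exceptionCount); // still 1
        }
    }
}

The cast in sneakyThrow never fails at runtime because generics are erased; it only convinces the compiler that no throws clause is needed, which is exactly the job throwUnchecked does in the record.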
Java
apache-2.0
c9975513de3f131cb1a44de4eb5988f684c223e0
0
TommyLemon/APIJSON,TommyLemon/APIJSON,TommyLemon/APIJSON,TommyLemon/APIJSON,TommyLemon/APIJSON,TommyLemon/APIJSON
/*Copyright ©2016 TommyLemon(https://github.com/TommyLemon/APIJSON) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.*/ package zuo.biao.apijson.server; import static zuo.biao.apijson.JSONObject.KEY_COLUMN; import static zuo.biao.apijson.JSONObject.KEY_COMBINE; import static zuo.biao.apijson.JSONObject.KEY_DATABASE; import static zuo.biao.apijson.JSONObject.KEY_EXPLAIN; import static zuo.biao.apijson.JSONObject.KEY_CACHE; import static zuo.biao.apijson.JSONObject.KEY_FROM; import static zuo.biao.apijson.JSONObject.KEY_GROUP; import static zuo.biao.apijson.JSONObject.KEY_HAVING; import static zuo.biao.apijson.JSONObject.KEY_ID; import static zuo.biao.apijson.JSONObject.KEY_ORDER; import static zuo.biao.apijson.JSONObject.KEY_ROLE; import static zuo.biao.apijson.JSONObject.KEY_SCHEMA; import static zuo.biao.apijson.JSONObject.KEY_USER_ID; import static zuo.biao.apijson.RequestMethod.DELETE; import static zuo.biao.apijson.RequestMethod.GET; import static zuo.biao.apijson.RequestMethod.GETS; import static zuo.biao.apijson.RequestMethod.HEADS; import static zuo.biao.apijson.RequestMethod.POST; import static zuo.biao.apijson.RequestMethod.PUT; import static zuo.biao.apijson.SQL.AND; import static zuo.biao.apijson.SQL.NOT; import static zuo.biao.apijson.SQL.OR; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.regex.Pattern; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.annotation.JSONField; import zuo.biao.apijson.JSON; import zuo.biao.apijson.Log; import zuo.biao.apijson.NotNull; import zuo.biao.apijson.RequestMethod; import zuo.biao.apijson.RequestRole; import zuo.biao.apijson.SQL; import zuo.biao.apijson.StringUtil; import zuo.biao.apijson.server.exception.NotExistException; import zuo.biao.apijson.server.model.Column; import zuo.biao.apijson.server.model.PgAttribute; import zuo.biao.apijson.server.model.PgClass; import zuo.biao.apijson.server.model.Table; /**config sql for JSON Request * @author Lemon */ public abstract class AbstractSQLConfig implements SQLConfig { private static final String TAG = "AbstractSQLConfig"; public static String DEFAULT_SCHEMA = "sys"; public static String PREFFIX_DISTINCT = "DISTINCT "; /** * 表名映射,隐藏真实表名,对安全要求很高的表可以这么做 */ public static final Map<String, String> TABLE_KEY_MAP; static { TABLE_KEY_MAP = new HashMap<String, String>(); TABLE_KEY_MAP.put(Table.class.getSimpleName(), Table.TABLE_NAME); TABLE_KEY_MAP.put(Column.class.getSimpleName(), Column.TABLE_NAME); TABLE_KEY_MAP.put(PgAttribute.class.getSimpleName(), PgAttribute.TABLE_NAME); TABLE_KEY_MAP.put(PgClass.class.getSimpleName(), PgClass.TABLE_NAME); } @NotNull @Override public String getIdKey() { return KEY_ID; } @NotNull @Override public String getUserIdKey() { return KEY_USER_ID; } private Object id; //Table的id private RequestMethod method; //操作方法 private boolean prepared = true; //预编译 
private boolean main = true; /** * TODO 被关联的表通过就忽略关联的表?(这个不行 User:{"sex@":"/Comment/toId"}) */ private RequestRole role; //发送请求的用户的角色 private String database; //表所在的数据库类型 private String schema; //表所在的数据库名 private String table; //表名 private String alias; //表别名 private String group; //分组方式的字符串数组,','分隔 private String having; //聚合函数的字符串数组,','分隔 private String order; //排序方式的字符串数组,','分隔 private Subquery from; //子查询临时表 private List<String> column; //表内字段名(或函数名,仅查询操作可用)的字符串数组,','分隔 private List<List<Object>> values; //对应表内字段的值的字符串数组,','分隔 private Map<String, Object> content; //Request内容,key:value形式,column = content.keySet(),values = content.values() private Map<String, Object> where; //筛选条件,key:value形式 private Map<String, List<String>> combine; //条件组合,{ "&":[key], "|":[key], "!":[key] } //array item <<<<<<<<<< private int count; //Table数量 private int page; //Table所在页码 private int position; //Table在[]中的位置 private int query; //JSONRequest.query private int type; //ObjectParser.type private int cache; private boolean explain; private List<Join> joinList; //连表 配置列表 //array item >>>>>>>>>> private boolean test; //测试 private String procedure; public SQLConfig setProcedure(String procedure) { this.procedure = procedure; return this; } public String getProcedure() { return procedure; } public AbstractSQLConfig(RequestMethod method) { setMethod(method); } public AbstractSQLConfig(RequestMethod method, String table) { this(method); setTable(table); } public AbstractSQLConfig(RequestMethod method, int count, int page) { this(method); setCount(count); setPage(page); } @NotNull @Override public RequestMethod getMethod() { if (method == null) { method = GET; } return method; } @Override public AbstractSQLConfig setMethod(RequestMethod method) { this.method = method; return this; } @Override public boolean isPrepared() { return prepared; } @Override public AbstractSQLConfig setPrepared(boolean prepared) { this.prepared = prepared; return this; } @Override public boolean isMain() { return main; } @Override public AbstractSQLConfig setMain(boolean main) { this.main = main; return this; } @Override public Object getId() { return id; } @Override public AbstractSQLConfig setId(Object id) { this.id = id; return this; } @Override public RequestRole getRole() { //不能 @NotNull , AbstractParser#getSQLObject 内当getRole() == null时填充默认值 return role; } public AbstractSQLConfig setRole(String roleName) throws Exception { return setRole(RequestRole.get(roleName)); } @Override public AbstractSQLConfig setRole(RequestRole role) { this.role = role; return this; } @Override public String getDatabase() { return database; } @Override public SQLConfig setDatabase(String database) { this.database = database; return this; } @Override public String getQuote() { return DATABASE_POSTGRESQL.equals(getDatabase()) ? "\"" : "`"; } @Override public String getSchema() { return schema; } public String getSQLSchema(String sqlTable) { //强制,避免因为全局默认的 @schema 自动填充进来,导致这几个类的 schema 为 sys 等其它值 if ((Table.TABLE_NAME.equals(sqlTable) || Column.TABLE_NAME.equals(sqlTable)) ) { return SCHEMA_INFORMATION; } if ((PgAttribute.TABLE_NAME.equals(sqlTable) || PgClass.TABLE_NAME.equals(sqlTable)) ) { return ""; } String sch = getSchema(); if (sch == null) { //PostgreSQL 的 pg_class 和 pg_attribute 表好像不属于任何 Schema StringUtil.isEmpty(sch, true)) { sch = DEFAULT_SCHEMA; } return sch; } @Override public AbstractSQLConfig setSchema(String schema) { if (schema != null) { String quote = getQuote(); String s = schema.startsWith(quote) && schema.endsWith(quote) ? 
schema.substring(1, schema.length() - 1) : schema; if (StringUtil.isEmpty(s, true) == false && StringUtil.isName(s) == false) { throw new IllegalArgumentException("@schema:value 中value必须是1个单词!"); } } this.schema = schema; return this; } /**请求传进来的Table名 * @return * @see {@link #getSQLTable()} */ @Override public String getTable() { return table; } /**数据库里的真实Table名 * 通过 {@link #TABLE_KEY_MAP} 映射 * @return */ @JSONField(serialize = false) @Override public String getSQLTable() { // String t = TABLE_KEY_MAP.containsKey(table) ? TABLE_KEY_MAP.get(table) : table; //如果要强制小写,则可在子类重写这个方法再 toLowerCase return DATABASE_POSTGRESQL.equals(getDatabase()) ? t.toLowerCase() : t; return TABLE_KEY_MAP.containsKey(table) ? TABLE_KEY_MAP.get(table) : table; } @JSONField(serialize = false) @Override public String getTablePath() { String q = getQuote(); String sqlTable = getSQLTable(); String sch = getSQLSchema(sqlTable); return (StringUtil.isEmpty(sch, true) ? "" : q + sch + q + ".") + q + sqlTable + q + ( isKeyPrefix() ? " AS " + getAlias() : ""); } @Override public AbstractSQLConfig setTable(String table) { //Table已经在Parser中校验,所以这里不用防SQL注入 this.table = table; return this; } @Override public String getAlias() { if (StringUtil.isEmpty(alias, true)) { alias = getTable(); } String q = getQuote(); //getTable 不能小写,因为Verifier用大小写敏感的名称判断权限 //如果要强制小写,则可在子类重写这个方法再 toLowerCase return q + (DATABASE_POSTGRESQL.equals(getDatabase()) ? alias.toLowerCase() : alias) + q; return q + alias + q; } @Override public AbstractSQLConfig setAlias(String alias) { this.alias = alias; return this; } @Override public String getGroup() { return group; } public AbstractSQLConfig setGroup(String... keys) { return setGroup(StringUtil.getString(keys)); } @Override public AbstractSQLConfig setGroup(String group) { this.group = group; return this; } @JSONField(serialize = false) public String getGroupString(boolean hasPrefix) { //加上子表的 group String joinGroup = ""; if (joinList != null) { SQLConfig cfg; String c; boolean first = true; for (Join j : joinList) { if (j.isAppJoin()) { continue; } cfg = j.isLeftOrRightJoin() ? j.getOutterConfig() : j.getJoinConfig(); cfg.setAlias(cfg.getTable()); c = ((AbstractSQLConfig) cfg).getGroupString(false); if (StringUtil.isEmpty(c, true) == false) { joinGroup += (first ? "" : ", ") + c; first = false; } } } group = StringUtil.getTrimedString(group); String[] keys = StringUtil.split(group); if (keys == null || keys.length <= 0) { return StringUtil.isEmpty(joinGroup, true) ? "" : (hasPrefix ? " GROUP BY " : "") + joinGroup; } for (int i = 0; i < keys.length; i++) { if (isPrepared()) { //不能通过 ? 来代替,因为SQLExecutor statement.setString后 GROUP BY 'userId' 有单引号,只能返回一条数据,必须去掉单引号才行! if (StringUtil.isName(keys[i]) == false) { throw new IllegalArgumentException("@group:value 中 value里面用 , 分割的每一项都必须是1个单词!并且不要有空格!"); } } keys[i] = getKey(keys[i]); } return (hasPrefix ? " GROUP BY " : "") + StringUtil.concat(StringUtil.getString(keys), joinGroup, ", "); } @Override public String getHaving() { return having; } public AbstractSQLConfig setHaving(String... conditions) { return setHaving(StringUtil.getString(conditions)); } @Override public AbstractSQLConfig setHaving(String having) { this.having = having; return this; } /** * @return HAVING conditoin0 AND condition1 OR condition2 ... 
*/ @JSONField(serialize = false) public String getHavingString(boolean hasPrefix) { //加上子表的 having String joinHaving = ""; if (joinList != null) { SQLConfig cfg; String c; boolean first = true; for (Join j : joinList) { if (j.isAppJoin()) { continue; } cfg = j.isLeftOrRightJoin() ? j.getOutterConfig() : j.getJoinConfig(); cfg.setAlias(cfg.getTable()); c = ((AbstractSQLConfig) cfg).getHavingString(false); if (StringUtil.isEmpty(c, true) == false) { joinHaving += (first ? "" : ", ") + c; first = false; } } } having = StringUtil.getTrimedString(having); String[] keys = StringUtil.split(having, ";"); if (keys == null || keys.length <= 0) { return StringUtil.isEmpty(joinHaving, true) ? "" : (hasPrefix ? " HAVING " : "") + joinHaving; } String expression; String method; //暂时不允许 String prefix; String suffix; //fun0(arg0,arg1,...);fun1(arg0,arg1,...) for (int i = 0; i < keys.length; i++) { //fun(arg0,arg1,...) expression = keys[i]; int start = expression.indexOf("("); if (start < 0) { if (isPrepared() && PATTERN_HAVING.matcher(expression).matches() == false) { throw new UnsupportedOperationException("字符串 " + expression + " 不合法!" + "预编译模式下 @having:\"column?value;function(arg0,arg1,...)?value...\"" + " 中 column?value 必须符合正则表达式 ^[A-Za-z0-9%!=<>]+$ !不允许空格!"); } continue; } int end = expression.indexOf(")"); if (start >= end) { throw new IllegalArgumentException("字符 " + expression + " 不合法!" + "@having:value 中 value 里的 SQL函数必须为 function(arg0,arg1,...) 这种格式!"); } method = expression.substring(0, start); if (StringUtil.isName(method) == false) { throw new IllegalArgumentException("字符 " + method + " 不合法!" + "预编译模式下 @having:\"column?value;function(arg0,arg1,...)?value...\"" + " 中SQL函数名 function 必须符合正则表达式 ^[0-9a-zA-Z_]+$ !"); } suffix = expression.substring(end + 1, expression.length()); if (isPrepared() && PATTERN_HAVING_SUFFIX.matcher((String) suffix).matches() == false) { throw new UnsupportedOperationException("字符串 " + suffix + " 不合法!" + "预编译模式下 @having:\"column?value;function(arg0,arg1,...)?value...\"" + " 中 ?value 必须符合正则表达式 ^[0-9%!=<>]+$ !不允许空格!"); } String[] ckeys = StringUtil.split(expression.substring(start + 1, end)); if (ckeys != null) { for (int j = 0; j < ckeys.length; j++) { if (isPrepared() && (StringUtil.isName(ckeys[j]) == false || ckeys[j].startsWith("_"))) { throw new IllegalArgumentException("字符 " + ckeys[j] + " 不合法!" + "预编译模式下 @having:\"column?value;function(arg0,arg1,...)?value...\"" + " 中所有 arg 都必须是1个不以 _ 开头的单词!并且不要有空格!"); } ckeys[j] = getKey(ckeys[j]); } } keys[i] = method + "(" + StringUtil.getString(ckeys) + ")" + suffix; } //TODO 支持 OR, NOT 参考 @combine:"&key0,|key1,!key2" return (hasPrefix ? " HAVING " : "") + StringUtil.concat(StringUtil.getString(keys, AND), joinHaving, AND); } @Override public String getOrder() { return order; } public AbstractSQLConfig setOrder(String... conditions) { return setOrder(StringUtil.getString(conditions)); } @Override public AbstractSQLConfig setOrder(String order) { this.order = order; return this; } @JSONField(serialize = false) public String getOrderString(boolean hasPrefix) { //加上子表的 order String joinOrder = ""; if (joinList != null) { SQLConfig cfg; String c; boolean first = true; for (Join j : joinList) { if (j.isAppJoin()) { continue; } cfg = j.isLeftOrRightJoin() ? j.getOutterConfig() : j.getJoinConfig(); cfg.setAlias(cfg.getTable()); c = ((AbstractSQLConfig) cfg).getOrderString(false); if (StringUtil.isEmpty(c, true) == false) { joinOrder += (first ? 
"" : ", ") + c; first = false; } } } order = StringUtil.getTrimedString(order); if (order.contains("+")) {//replace没有包含的replacement会崩溃 order = order.replaceAll("\\+", " ASC "); } if (order.contains("-")) { order = order.replaceAll("-", " DESC "); } String[] keys = StringUtil.split(order); if (keys == null || keys.length <= 0) { return StringUtil.isEmpty(joinOrder, true) ? "" : (hasPrefix ? " ORDER BY " : "") + joinOrder; } String origin; String sort; int index; for (int i = 0; i < keys.length; i++) { index = keys[i].trim().endsWith(" ASC") ? keys[i].lastIndexOf(" ASC") : -1; //StringUtil.split返回数组中,子项不会有null if (index < 0) { index = keys[i].trim().endsWith(" DESC") ? keys[i].lastIndexOf(" DESC") : -1; sort = index <= 0 ? "" : " DESC "; } else { sort = " ASC "; } origin = index < 0 ? keys[i] : keys[i].substring(0, index); if (isPrepared()) { //不能通过 ? 来代替,SELECT 'id','name' 返回的就是 id:"id", name:"name",而不是数据库里的值! //这里既不对origin trim,也不对 ASC/DESC ignoreCase,希望前端严格传没有任何空格的字符串过来,减少传输数据量,节约服务器性能 if (StringUtil.isName(origin) == false) { throw new IllegalArgumentException("预编译模式下 @order:value 中 value里面用 , 分割的每一项" + " column+ / column- 中 column必须是1个单词!并且不要有多余的空格!"); } } keys[i] = getKey(origin) + sort; } return (hasPrefix ? " ORDER BY " : "") + StringUtil.concat(StringUtil.getString(keys), joinOrder, ", "); } @Override public Subquery getFrom() { return from; } @Override public AbstractSQLConfig setFrom(Subquery from) { this.from = from; return this; } @Override public List<String> getColumn() { return column; } @Override public AbstractSQLConfig setColumn(List<String> column) { this.column = column; return this; } @JSONField(serialize = false) public String getColumnString() throws Exception { return getColumnString(false); } @JSONField(serialize = false) public String getColumnString(boolean inSQLJoin) throws Exception { switch (getMethod()) { case HEAD: case HEADS: //StringUtil.isEmpty(column, true) || column.contains(",") 时SQL.count(column)会return "*" if (isPrepared() && column != null) { String origin; String alias; int index; for (String c : column) { index = c.lastIndexOf(":"); //StringUtil.split返回数组中,子项不会有null origin = index < 0 ? c : c.substring(0, index); alias = index < 0 ? null : c.substring(index + 1); if (StringUtil.isName(origin) == false || (alias != null && StringUtil.isName(alias) == false)) { throw new IllegalArgumentException("HEAD请求: 预编译模式下 @column:value 中 value里面用 , 分割的每一项" + " column:alias 中 column 必须是1个单词!如果有alias,则alias也必须为1个单词!并且不要有多余的空格!"); } } } return SQL.count(column != null && column.size() == 1 ? getKey(Pair.parseEntry(column.get(0), true).getKey()) : "*"); case POST: if (column == null || column.isEmpty()) { throw new IllegalArgumentException("POST 请求必须在Table内设置要保存的 key:value !"); } String s = ""; boolean pfirst = true; for (String c : column) { if (isPrepared() && StringUtil.isName(c) == false) { //不能通过 ? 来代替,SELECT 'id','name' 返回的就是 id:"id", name:"name",而不是数据库里的值! throw new IllegalArgumentException("POST请求: 每一个 key:value 中的key都必须是1个单词!"); } s += ((pfirst ? 
"" : ",") + getKey(c)); pfirst = false; } return "(" + s + ")"; case GET: case GETS: boolean isQuery = RequestMethod.isQueryMethod(method); //TODO 这个有啥用?上面应是 getMethod 的值 GET 和 GETS 了。 String joinColumn = ""; if (isQuery && joinList != null) { SQLConfig ecfg; SQLConfig cfg; String c; boolean first = true; for (Join j : joinList) { if (j.isAppJoin()) { continue; } ecfg = j.getOutterConfig(); if (ecfg != null && ecfg.getColumn() != null) { //优先级更高 cfg = ecfg; } else { cfg = j.getJoinConfig(); } cfg.setAlias(cfg.getTable()); c = ((AbstractSQLConfig) cfg).getColumnString(true); if (StringUtil.isEmpty(c, true) == false) { joinColumn += (first ? "" : ", ") + c; first = false; } inSQLJoin = true; } } String tableAlias = getAlias(); // String c = StringUtil.getString(column); //id,name;json_length(contactIdList):contactCount;... String[] keys = column == null ? null : column.toArray(new String[]{}); //StringUtil.split(c, ";"); if (keys == null || keys.length <= 0) { boolean noColumn = column != null && inSQLJoin; String mc = isKeyPrefix() == false ? (noColumn ? "" : "*") : (noColumn ? "" : tableAlias + ".*"); return StringUtil.concat(mc, joinColumn, ", ", true); } String expression; String method = null; //...;fun0(arg0,arg1,...):fun0;fun1(arg0,arg1,...):fun1;... for (int i = 0; i < keys.length; i++) { //fun(arg0,arg1,...) expression = keys[i]; int start = expression.indexOf("("); int end = 0; if (start >= 0) { end = expression.indexOf(")"); if (start >= end) { throw new IllegalArgumentException("字符 " + expression + " 不合法!" + "@having:value 中 value 里的 SQL函数必须为 function(arg0,arg1,...) 这种格式!"); } method = expression.substring(0, start); boolean distinct = method.startsWith(PREFFIX_DISTINCT); if (StringUtil.isName(distinct ? method.substring(PREFFIX_DISTINCT.length()) : method) == false) { throw new IllegalArgumentException("字符 " + method + " 不合法!" + "预编译模式下 @column:\"column0,column1:alias;function0(arg0,arg1,...);function1(...):alias...\"" + " 中SQL函数名 function 必须符合正则表达式 ^[0-9a-zA-Z_]+$ !"); } } boolean isColumn = start < 0; String[] ckeys = StringUtil.split(isColumn ? expression : expression.substring(start + 1, end)); String quote = getQuote(); // if (isPrepared()) { //不能通过 ? 来代替,SELECT 'id','name' 返回的就是 id:"id", name:"name",而不是数据库里的值! if (ckeys != null && ckeys.length > 0) { boolean distinct; String origin; String alias; int index; for (int j = 0; j < ckeys.length; j++) { index = isColumn ? ckeys[j].lastIndexOf(":") : -1; //StringUtil.split返回数组中,子项不会有null origin = index < 0 ? ckeys[j] : ckeys[j].substring(0, index); alias = index < 0 ? null : ckeys[j].substring(index + 1); distinct = origin.startsWith(PREFFIX_DISTINCT); if (distinct) { origin = origin.substring(PREFFIX_DISTINCT.length()); } if (isPrepared()) { if (isColumn) { if (StringUtil.isName(origin) == false || (alias != null && StringUtil.isName(alias) == false)) { throw new IllegalArgumentException("字符 " + ckeys[j] + " 不合法!" + "预编译模式下 @column:value 中 value里面用 , 分割的每一项" + " column:alias 中 column 必须是1个单词!如果有alias,则alias也必须为1个单词!" + "DISTINCT 必须全大写,且后面必须有且只有 1 个空格!其它情况不允许空格!"); } } else { if ((StringUtil.isName(origin) == false || origin.startsWith("_"))) { throw new IllegalArgumentException("字符 " + ckeys[j] + " 不合法!" + "预编译模式下 @column:\"column0,column1:alias;function0(arg0,arg1,...);function1(...):alias...\"" + " 中所有 arg 都必须是1个不以 _ 开头的单词!DISTINCT 必须全大写,且后面必须有且只有 1 个空格!其它情况不允许空格!"); } } } //JOIN 副表不再在外层加副表名前缀 userId AS `Commet.userId`, 而是直接 userId AS `userId` origin = quote + origin + quote; if (isKeyPrefix()) { ckeys[j] = tableAlias + "." 
+ origin; // if (isColumn) { // ckeys[j] += " AS " + quote + (isMain() ? "" : tableAlias + ".") + (StringUtil.isEmpty(alias, true) ? origin : alias) + quote; // } if (isColumn && StringUtil.isEmpty(alias, true) == false) { ckeys[j] += " AS " + quote + alias + quote; } } else { ckeys[j] = origin + (StringUtil.isEmpty(alias, true) ? "" : " AS " + quote + alias + quote); } if (distinct) { ckeys[j] = PREFFIX_DISTINCT + ckeys[j]; } } // } } if (isColumn) { keys[i] = StringUtil.getString(ckeys); } else { String suffix = expression.substring(end + 1, expression.length()); //:contactCount String alias = suffix.startsWith(":") ? suffix.substring(1) : null; //contactCount if (StringUtil.isEmpty(alias, true)) { if (suffix.isEmpty() == false) { throw new IllegalArgumentException("GET请求: 预编译模式下 @column:value 中 value里面用 ; 分割的每一项" + " function(arg0,arg1,...):alias 中 alias 如果有就必须是1个单词!并且不要有多余的空格!"); } } else { if (StringUtil.isEmpty(alias, true) == false && StringUtil.isName(alias) == false) { throw new IllegalArgumentException("GET请求: 预编译模式下 @column:value 中 value里面用 ; 分割的每一项" + " function(arg0,arg1,...):alias 中 alias 必须是1个单词!并且不要有多余的空格!"); } } String origin = method + "(" + StringUtil.getString(ckeys) + ")"; // if (isKeyPrefix()) { // keys[i] = origin + " AS " + quote + (isMain() ? "" : tableAlias + ".") + (StringUtil.isEmpty(alias, true) ? method : alias) + quote; // } // else { keys[i] = origin + (StringUtil.isEmpty(alias, true) ? "" : " AS " + quote + alias + quote); // } } } String c = StringUtil.getString(keys); return (c.contains(":") == false ? c : c.replaceAll(":", " AS ")) + (StringUtil.isEmpty(joinColumn, true) ? "" : ", " + joinColumn);//不能在这里改,后续还要用到: default: throw new UnsupportedOperationException( "服务器内部错误:getColumnString 不支持 " + RequestMethod.getName(getMethod()) + " 等 [GET,GETS,HEAD,HEADS,POST] 外的ReuqestMethod!" ); } } @Override public List<List<Object>> getValues() { return values; } @JSONField(serialize = false) public String getValuesString() { String s = ""; if (values != null && values.size() > 0) { Object[] items = new Object[values.size()]; List<Object> vs; for (int i = 0; i < values.size(); i++) { vs = values.get(i); if (vs == null) { continue; } items[i] = "("; for (int j = 0; j < vs.size(); j++) { items[i] += ((j <= 0 ? 
"" : ",") + getValue(vs.get(j))); } items[i] += ")"; } s = StringUtil.getString(items); } return s; } @Override public AbstractSQLConfig setValues(List<List<Object>> valuess) { this.values = valuess; return this; } @Override public Map<String, Object> getContent() { return content; } @Override public AbstractSQLConfig setContent(Map<String, Object> content) { this.content = content; return this; } @Override public int getCount() { return count; } @Override public AbstractSQLConfig setCount(int count) { this.count = count; return this; } @Override public int getPage() { return page; } @Override public AbstractSQLConfig setPage(int page) { this.page = page; return this; } @Override public int getPosition() { return position; } @Override public AbstractSQLConfig setPosition(int position) { this.position = position; return this; } @Override public int getQuery() { return query; } @Override public AbstractSQLConfig setQuery(int query) { this.query = query; return this; } @Override public int getType() { return type; } @Override public AbstractSQLConfig setType(int type) { this.type = type; return this; } @Override public int getCache() { return cache; } @Override public AbstractSQLConfig setCache(int cache) { this.cache = cache; return this; } public AbstractSQLConfig setCache(String cache) { int cache2; if (cache == null) { cache2 = JSONRequest.CACHE_ALL; } else { // if (isSubquery) { // throw new IllegalArgumentException("子查询内不支持传 " + JSONRequest.KEY_CACHE + "!"); // } switch (cache) { case "0": case JSONRequest.CACHE_ALL_STRING: cache2 = JSONRequest.CACHE_ALL; break; case "1": case JSONRequest.CACHE_ROM_STRING: cache2 = JSONRequest.CACHE_ROM; break; case "2": case JSONRequest.CACHE_RAM_STRING: cache2 = JSONRequest.CACHE_RAM; break; default: throw new IllegalArgumentException(getTable() + "/" + JSONRequest.KEY_CACHE + ":value 中 value 的值不合法!必须在 [0,1,2] 或 [ALL, ROM, RAM] 内 !"); } } setCache(cache2); return this; } @Override public boolean isExplain() { return explain; } @Override public AbstractSQLConfig setExplain(boolean explain) { this.explain = explain; return this; } @Override public List<Join> getJoinList() { return joinList; } @Override public SQLConfig setJoinList(List<Join> joinList) { this.joinList = joinList; return this; } @Override public boolean hasJoin() { return joinList != null && joinList.isEmpty() == false; } @Override public boolean isTest() { return test; } @Override public AbstractSQLConfig setTest(boolean test) { this.test = test; return this; } /**获取初始位置offset * @return */ @JSONField(serialize = false) public int getOffset() { return getOffset(getPage(), getCount()); } /**获取初始位置offset * @param page * @param count * @return */ public static int getOffset(int page, int count) { return page*count; } /**获取限制数量 * @return */ @JSONField(serialize = false) public String getLimitString() { return getLimitString(getPage(), getCount());// + 1); } /**获取限制数量 * @param limit * @return */ public static String getLimitString(int page, int count) { return count <= 0 ? "" : " LIMIT " + count + " OFFSET " + getOffset(page, count); } //WHERE <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< @Override public Map<String, Object> getWhere() { return where; } @Override public AbstractSQLConfig setWhere(Map<String, Object> where) { this.where = where; return this; } @NotNull @Override public Map<String, List<String>> getCombine() { List<String> andList = combine == null ? null : combine.get("&"); if (andList == null) { andList = where == null ? 
new ArrayList<String>() : new ArrayList<String>(where.keySet()); if (combine == null) { combine = new HashMap<>(); } combine.put("&", andList); } return combine; } @Override public AbstractSQLConfig setCombine(Map<String, List<String>> combine) { this.combine = combine; return this; } /** * noFunctionChar = false * @param key * @return */ @JSONField(serialize = false) @Override public Object getWhere(String key) { return getWhere(key, false); } /** * @param key * @param exactMatch * @return */ @JSONField(serialize = false) @Override public Object getWhere(String key, boolean exactMatch) { if (exactMatch) { return where == null ? null : where.get(key); } Set<String> set = key == null || where == null ? null : where.keySet(); if (set != null) { synchronized (where) { if (where != null) { int index; for (String k : set) { index = k.indexOf(key); if (index >= 0 && StringUtil.isName(k.substring(index)) == false) { return where.get(k); } } } } } return null; } @Override public AbstractSQLConfig putWhere(String key, Object value, boolean prior) { if (key != null) { if (where == null) { where = new LinkedHashMap<String, Object>(); } if (value == null) { where.remove(key); } else { where.put(key, value); } combine = getCombine(); List<String> andList = combine.get("&"); if (value == null) { andList.remove(key); } else if (andList == null || andList.contains(key) == false) { int i = 0; if (andList == null) { andList = new ArrayList<>(); } else if (prior && andList.isEmpty() == false) { String idKey = getIdKey(); String idInKey = idKey + "{}"; String userIdKey = getUserIdKey(); String userIdInKey = userIdKey + "{}"; if (andList.contains(idKey)) { i ++; } if (andList.contains(idInKey)) { i ++; } if (andList.contains(userIdKey)) { i ++; } if (andList.contains(userIdInKey)) { i ++; } } if (prior) { andList.add(i, key); //userId的优先级不能比id高 0, key); } else { andList.add(key); //AbstractSQLExecutor.onPutColumn里getSQL,要保证缓存的SQL和查询的SQL里 where 的 key:value 顺序一致 } } combine.put("&", andList); } return this; } /**获取WHERE * @return * @throws Exception */ @JSONField(serialize = false) @Override public String getWhereString(boolean hasPrefix) throws Exception { return getWhereString(hasPrefix, getMethod(), getWhere(), getCombine(), getJoinList(), ! isTest()); } /**获取WHERE * @param method * @param where * @return * @throws Exception */ @JSONField(serialize = false) public String getWhereString(boolean hasPrefix, RequestMethod method, Map<String, Object> where, Map<String, List<String>> combine, List<Join> joinList, boolean verifyName) throws Exception { Set<Entry<String, List<String>>> combineSet = combine == null ? null : combine.entrySet(); if (combineSet == null || combineSet.isEmpty()) { Log.w(TAG, "getWhereString combineSet == null || combineSet.isEmpty() >> return \"\";"); return ""; } List<String> keyList; String whereString = ""; boolean isCombineFirst = true; int logic; boolean isItemFirst; String c; String cs; for (Entry<String, List<String>> ce : combineSet) { keyList = ce == null ? null : ce.getValue(); if (keyList == null || keyList.isEmpty()) { continue; } if ("|".equals(ce.getKey())) { logic = Logic.TYPE_OR; } else if ("!".equals(ce.getKey())) { logic = Logic.TYPE_NOT; } else { logic = Logic.TYPE_AND; } isItemFirst = true; cs = ""; for (String key : keyList) { c = getWhereItem(key, where.get(key), method, verifyName); if (StringUtil.isEmpty(c, true)) {//避免SQL条件连接错误 continue; } cs += (isItemFirst ? "" : (Logic.isAnd(logic) ? 
AND : OR)) + "(" + c + ")"; isItemFirst = false; } whereString += (isCombineFirst ? "" : AND) + (Logic.isNot(logic) ? NOT : "") + " ( " + cs + " ) "; isCombineFirst = false; } if (joinList != null) { String newWs = ""; String ws = "" + whereString; List<Object> newPvl = new ArrayList<>(); List<Object> pvl = new ArrayList<>(preparedValueList); SQLConfig jc; String js; boolean changed = false; //各种 JOIN 没办法统一用 & | !连接,只能按优先级,和 @combine 一样? for (Join j : joinList) { switch (j.getJoinType()) { case "@": // APP JOIN case "<": // LEFT JOIN case ">": // RIGHT JOIN break; case "": // FULL JOIN case "|": // FULL JOIN 不支持 <>, [] ,避免太多符号 case "&": // INNER JOIN case "!": // OUTTER JOIN case "^": // SIDE JOIN case "*": // CROSS JOIN jc = j.getJoinConfig(); boolean isMain = jc.isMain(); jc.setMain(false).setPrepared(isPrepared()).setPreparedValueList(new ArrayList<Object>()); js = jc.getWhereString(false); jc.setMain(isMain); if (StringUtil.isEmpty(js, true)) { continue; } if (StringUtil.isEmpty(newWs, true) == false) { newWs += AND; } if ("^".equals(j.getJoinType())) { // (A & ! B) | (B & ! A) newWs += " ( ( " + ws + ( StringUtil.isEmpty(ws, true) ? "" : AND + NOT ) + " ( " + js + " ) ) " + OR + " ( " + js + AND + NOT + " ( " + ws + " ) ) ) "; newPvl.addAll(pvl); newPvl.addAll(jc.getPreparedValueList()); newPvl.addAll(jc.getPreparedValueList()); newPvl.addAll(pvl); } else { logic = Logic.getType(j.getJoinType()); newWs += " ( " + getCondition( Logic.isNot(logic), ws + ( StringUtil.isEmpty(ws, true) ? "" : (Logic.isAnd(logic) ? AND : OR) ) + " ( " + js + " ) " ) + " ) "; newPvl.addAll(pvl); newPvl.addAll(jc.getPreparedValueList()); } changed = true; break; default: throw new UnsupportedOperationException("join:value 中 value 里的 " + j.getJoinType() + "/" + j.getPath() + "错误!不支持 " + j.getJoinType() + " 等 [@ APP, < LEFT, > RIGHT, | FULL, & INNER, ! OUTTER, ^ SIDE, * CROSS] 之外的JOIN类型 !"); } } if (changed) { whereString = newWs; preparedValueList = newPvl; } } String s = whereString.isEmpty() ? "" : (hasPrefix ? " WHERE " : "") + whereString; if (s.isEmpty() && RequestMethod.isQueryMethod(method) == false) { throw new UnsupportedOperationException("写操作请求必须带条件!!!"); } return s; } /** * @param key * @param value * @param method * @param verifyName * @return * @throws Exception */ private String getWhereItem(String key, Object value , RequestMethod method, boolean verifyName) throws Exception { Log.d(TAG, "getWhereItem key = " + key); //避免筛选到全部 value = key == null ? null : where.get(key); if (key == null || value == null || key.startsWith("@") || key.endsWith("()")) {//关键字||方法, +或-直接报错 Log.d(TAG, "getWhereItem key == null || value == null" + " || key.startsWith(@) || key.endsWith(()) >> continue;"); return null; } if (key.endsWith("@")) {//引用 // key = key.substring(0, key.lastIndexOf("@")); throw new IllegalArgumentException(TAG + ".getWhereItem: 字符 " + key + " 不合法!"); } int keyType; if (key.endsWith("$")) { keyType = 1; } else if (key.endsWith("~") || key.endsWith("?")) { //TODO ?可能以后会被废弃,全用 ~ 和 *~ 替代,更接近 PostgreSQL 语法 keyType = key.charAt(key.length() - 2) == '*' ? -2 : 2; //FIXME StringIndexOutOfBoundsException } else if (key.endsWith("%")) { keyType = 3; } else if (key.endsWith("{}")) { keyType = 4; } else if (key.endsWith("}{")) { keyType = 5; } else if (key.endsWith("<>")) { keyType = 6; } else if (key.endsWith(">=")) { keyType = 7; } else if (key.endsWith("<=")) { keyType = 8; } else if (key.endsWith(">")) { keyType = 9; } else if (key.endsWith("<")) { keyType = 10; } else { //else绝对不能省,避免再次踩坑! 
keyType = 0; 写在for循环外面都没注意! keyType = 0; } key = getRealKey(method, key, false, true, verifyName, getQuote()); switch (keyType) { case 1: return getSearchString(key, value); case -2: case 2: return getRegExpString(key, value, keyType < 0); case 3: return getBetweenString(key, value); case 4: return getRangeString(key, value); case 5: return getExistsString(key, value); case 6: return getContainString(key, value); case 7: return getCompareString(key, value, ">="); case 8: return getCompareString(key, value, "<="); case 9: return getCompareString(key, value, ">"); case 10: return getCompareString(key, value, "<"); default: //TODO MySQL JSON类型的字段对比 key='[]' 会无结果! key LIKE '[1, 2, 3]' //TODO MySQL , 后面有空格! return getEqualString(key, value); } } @JSONField(serialize = false) public String getEqualString(String key, Object value) throws Exception { if (JSON.isBooleanOrNumberOrString(value) == false && value instanceof Subquery == false) { throw new IllegalArgumentException(key + ":value 中value不合法!非PUT请求只支持 [Boolean, Number, String] 内的类型 !"); } boolean not = key.endsWith("!"); // & | 没有任何意义,写法多了不好控制 if (not) { key = key.substring(0, key.length() - 1); } if (StringUtil.isName(key) == false) { throw new IllegalArgumentException(key + ":value 中key不合法!不支持 ! 以外的逻辑符 !"); } return getKey(key) + (not ? " != " : " = ") + (value instanceof Subquery ? getSubqueryString((Subquery) value) : getValue(value)); } @JSONField(serialize = false) public String getCompareString(String key, Object value, String type) throws Exception { if (JSON.isBooleanOrNumberOrString(value) == false && value instanceof Subquery == false) { throw new IllegalArgumentException(key + type + ":value 中value不合法!比较运算 [>, <, >=, <=] 只支持 [Boolean, Number, String] 内的类型 !"); } if (StringUtil.isName(key) == false) { throw new IllegalArgumentException(key + type + ":value 中key不合法!比较运算 [>, <, >=, <=] 不支持 [&, !, |] 中任何逻辑运算符 !"); } return getKey(key) + " " + type + " " + (value instanceof Subquery ? getSubqueryString((Subquery) value) : getValue(value)); } public String getKey(String key) { String q = getQuote(); return (isKeyPrefix() ? getAlias() + "." : "") + q + key + q; } /** * 使用prepareStatement预编译,值为 ? ,后续动态set进去 */ private List<Object> preparedValueList = new ArrayList<>(); private Object getValue(@NotNull Object value) { if (isPrepared()) { preparedValueList.add(value); return "?"; } // return (value instanceof Number || value instanceof Boolean) && DATABASE_POSTGRESQL.equals(getDatabase()) ? value : "'" + value + "'"; return (value instanceof Number || value instanceof Boolean) ? 
value : "'" + value + "'"; //MySQL 隐式转换用不了索引 } @Override public List<Object> getPreparedValueList() { return preparedValueList; } @Override public AbstractSQLConfig setPreparedValueList(List<Object> preparedValueList) { this.preparedValueList = preparedValueList; return this; } //$ search <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**search key match value * @param in * @return {@link #getSearchString(String, Object[], int)} * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getSearchString(String key, Object value) throws IllegalArgumentException { if (value == null) { return ""; } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getSearchString key = " + key); JSONArray arr = newJSONArray(value); if (arr.isEmpty()) { return ""; } return getSearchString(key, arr.toArray(), logic.getType()); } /**search key match values * @param in * @return LOGIC [ key LIKE 'values[i]' ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getSearchString(String key, Object[] values, int type) throws IllegalArgumentException { if (values == null || values.length <= 0) { return ""; } String condition = ""; for (int i = 0; i < values.length; i++) { if (values[i] instanceof String == false) { throw new IllegalArgumentException(key + "$:value 中value的类型只能为String或String[]!"); } condition += (i <= 0 ? "" : (Logic.isAnd(type) ? AND : OR)) + getLikeString(key, values[i]); } return getCondition(Logic.isNot(type), condition); } /**WHERE key LIKE 'value' * @param key * @param value * @return key LIKE 'value' */ @JSONField(serialize = false) public String getLikeString(String key, Object value) { return getKey(key) + " LIKE " + getValue(value); } //$ search >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //~ regexp <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**search key match RegExp values * @param key * @param value * @param ignoreCase * @return {@link #getRegExpString(String, Object[], int, boolean)} * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getRegExpString(String key, Object value, boolean ignoreCase) throws IllegalArgumentException { if (value == null) { return ""; } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getRegExpString key = " + key); JSONArray arr = newJSONArray(value); if (arr.isEmpty()) { return ""; } return getRegExpString(key, arr.toArray(), logic.getType(), ignoreCase); } /**search key match RegExp values * @param key * @param values * @param type * @param ignoreCase * @return LOGIC [ key REGEXP 'values[i]' ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getRegExpString(String key, Object[] values, int type, boolean ignoreCase) throws IllegalArgumentException { if (values == null || values.length <= 0) { return ""; } String condition = ""; for (int i = 0; i < values.length; i++) { if (values[i] instanceof String == false) { throw new IllegalArgumentException(key + "$:value 中value的类型只能为String或String[]!"); } condition += (i <= 0 ? "" : (Logic.isAnd(type) ? 
AND : OR)) + getRegExpString(key, (String) values[i], ignoreCase); } return getCondition(Logic.isNot(type), condition); } /**WHERE key REGEXP 'value' * @param key * @param value * @param ignoreCase * @return key REGEXP 'value' */ @JSONField(serialize = false) public String getRegExpString(String key, String value, boolean ignoreCase) { if (DATABASE_POSTGRESQL.equals(getDatabase())) { return getKey(key) + " ~" + (ignoreCase ? "* " : " ") + getValue(value); } return getKey(key) + " REGEXP " + (ignoreCase ? "" : "BINARY ") + getValue(value); } //~ regexp >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //% between <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**WHERE key BETWEEN 'start' AND 'end' * @param key * @param value 'start,end' * @return LOGIC [ key BETWEEN 'start' AND 'end' ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getBetweenString(String key, Object value) throws IllegalArgumentException { if (value == null) { return ""; } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getBetweenString key = " + key); JSONArray arr = newJSONArray(value); if (arr.isEmpty()) { return ""; } return getBetweenString(key, arr.toArray(), logic.getType()); } /**WHERE key BETWEEN 'start' AND 'end' * @param key * @param value 'start,end' TODO 在 '1,2' 和 ['1,2', '3,4'] 基础上新增支持 [1, 2] 和 [[1,2], [3,4]] ? * @return LOGIC [ key BETWEEN 'start' AND 'end' ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getBetweenString(String key, Object[] values, int type) throws IllegalArgumentException { if (values == null || values.length <= 0) { return ""; } String condition = ""; String[] vs; for (int i = 0; i < values.length; i++) { if (values[i] instanceof String == false) { throw new IllegalArgumentException(key + "%:value 中 value 的类型只能为 String 或 String[] !"); } vs = StringUtil.split((String) values[i]); if (vs == null || vs.length != 2) { throw new IllegalArgumentException(key + "%:value 中 value 不合法!类型为 String 时必须包括1个逗号 , 且左右两侧都有值!类型为 String[] 里面每个元素要符合前面类型为 String 的规则 !"); } condition += (i <= 0 ? "" : (Logic.isAnd(type) ? AND : OR)) + "(" + getBetweenString(key, vs[0], vs[1]) + ")"; } return getCondition(Logic.isNot(type), condition); } /**WHERE key BETWEEN 'start' AND 'end' * @param key * @param value 'start,end' TODO 在 '1,2' 和 ['1,2', '3,4'] 基础上新增支持 [1, 2] 和 [[1,2], [3,4]] ? * @return key BETWEEN 'start' AND 'end' * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getBetweenString(String key, Object start, Object end) throws IllegalArgumentException { if (JSON.isBooleanOrNumberOrString(start) == false || JSON.isBooleanOrNumberOrString(end) == false) { throw new IllegalArgumentException(key + "%:value 中 value 不合法!类型为 String 时必须包括1个逗号 , 且左右两侧都有值!类型为 String[] 里面每个元素要符合前面类型为 String 的规则 !"); } return getKey(key) + " BETWEEN " + getValue(start) + AND + getValue(end); } //% between >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //{} range <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< // * 和 / 不能同时出现,防止 /* */ 段注释! # 和 -- 不能出现,防止行注释! ; 不能出现,防止隔断SQL语句!空格不能出现,防止 CRUD,DROP,SHOW TABLES等语句! private static final Pattern PATTERN_RANGE; private static final Pattern PATTERN_HAVING; private static final Pattern PATTERN_HAVING_SUFFIX; static { PATTERN_RANGE = Pattern.compile("^[0-9%!=<>,]+$"); // ^[a-zA-Z0-9_*%!=<>(),"]+$ 导致 exists(select*from(Comment)) 通过! 
PATTERN_HAVING = Pattern.compile("^[A-Za-z0-9%!=<>]+$"); //TODO 改成更好的正则,校验前面为单词,中间为操作符,后面为值 PATTERN_HAVING_SUFFIX = Pattern.compile("^[0-9%!=<>]+$"); // ^[a-zA-Z0-9_*%!=<>(),"]+$ 导致 exists(select*from(Comment)) 通过! } /**WHERE key > 'key0' AND key <= 'key1' AND ... * @param key * @param range "condition0,condition1..." * @return key condition0 AND key condition1 AND ... * @throws Exception */ @JSONField(serialize = false) public String getRangeString(String key, Object range) throws Exception { Log.i(TAG, "getRangeString key = " + key); if (range == null) {//依赖的对象都没有给出有效值,这个存在无意义。如果是客户端传的,那就能在客户端确定了。 throw new NotExistException(TAG + "getRangeString(" + key + ", " + range + ") range == null"); } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getRangeString key = " + key); if (range instanceof List) { if (logic.isOr() || logic.isNot()) { return getKey(key) + getInString(key, ((List<?>) range).toArray(), logic.isNot()); } throw new IllegalArgumentException(key + "{}\":[] 中key末尾的逻辑运算符只能用'|','!'中的一种 !"); } else if (range instanceof String) {//非Number类型需要客户端拼接成 < 'value0', >= 'value1'这种 if (isPrepared() && PATTERN_RANGE.matcher((String) range).matches() == false) { throw new UnsupportedOperationException("字符串 " + range + " 不合法!预编译模式下 key{}:\"condition\" 中 condition 必须符合正则表达式 ^[0-9%!=<>,]+$ !不允许空格!"); } String[] conditions = StringUtil.split((String) range); String condition = ""; if (conditions != null) { int index; for (int i = 0; i < conditions.length; i++) {//对函数条件length(key)<=5这种不再在开头加key index = conditions[i] == null ? -1 : conditions[i].indexOf("("); condition += ((i <= 0 ? "" : (logic.isAnd() ? AND : OR))//连接方式 + (index >= 0 && index < conditions[i].indexOf(")") ? "" : getKey(key) + " ")//函数和非函数条件 + conditions[i]);//单个条件 } } if (condition.isEmpty()) { return ""; } return getCondition(logic.isNot(), condition); } else if (range instanceof Subquery) { //如果在 Parser 解析成 SQL 字符串再引用,没法保证安全性,毕竟可以再通过远程函数等方式来拼接再替代,最后引用的字符串就能注入 return getKey(key) + (logic.isNot() ? NOT : "") + " IN " + getSubqueryString((Subquery) range); } throw new IllegalArgumentException(key + "{}:range 类型为" + range.getClass().getSimpleName() + "!range 只能是 用','分隔条件的字符串 或者 可取选项JSONArray!"); } /**WHERE key IN ('key0', 'key1', ... ) * @param in * @return IN ('key0', 'key1', ... ) * @throws NotExistException */ @JSONField(serialize = false) public String getInString(String key, Object[] in, boolean not) throws NotExistException { String condition = ""; if (in != null) {//返回 "" 会导致 id:[] 空值时效果和没有筛选id一样! for (int i = 0; i < in.length; i++) { condition += ((i > 0 ? "," : "") + getValue(in[i])); } } if (condition.isEmpty()) {//条件如果存在必须执行,不能忽略。条件为空会导致出错,又很难保证条件不为空(@:条件),所以还是这样好 throw new NotExistException(TAG + ".getInString(" + key + ", [], " + not + ") >> condition.isEmpty() >> IN()"); } return (not ? NOT : "") + " IN (" + condition + ")"; } //{} range >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //}{ exists <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**WHERE EXISTS subquery * 如果合并到 getRangeString,一方面支持不了 [1,2,2] 和 ">1" (转成 EXISTS(SELECT IN ) 需要static newSQLConfig,但它不能传入子类实例,除非不是 static),另一方面多了子查询临时表性能会比 IN 差 * @param key * @param value * @return EXISTS ALL(SELECT ...) 
* @throws NotExistException */ @JSONField(serialize = false) public String getExistsString(String key, Object value) throws Exception { if (value == null) { return ""; } if (value instanceof Subquery == false) { throw new IllegalArgumentException(key + "}{:subquery 类型为" + value.getClass().getSimpleName() + "!subquery 只能是 子查询JSONObejct!"); } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getExistsString key = " + key); return (logic.isNot() ? NOT : "") + " EXISTS " + getSubqueryString((Subquery) value); } //}{ exists >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //<> contain <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**WHERE key contains value * @param key * @param value * @return {@link #getContainString(String, Object[], int)} * @throws NotExistException */ @JSONField(serialize = false) public String getContainString(String key, Object value) throws IllegalArgumentException { if (value == null) { return ""; } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getContainString key = " + key); return getContainString(key, newJSONArray(value).toArray(), logic.getType()); } /**WHERE key contains childs * @param key * @param childs null ? "" : (empty ? no child : contains childs) * @param type |, &, ! * @return LOGIC [ ( key LIKE '[" + childs[i] + "]' OR key LIKE '[" + childs[i] + ", %' * OR key LIKE '%, " + childs[i] + ", %' OR key LIKE '%, " + childs[i] + "]' ) ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getContainString(String key, Object[] childs, int type) throws IllegalArgumentException { boolean not = Logic.isNot(type); String condition = ""; if (childs != null) { for (int i = 0; i < childs.length; i++) { if (childs[i] != null) { if (childs[i] instanceof JSON) { throw new IllegalArgumentException(key + "<>:value 中value类型不能为JSON!"); } if (DATABASE_POSTGRESQL.equals(getDatabase())) { condition += (i <= 0 ? "" : (Logic.isAnd(type) ? AND : OR)) + getKey(key) + " @> " + getValue(newJSONArray(childs[i])); //operator does not exist: jsonb @> character varying "[" + childs[i] + "]"); } else { condition += (i <= 0 ? "" : (Logic.isAnd(type) ? AND : OR)) + "json_contains(" + getKey(key) + ", " + getValue(childs[i].toString()) + ")"; } } } if (condition.isEmpty()) { condition = (getKey(key) + SQL.isNull(true) + OR + getLikeString(key, "[]")); // key = '[]' 无结果! } else { condition = (getKey(key) + SQL.isNull(false) + AND + "(" + condition + ")"); } } if (condition.isEmpty()) { return ""; } return getCondition(not, condition); } //<> contain >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //key@:{} Subquery <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< @Override public String getSubqueryString(Subquery subquery) throws Exception { String range = subquery.getRange(); SQLConfig cfg = subquery.getConfig(); cfg.setPreparedValueList(new ArrayList<>()); String sql = (range == null || range.isEmpty() ? "" : range) + "(" + cfg.getSQL(isPrepared()) + ") "; preparedValueList.addAll(cfg.getPreparedValueList()); return sql; } //key@:{} Subquery >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> /**拼接条件 * @param not * @param condition * @return */ private static String getCondition(boolean not, String condition) { return not ? 
NOT + "(" + condition + ")" : condition; } /**转为JSONArray * @param tv * @return */ @NotNull public static JSONArray newJSONArray(Object obj) { JSONArray array = new JSONArray(); if (obj != null) { if (obj instanceof Collection) { array.addAll((Collection<?>) obj); } else { array.add(obj); } } return array; } //WHERE >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //SET <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**获取SET * @return * @throws Exception */ @JSONField(serialize = false) public String getSetString() throws Exception { return getSetString(getMethod(), getContent(), ! isTest()); } /**获取SET * @param method * @param content * @return * @throws Exception */ @JSONField(serialize = false) public String getSetString(RequestMethod method, Map<String, Object> content, boolean verifyName) throws Exception { Set<String> set = content == null ? null : content.keySet(); String setString = ""; if (set != null && set.size() > 0) { String quote = getQuote(); boolean isFirst = true; int keyType = 0;// 0 - =; 1 - +, 2 - - Object value; String idKey = getIdKey(); for (String key : set) { //避免筛选到全部 value = key == null ? null : content.get(key); if (key == null || idKey.equals(key)) { continue; } if (key.endsWith("+")) { keyType = 1; } else if (key.endsWith("-")) { keyType = 2; } value = content.get(key); key = getRealKey(method, key, false, true, verifyName, quote); setString += (isFirst ? "" : ", ") + (getKey(key) + "=" + (keyType == 1 ? getAddString(key, value) : (keyType == 2 ? getRemoveString(key, value) : getValue(value)) ) ); isFirst = false; } } if (setString.isEmpty()) { throw new IllegalArgumentException("PUT 请求必须在Table内设置要修改的 key:value !"); } return " SET " + setString; } /**SET key = CONCAT (key, 'value') * @param key * @param value * @return CONCAT (key, 'value') * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getAddString(String key, Object value) throws IllegalArgumentException { if (value instanceof Number) { return getKey(key) + " + " + value; } if (value instanceof String) { return " CONCAT (" + getKey(key) + ", " + getValue(value) + ") "; } throw new IllegalArgumentException(key + "+ 对应的值 " + value + " 不是Number,String,Array中的任何一种!"); } /**SET key = replace(key, 'value', '') * @param key * @param value * @return REPLACE (key, 'value', '') * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getRemoveString(String key, Object value) throws IllegalArgumentException { if (value instanceof Number) { return getKey(key) + " - " + value; } if (value instanceof String) { return SQL.replace(getKey(key), (String) getValue(value), "");// " replace(" + key + ", '" + value + "', '') "; } throw new IllegalArgumentException(key + "- 对应的值 " + value + " 不是Number,String,Array中的任何一种!"); } //SET >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> /** * @return * @throws Exception */ @JSONField(serialize = false) @Override public String getSQL(boolean prepared) throws Exception { return getSQL(this.setPrepared(prepared)); } /** * @param config * @return * @throws Exception */ public static String getSQL(AbstractSQLConfig config) throws Exception { if (config == null) { Log.i(TAG, "getSQL config == null >> return null;"); return null; } //TODO procedure 改为 List<Procedure> procedureList; behind : true; function: callFunction(); String key; ... // for (...) { Call procedure1();\n SQL \n; Call procedure2(); ... 
} // 貌似不需要,因为 ObjecParser 里就已经处理的顺序等,只是这里要解决下 Schema 问题。 String sch = config.getSQLSchema(config.getSQLTable()); if (StringUtil.isNotEmpty(config.getProcedure(), true)) { String q = config.getQuote(); return "CALL " + q + sch + q + "."+ config.getProcedure(); } String tablePath = config.getTablePath(); if (StringUtil.isNotEmpty(tablePath, true) == false) { Log.i(TAG, "getSQL StringUtil.isNotEmpty(tablePath, true) == false >> return null;"); return null; } switch (config.getMethod()) { case POST: return "INSERT INTO " + tablePath + config.getColumnString() + " VALUES" + config.getValuesString(); case PUT: return "UPDATE " + tablePath + config.getSetString() + config.getWhereString(true); case DELETE: return "DELETE FROM " + tablePath + config.getWhereString(true); default: config.setPreparedValueList(new ArrayList<Object>()); String column = config.getColumnString(); return (config.isExplain() ? "EXPLAIN " : "") + "SELECT " + (config.getCache() == JSONRequest.CACHE_RAM ? "SQL_NO_CACHE " : "") + column + " FROM " + getConditionString(column, tablePath, config); } } /**获取条件SQL字符串 * @param page * @param column * @param table * @param where * @return * @throws Exception */ private static String getConditionString(String column, String table, AbstractSQLConfig config) throws Exception { String where = config.getWhereString(true); Subquery from = config.getFrom(); if (from != null) { table = config.getSubqueryString(from) + " AS " + config.getAlias() + " "; //TODO Comment:c 转为 AS `Comment:c` } String condition = table + config.getJoinString() + where + ( RequestMethod.isGetMethod(config.getMethod(), true) == false ? "" : config.getGroupString(true) + config.getHavingString(true) + config.getOrderString(true) ) ; //+ config.getLimitString(); //no need to optimize // if (config.getPage() <= 0 || ID.equals(column.trim())) { return condition + config.getLimitString(); // } // // // //order: id+ -> id >= idOfStartIndex; id- -> id <= idOfStartIndex <<<<<<<<<<<<<<<<<<< // String order = StringUtil.getNoBlankString(config.getOrder()); // List<String> orderList = order.isEmpty() ? null : Arrays.asList(StringUtil.split(order)); // // int type = 0; // if (BaseModel.isEmpty(orderList) || BaseModel.isContain(orderList, ID+"+")) { // type = 1; // } // else if (BaseModel.isContain(orderList, ID+"-")) { // type = 2; // } // // if (type > 0) { // return condition.replace("WHERE", // "WHERE id " + (type == 1 ? ">=" : "<=") + " (SELECT id FROM " + table // + where + " ORDER BY id " + (type == 1 ? "ASC" : "DESC") + " LIMIT " + config.getOffset() + ", 1) AND" // ) // + " LIMIT " + config.getCount(); //子查询起始id不一定准确,只能作为最小可能! 
;// // } // //order: id+ -> id >= idOfStartIndex; id- -> id <= idOfStartIndex >>>>>>>>>>>>>>>>>> // // // //结果错误!SELECT * FROM User AS t0 INNER JOIN // (SELECT id FROM User ORDER BY date ASC LIMIT 20, 10) AS t1 ON t0.id = t1.id // //common case, inner join // condition += config.getLimitString(); // return table + " AS t0 INNER JOIN (SELECT id FROM " + condition + ") AS t1 ON t0.id = t1.id"; } private boolean keyPrefix; @Override public boolean isKeyPrefix() { return keyPrefix; } @Override public AbstractSQLConfig setKeyPrefix(boolean keyPrefix) { this.keyPrefix = keyPrefix; return this; } public String getJoinString() throws Exception { String joinOns = ""; if (joinList != null) { String quote = getQuote(); List<Object> pvl = new ArrayList<>(); boolean changed = false; String sql = null; SQLConfig jc; String jt; String tn; for (Join j : joinList) { if (j.isAppJoin()) { // APP JOIN,只是作为一个标记,执行完主表的查询后自动执行副表的查询 User.id IN($commentIdList) continue; } //LEFT JOIN sys.apijson_user AS User ON User.id = Moment.userId, 都是用 = ,通过relateType处理缓存 // <"INNER JOIN User ON User.id = Moment.userId", UserConfig> TODO AS 放 getSQLTable 内 jc = j.getJoinConfig(); jc.setPrepared(isPrepared()); jt = jc.getTable(); tn = j.getTargetName(); //如果要强制小写,则可在子类重写这个方法再 toLowerCase // if (DATABASE_POSTGRESQL.equals(getDatabase())) { // jt = jt.toLowerCase(); // tn = tn.toLowerCase(); // } switch (j.getJoinType()) { //TODO $ SELF JOIN // case "@": // APP JOIN // continue; case "<": // LEFT JOIN case ">": // RIGHT JOIN jc.setMain(true).setKeyPrefix(false); sql = ( ">".equals(j.getJoinType()) ? " RIGHT" : " LEFT") + " JOIN ( " + jc.getSQL(isPrepared()) + " ) AS " + quote + jt + quote + " ON " + quote + jt + quote + "." + quote + j.getKey() + quote + " = " + quote + tn + quote + "." + quote + j.getTargetKey() + quote; jc.setMain(false).setKeyPrefix(true); // preparedValueList.addAll(jc.getPreparedValueList()); pvl.addAll(jc.getPreparedValueList()); changed = true; break; case "": // FULL JOIN case "|": // FULL JOIN 不支持 <>, [] ,避免太多符号 case "&": // INNER JOIN case "!": // OUTTER JOIN case "^": // SIDE JOIN //场景少且性能差,默认禁用 case "*": // CROSS JOIN sql = ("*".equals(j.getJoinType()) ? " CROSS JOIN " : " INNER JOIN ") + jc.getTablePath() + " ON " + quote + jt + quote + "." + quote + j.getKey() + quote + " = " + quote + tn + quote + "." + quote + j.getTargetKey() + quote; break; default: throw new UnsupportedOperationException("join:value 中 value 里的 " + j.getJoinType() + "/" + j.getPath() + "错误!不支持 " + j.getJoinType() + " 等 [@ APP, < LEFT, > RIGHT, | FULL, & INNER, ! 
OUTTER, ^ SIDE, * CROSS] 之外的JOIN类型 !"); } joinOns += " \n " + sql; } if (changed) { pvl.addAll(preparedValueList); preparedValueList = pvl; } } return joinOns; } /**新建SQL配置 * @param table * @param request * @param joinList * @param isProcedure * @param callback * @return * @throws Exception */ public static AbstractSQLConfig newSQLConfig(RequestMethod method, String table, JSONObject request, List<Join> joinList, boolean isProcedure, Callback callback) throws Exception { if (request == null) { // User:{} 这种空内容在查询时也有效 throw new NullPointerException(TAG + ": newSQLConfig request == null!"); } AbstractSQLConfig config = callback.getSQLConfig(method, table); String database = request.getString(KEY_DATABASE); String schema = request.getString(KEY_SCHEMA); config.setDatabase(database); //不删,后面表对象还要用的,必须放在 parseJoin 前 config.setSchema(schema); //不删,后面表对象还要用的 //放后面会导致主表是空对象时 joinList 未解析 if (isProcedure == false) { config = parseJoin(method, config, joinList, callback); } if (request.isEmpty()) { // User:{} 这种空内容在查询时也有效 return config; //request.remove(key); 前都可以直接return,之后必须保证 put 回去 } if (isProcedure) { return config; } String idKey = callback.getIdKey(schema, table); String idInKey = idKey + "{}"; String userIdKey = callback.getUserIdKey(schema, table); String userIdInKey = userIdKey + "{}"; Object idIn = request.get(idInKey); //可能是 id{}:">0" if (method == POST) { if (idIn != null) { //不能在这里确定[]的长度,只能在外面传进来 if ((idIn instanceof List == false) || ((List<?>)idIn).isEmpty()) { // id{}:[] 表示同时插入多条记录 throw new IllegalArgumentException("POST请求,生成多条记录请用 id{}:[] ! [] 类型为JSONArray且不能为空!"); } } else if (request.get(idKey) == null) { request.put(idKey, callback.newId(method, table)); } } //对id和id{}处理,这两个一定会作为条件 Object id = request.get(idKey); if (id != null) { //null无效 if (id instanceof Number) { if (((Number) id).longValue() <= 0) { //一定没有值 throw new NotExistException(TAG + ": newSQLConfig " + table + ".id <= 0"); } } else if (id instanceof String) { if (StringUtil.isEmpty(id, true)) { //一定没有值 throw new NotExistException(TAG + ": newSQLConfig StringUtil.isEmpty(" + table + ".id, true)"); } } else if (id instanceof Subquery) {} else { throw new IllegalArgumentException(idKey + ":value 中 value 的类型只能是 Long , String 或 Subquery !"); } if (idIn instanceof List) { //共用idIn场景少性能差 boolean contains = false; List<?> ids = ((List<?>) idIn); Object d; for (int i = 0; i < ids.size(); i++) { //不用 idIn.contains(id) 因为 idIn 里存到很可能是 Integer,id 又是 Long! 
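				// added note: compare via toString() because the parsed JSON array usually holds Integer elements while id is a Long, so equals()/contains() would never match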
d = ids.get(i); if (d != null && id.toString().equals(d.toString())) { contains = true; break; } } if (contains == false) {//empty有效 BaseModel.isEmpty(idIn) == false) { throw new NotExistException(TAG + ": newSQLConfig idIn != null && (((List<?>) idIn).contains(id) == false"); } } } String role = request.getString(KEY_ROLE); boolean explain = request.getBooleanValue(KEY_EXPLAIN); String cache = request.getString(KEY_CACHE); String combine = request.getString(KEY_COMBINE); Subquery from = (Subquery) request.get(KEY_FROM); String column = request.getString(KEY_COLUMN); String group = request.getString(KEY_GROUP); String having = request.getString(KEY_HAVING); String order = request.getString(KEY_ORDER); //强制作为条件且放在最前面优化性能 request.remove(idKey); request.remove(idInKey); //关键词 request.remove(KEY_ROLE); request.remove(KEY_EXPLAIN); request.remove(KEY_CACHE); request.remove(KEY_DATABASE); request.remove(KEY_SCHEMA); request.remove(KEY_COMBINE); request.remove(KEY_FROM); request.remove(KEY_COLUMN); request.remove(KEY_GROUP); request.remove(KEY_HAVING); request.remove(KEY_ORDER); Map<String, Object> tableWhere = new LinkedHashMap<String, Object>();//保证顺序好优化 WHERE id > 1 AND name LIKE... //已经remove了id和id{},以及@key Set<String> set = request.keySet(); //前面已经判断request是否为空 if (method == POST) {//POST操作 if (set != null && set.isEmpty() == false) { //不能直接return,要走完下面的流程 List<Object> idList; if (id != null) { //单条记录 if (idIn != null) { throw new IllegalArgumentException("POST请求中 id 和 id{} 不能同时存在!"); } idList = new ArrayList<Object>(1); idList.add(id); } else { //多条记录 idList = new ArrayList<Object>((JSONArray) idIn); } //idIn不为空时,valuesString有多条,唯一的区别就是id String[] columns = set.toArray(new String[]{}); Collection<Object> valueCollection = request.values(); Object[] values = valueCollection == null ? null : valueCollection.toArray(); if (values == null || values.length != columns.length) { throw new Exception("服务器内部错误:\n" + TAG + " newSQLConfig values == null || values.length != columns.length !"); } column = idKey + "," + StringUtil.getString(columns); //set已经判断过不为空 final int size = columns.length + 1; //以key数量为准 List<List<Object>> valuess = new ArrayList<>(idList.size()); // [idList.size()][] List<Object> items; //(item0, item1, ...) for (int i = 0; i < idList.size(); i++) { items = new ArrayList<>(size); items.add(idList.get(i)); //第0个就是id for (int j = 1; j < size; j++) { items.add(values[j-1]); //从第1个开始,允许"null" } valuess.add(items); } config.setValues(valuess); } } else { //非POST操作 final boolean isWhere = method != PUT;//除了POST,PUT,其它全是条件!!! //条件<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< List<String> whereList = null; Map<String, List<String>> combineMap = new LinkedHashMap<>(); List<String> andList = new ArrayList<>(); List<String> orList = new ArrayList<>(); List<String> notList = new ArrayList<>(); //强制作为条件且放在最前面优化性能 if (id != null) { tableWhere.put(idKey, id); andList.add(idKey); } if (idIn != null) { tableWhere.put(idInKey, idIn); andList.add(idInKey); } String[] ws = StringUtil.split(combine); if (ws != null) { if (method == DELETE || method == GETS || method == HEADS) { throw new IllegalArgumentException("DELETE,GETS,HEADS 请求不允许传 @combine:\"conditons\" !"); } whereList = new ArrayList<>(); String w; for (int i = 0; i < ws.length; i++) { //去除 &,|,! 前缀 w = ws[i]; if (w != null) { if (w.startsWith("&")) { w = w.substring(1); andList.add(w); } else if (w.startsWith("|")) { if (method == PUT) { throw new IllegalArgumentException(table + ":{} 里的 @combine:value 中的value里条件 " + ws[i] + " 不合法!" 
+ "PUT请求的 @combine:\"key0,key1,...\" 不允许传 |key 或 !key !"); } w = w.substring(1); orList.add(w); } else if (w.startsWith("!")) { if (method == PUT) { throw new IllegalArgumentException(table + ":{} 里的 @combine:value 中的value里条件 " + ws[i] + " 不合法!" + "PUT请求的 @combine:\"key0,key1,...\" 不允许传 |key 或 !key !"); } w = w.substring(1); notList.add(w); } else { orList.add(w); } if (w.isEmpty()) { throw new IllegalArgumentException(table + ":{} 里的 @combine:value 中的value里条件 " + ws[i] + " 不合法!不允许为空值!"); } else { if (idKey.equals(w) || idInKey.equals(w) || userIdKey.equals(w) || userIdInKey.equals(w)) { throw new UnsupportedOperationException(table + ":{} 里的 @combine:value 中的value里 " + ws[i] + " 不合法!" + "不允许传 [" + idKey + ", " + idInKey + ", " + userIdKey + ", " + userIdInKey + "] 其中任何一个!"); } } whereList.add(w); } if (request.containsKey(w) == false) { throw new IllegalArgumentException(table + ":{} 里的 @combine:value 中的value里 " + ws[i] + " 对应的 " + w + " 不在它里面!"); } } } //条件>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> Map<String, Object> tableContent = new LinkedHashMap<String, Object>(); Object value; for (String key : set) { value = request.get(key); if (value instanceof Map) {//只允许常规Object throw new IllegalArgumentException("不允许 " + key + " 等任何key的value类型为 {JSONObject} !"); } //解决AccessVerifier新增userId没有作为条件,而是作为内容,导致PUT,DELETE出错 if (isWhere) { tableWhere.put(key, value); if (whereList == null || whereList.contains(key) == false) { andList.add(key); } } else if (whereList != null && whereList.contains(key)) { tableWhere.put(key, value); } else { tableContent.put(key, value);//一样 instanceof JSONArray ? JSON.toJSONString(value) : value); } } combineMap.put("&", andList); combineMap.put("|", orList); combineMap.put("!", notList); config.setCombine(combineMap); config.setContent(tableContent); } List<String> cs = new ArrayList<>(); String[] fks = StringUtil.split(column, ";"); // key0,key1;fun0(key0,...);fun1(key0,...);key3;fun2(key0,...) if (fks != null) { String[] ks; for (String fk : fks) { if (fk.contains("(")) { //fun0(key0,...) cs.add(fk); } else { //key0,key1... ks = StringUtil.split(fk); if (ks != null && ks.length > 0) { cs.addAll(Arrays.asList(ks)); } } } } config.setExplain(explain); config.setCache(cache); config.setFrom(from); config.setColumn(column == null ? 
null : cs); //解决总是 config.column != null,总是不能得到 * config.setWhere(tableWhere); config.setId(id); //在 tableWhere 第0个 config.setIdIn(idIn); config.setRole(role); config.setGroup(group); config.setHaving(having); config.setOrder(order); //TODO 解析JOIN,包括 @column,@group 等要合并 //后面还可能用到,要还原 //id或id{}条件 request.put(idKey, id); request.put(idInKey, idIn); //关键词 request.put(KEY_DATABASE, database); request.put(KEY_ROLE, role); request.put(KEY_EXPLAIN, explain); request.put(KEY_CACHE, cache); request.put(KEY_SCHEMA, schema); request.put(KEY_COMBINE, combine); request.put(KEY_FROM, from); request.put(KEY_COLUMN, column); request.put(KEY_GROUP, group); request.put(KEY_HAVING, having); request.put(KEY_ORDER, order); return config; } /** * @param method * @param config * @param joinList * @param callback * @return * @throws Exception */ public static AbstractSQLConfig parseJoin(RequestMethod method, AbstractSQLConfig config, List<Join> joinList, Callback callback) throws Exception { boolean isQuery = RequestMethod.isQueryMethod(method); config.setKeyPrefix(isQuery && config.isMain() == false); //TODO 解析出 SQLConfig 再合并 column, order, group 等 if (joinList == null || joinList.isEmpty() || RequestMethod.isQueryMethod(method) == false) { return config; } String name; for (Join j : joinList) { name = j.getName(); //JOIN子查询不能设置LIMIT,因为ON关系是在子查询后处理的,会导致结果会错误 SQLConfig joinConfig = newSQLConfig(method, name, j.getTable(), null, false, callback); SQLConfig cacheConfig = newSQLConfig(method, name, j.getTable(), null, false, callback).setCount(1); if (j.isAppJoin() == false) { //除了 @ APP JOIN,其它都是 SQL JOIN,则副表要这样配置 if (joinConfig.getDatabase() == null) { joinConfig.setDatabase(config.getDatabase()); //解决主表 JOIN 副表,引号不一致 } else if (joinConfig.getDatabase().equals(config.getDatabase()) == false) { throw new IllegalArgumentException("主表 " + config.getTable() + " 的 @database:" + config.getDatabase() + " 和它 SQL JOIN 的副表 " + name + " 的 @database:" + joinConfig.getDatabase() + " 不一致!"); } if (joinConfig.getSchema() == null) { joinConfig.setSchema(config.getSchema()); //主表 JOIN 副表,默认 schema 一致 } cacheConfig.setDatabase(joinConfig.getDatabase()).setSchema(joinConfig.getSchema()); //解决主表 JOIN 副表,引号不一致 if (isQuery) { config.setKeyPrefix(true); } joinConfig.setMain(false).setKeyPrefix(true); if (j.isLeftOrRightJoin()) { SQLConfig outterConfig = newSQLConfig(method, name, j.getOutter(), null, false, callback); outterConfig.setMain(false).setKeyPrefix(true).setDatabase(joinConfig.getDatabase()).setSchema(joinConfig.getSchema()); //解决主表 JOIN 副表,引号不一致 j.setOutterConfig(outterConfig); } } //解决 query: 1/2 查数量时报错 /* SELECT count(*) AS count FROM sys.Moment AS Moment LEFT JOIN ( SELECT count(*) AS count FROM sys.Comment ) AS Comment ON Comment.momentId = Moment.id LIMIT 1 OFFSET 0 */ if (RequestMethod.isHeadMethod(method, true)) { joinConfig.setMethod(GET); //子查询不能为 SELECT count(*) ,而应该是 SELECT momentId joinConfig.setColumn(Arrays.asList(j.getKey())); //优化性能,不取非必要的字段 cacheConfig.setMethod(GET); //子查询不能为 SELECT count(*) ,而应该是 SELECT momentId cacheConfig.setColumn(Arrays.asList(j.getKey())); //优化性能,不取非必要的字段 } j.setJoinConfig(joinConfig); j.setCacheConfig(cacheConfig); } config.setJoinList(joinList); return config; } /**获取客户端实际需要的key * verifyName = true * @param method * @param originKey * @param isTableKey * @param saveLogic 保留逻辑运算符 & | ! 
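	 * @param quote identifier quote from getQuote(), i.e. " for PostgreSQL and ` otherwise (added doc line)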
* @return */ public static String getRealKey(RequestMethod method, String originKey , boolean isTableKey, boolean saveLogic, String quote) throws Exception { return getRealKey(method, originKey, isTableKey, saveLogic, true, quote); } /**获取客户端实际需要的key * @param method * @param originKey * @param isTableKey * @param saveLogic 保留逻辑运算符 & | ! * @param verifyName 验证key名是否符合代码变量/常量名 * @return */ public static String getRealKey(RequestMethod method, String originKey , boolean isTableKey, boolean saveLogic, boolean verifyName, String quote) throws Exception { Log.i(TAG, "getRealKey saveLogic = " + saveLogic + "; originKey = " + originKey); if (originKey == null || originKey.startsWith(quote) || zuo.biao.apijson.JSONObject.isArrayKey(originKey)) { Log.w(TAG, "getRealKey originKey == null || originKey.startsWith(`)" + " || zuo.biao.apijson.JSONObject.isArrayKey(originKey) >> return originKey;"); return originKey; } String key = new String(originKey); if (key.endsWith("$")) {//搜索 LIKE,查询时处理 key = key.substring(0, key.length() - 1); } else if (key.endsWith("~") || key.endsWith("?")) {//匹配正则表达式 REGEXP,查询时处理 TODO ?可能以后会被废弃,全用 ~ 和 *~ 替代,更接近 PostgreSQL 语法 key = key.substring(0, key.length() - 1); if (key.endsWith("*")) {//忽略大小写 key = key.substring(0, key.length() - 1); } } else if (key.endsWith("%")) {//数字、文本、日期范围 BETWEEN AND key = key.substring(0, key.length() - 1); } else if (key.endsWith("{}")) {//被包含 IN,或者说key对应值处于value的范围内。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith("}{")) {//被包含 EXISTS,或者说key对应值处于value的范围内。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith("<>")) {//包含 json_contains,或者说value处于key对应值的范围内。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith("()")) {//方法,查询完后处理,先用一个Map<key,function>保存? key = key.substring(0, key.length() - 2); } else if (key.endsWith("@")) {//引用,引用对象查询完后处理。fillTarget中暂时不用处理,因为非GET请求都是由给定的id确定,不需要引用 key = key.substring(0, key.length() - 1); } else if (key.endsWith(">=")) {//比较。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith("<=")) {//比较。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith(">")) {//比较。查询时处理 key = key.substring(0, key.length() - 1); } else if (key.endsWith("<")) {//比较。查询时处理 key = key.substring(0, key.length() - 1); } else if (key.endsWith("+")) {//延长,PUT查询时处理 if (method == PUT) {//不为PUT就抛异常 key = key.substring(0, key.length() - 1); } } else if (key.endsWith("-")) {//缩减,PUT查询时处理 if (method == PUT) {//不为PUT就抛异常 key = key.substring(0, key.length() - 1); } } String last = null;//不用Logic优化代码,否则 key 可能变为 key| 导致 key=value 变成 key|=value 而出错 if (RequestMethod.isQueryMethod(method)) {//逻辑运算符仅供GET,HEAD方法使用 last = key.isEmpty() ? "" : key.substring(key.length() - 1); if ("&".equals(last) || "|".equals(last) || "!".equals(last)) { key = key.substring(0, key.length() - 1); } else { last = null;//避免key + StringUtil.getString(last)错误延长 } } //"User:toUser":User转换"toUser":User, User为查询同名Table得到的JSONObject。交给客户端处理更好 if (isTableKey) {//不允许在column key中使用Type:key形式 key = Pair.parseEntry(key, true).getKey();//table以左边为准 } else { key = Pair.parseEntry(key).getValue();//column以右边为准 } if (verifyName && StringUtil.isName(key.startsWith("@") ? key.substring(1) : key) == false) { throw new IllegalArgumentException(method + "请求,字符 " + originKey + " 不合法!" 
+ " key:value 中的key只能关键词 '@key' 或 'key[逻辑符][条件符]' 或 PUT请求下的 'key+' / 'key-' !"); } if (saveLogic && last != null) { key = key + last; } Log.i(TAG, "getRealKey return key = " + key); return key; } public static interface Callback { /**获取 SQLConfig 的实例 * @param method * @param table * @return */ AbstractSQLConfig getSQLConfig(RequestMethod method, String table); /**为 post 请求新建 id, 只能是 Long 或 String * @param method * @param table * @return */ Object newId(RequestMethod method, String table); /**获取主键名 * @param schema * @param table * @return */ String getIdKey(String schema, String table); /**获取 User 的主键名 * @param schema * @param table * @return */ String getUserIdKey(String schema, String table); } public static abstract class SimpleCallback implements Callback { @Override public Object newId(RequestMethod method, String table) { return System.currentTimeMillis(); } @Override public String getIdKey(String schema, String table) { return KEY_ID; } @Override public String getUserIdKey(String schema, String table) { return KEY_USER_ID; } } }
APIJSON-Java-Server/APIJSONORM/src/main/java/zuo/biao/apijson/server/AbstractSQLConfig.java
/*Copyright ©2016 TommyLemon(https://github.com/TommyLemon/APIJSON) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.*/ package zuo.biao.apijson.server; import static zuo.biao.apijson.JSONObject.KEY_COLUMN; import static zuo.biao.apijson.JSONObject.KEY_COMBINE; import static zuo.biao.apijson.JSONObject.KEY_DATABASE; import static zuo.biao.apijson.JSONObject.KEY_EXPLAIN; import static zuo.biao.apijson.JSONObject.KEY_CACHE; import static zuo.biao.apijson.JSONObject.KEY_FROM; import static zuo.biao.apijson.JSONObject.KEY_GROUP; import static zuo.biao.apijson.JSONObject.KEY_HAVING; import static zuo.biao.apijson.JSONObject.KEY_ID; import static zuo.biao.apijson.JSONObject.KEY_ORDER; import static zuo.biao.apijson.JSONObject.KEY_ROLE; import static zuo.biao.apijson.JSONObject.KEY_SCHEMA; import static zuo.biao.apijson.JSONObject.KEY_USER_ID; import static zuo.biao.apijson.RequestMethod.DELETE; import static zuo.biao.apijson.RequestMethod.GET; import static zuo.biao.apijson.RequestMethod.GETS; import static zuo.biao.apijson.RequestMethod.HEADS; import static zuo.biao.apijson.RequestMethod.POST; import static zuo.biao.apijson.RequestMethod.PUT; import static zuo.biao.apijson.SQL.AND; import static zuo.biao.apijson.SQL.NOT; import static zuo.biao.apijson.SQL.OR; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.regex.Pattern; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.annotation.JSONField; import zuo.biao.apijson.JSON; import zuo.biao.apijson.Log; import zuo.biao.apijson.NotNull; import zuo.biao.apijson.RequestMethod; import zuo.biao.apijson.RequestRole; import zuo.biao.apijson.SQL; import zuo.biao.apijson.StringUtil; import zuo.biao.apijson.server.exception.NotExistException; import zuo.biao.apijson.server.model.Column; import zuo.biao.apijson.server.model.PgAttribute; import zuo.biao.apijson.server.model.PgClass; import zuo.biao.apijson.server.model.Table; /**config sql for JSON Request * @author Lemon */ public abstract class AbstractSQLConfig implements SQLConfig { private static final String TAG = "AbstractSQLConfig"; public static String DEFAULT_SCHEMA = "sys"; /** * 表名映射,隐藏真实表名,对安全要求很高的表可以这么做 */ public static final Map<String, String> TABLE_KEY_MAP; static { TABLE_KEY_MAP = new HashMap<String, String>(); TABLE_KEY_MAP.put(Table.class.getSimpleName(), Table.TABLE_NAME); TABLE_KEY_MAP.put(Column.class.getSimpleName(), Column.TABLE_NAME); TABLE_KEY_MAP.put(PgAttribute.class.getSimpleName(), PgAttribute.TABLE_NAME); TABLE_KEY_MAP.put(PgClass.class.getSimpleName(), PgClass.TABLE_NAME); } @NotNull @Override public String getIdKey() { return KEY_ID; } @NotNull @Override public String getUserIdKey() { return KEY_USER_ID; } private Object id; //Table的id private RequestMethod method; //操作方法 private boolean prepared = true; //预编译 private boolean main = true; /** * TODO 
被关联的表通过就忽略关联的表?(这个不行 User:{"sex@":"/Comment/toId"}) */ private RequestRole role; //发送请求的用户的角色 private String database; //表所在的数据库类型 private String schema; //表所在的数据库名 private String table; //表名 private String alias; //表别名 private String group; //分组方式的字符串数组,','分隔 private String having; //聚合函数的字符串数组,','分隔 private String order; //排序方式的字符串数组,','分隔 private Subquery from; //子查询临时表 private List<String> column; //表内字段名(或函数名,仅查询操作可用)的字符串数组,','分隔 private List<List<Object>> values; //对应表内字段的值的字符串数组,','分隔 private Map<String, Object> content; //Request内容,key:value形式,column = content.keySet(),values = content.values() private Map<String, Object> where; //筛选条件,key:value形式 private Map<String, List<String>> combine; //条件组合,{ "&":[key], "|":[key], "!":[key] } //array item <<<<<<<<<< private int count; //Table数量 private int page; //Table所在页码 private int position; //Table在[]中的位置 private int query; //JSONRequest.query private int type; //ObjectParser.type private int cache; private boolean explain; private List<Join> joinList; //连表 配置列表 //array item >>>>>>>>>> private boolean test; //测试 private String procedure; public SQLConfig setProcedure(String procedure) { this.procedure = procedure; return this; } public String getProcedure() { return procedure; } public AbstractSQLConfig(RequestMethod method) { setMethod(method); } public AbstractSQLConfig(RequestMethod method, String table) { this(method); setTable(table); } public AbstractSQLConfig(RequestMethod method, int count, int page) { this(method); setCount(count); setPage(page); } @NotNull @Override public RequestMethod getMethod() { if (method == null) { method = GET; } return method; } @Override public AbstractSQLConfig setMethod(RequestMethod method) { this.method = method; return this; } @Override public boolean isPrepared() { return prepared; } @Override public AbstractSQLConfig setPrepared(boolean prepared) { this.prepared = prepared; return this; } @Override public boolean isMain() { return main; } @Override public AbstractSQLConfig setMain(boolean main) { this.main = main; return this; } @Override public Object getId() { return id; } @Override public AbstractSQLConfig setId(Object id) { this.id = id; return this; } @Override public RequestRole getRole() { //不能 @NotNull , AbstractParser#getSQLObject 内当getRole() == null时填充默认值 return role; } public AbstractSQLConfig setRole(String roleName) throws Exception { return setRole(RequestRole.get(roleName)); } @Override public AbstractSQLConfig setRole(RequestRole role) { this.role = role; return this; } @Override public String getDatabase() { return database; } @Override public SQLConfig setDatabase(String database) { this.database = database; return this; } @Override public String getQuote() { return DATABASE_POSTGRESQL.equals(getDatabase()) ? "\"" : "`"; } @Override public String getSchema() { return schema; } public String getSQLSchema(String sqlTable) { //强制,避免因为全局默认的 @schema 自动填充进来,导致这几个类的 schema 为 sys 等其它值 if ((Table.TABLE_NAME.equals(sqlTable) || Column.TABLE_NAME.equals(sqlTable)) ) { return SCHEMA_INFORMATION; } if ((PgAttribute.TABLE_NAME.equals(sqlTable) || PgClass.TABLE_NAME.equals(sqlTable)) ) { return ""; } String sch = getSchema(); if (sch == null) { //PostgreSQL 的 pg_class 和 pg_attribute 表好像不属于任何 Schema StringUtil.isEmpty(sch, true)) { sch = DEFAULT_SCHEMA; } return sch; } @Override public AbstractSQLConfig setSchema(String schema) { if (schema != null) { String quote = getQuote(); String s = schema.startsWith(quote) && schema.endsWith(quote) ? 
schema.substring(1, schema.length() - 1) : schema; if (StringUtil.isEmpty(s, true) == false && StringUtil.isName(s) == false) { throw new IllegalArgumentException("@schema:value 中value必须是1个单词!"); } } this.schema = schema; return this; } /**请求传进来的Table名 * @return * @see {@link #getSQLTable()} */ @Override public String getTable() { return table; } /**数据库里的真实Table名 * 通过 {@link #TABLE_KEY_MAP} 映射 * @return */ @JSONField(serialize = false) @Override public String getSQLTable() { // String t = TABLE_KEY_MAP.containsKey(table) ? TABLE_KEY_MAP.get(table) : table; //如果要强制小写,则可在子类重写这个方法再 toLowerCase return DATABASE_POSTGRESQL.equals(getDatabase()) ? t.toLowerCase() : t; return TABLE_KEY_MAP.containsKey(table) ? TABLE_KEY_MAP.get(table) : table; } @JSONField(serialize = false) @Override public String getTablePath() { String q = getQuote(); String sqlTable = getSQLTable(); String sch = getSQLSchema(sqlTable); return (StringUtil.isEmpty(sch, true) ? "" : q + sch + q + ".") + q + sqlTable + q + ( isKeyPrefix() ? " AS " + getAlias() : ""); } @Override public AbstractSQLConfig setTable(String table) { //Table已经在Parser中校验,所以这里不用防SQL注入 this.table = table; return this; } @Override public String getAlias() { if (StringUtil.isEmpty(alias, true)) { alias = getTable(); } String q = getQuote(); //getTable 不能小写,因为Verifier用大小写敏感的名称判断权限 //如果要强制小写,则可在子类重写这个方法再 toLowerCase return q + (DATABASE_POSTGRESQL.equals(getDatabase()) ? alias.toLowerCase() : alias) + q; return q + alias + q; } @Override public AbstractSQLConfig setAlias(String alias) { this.alias = alias; return this; } @Override public String getGroup() { return group; } public AbstractSQLConfig setGroup(String... keys) { return setGroup(StringUtil.getString(keys)); } @Override public AbstractSQLConfig setGroup(String group) { this.group = group; return this; } @JSONField(serialize = false) public String getGroupString(boolean hasPrefix) { //加上子表的 group String joinGroup = ""; if (joinList != null) { SQLConfig cfg; String c; boolean first = true; for (Join j : joinList) { if (j.isAppJoin()) { continue; } cfg = j.isLeftOrRightJoin() ? j.getOutterConfig() : j.getJoinConfig(); cfg.setAlias(cfg.getTable()); c = ((AbstractSQLConfig) cfg).getGroupString(false); if (StringUtil.isEmpty(c, true) == false) { joinGroup += (first ? "" : ", ") + c; first = false; } } } group = StringUtil.getTrimedString(group); String[] keys = StringUtil.split(group); if (keys == null || keys.length <= 0) { return StringUtil.isEmpty(joinGroup, true) ? "" : (hasPrefix ? " GROUP BY " : "") + joinGroup; } for (int i = 0; i < keys.length; i++) { if (isPrepared()) { //不能通过 ? 来代替,因为SQLExecutor statement.setString后 GROUP BY 'userId' 有单引号,只能返回一条数据,必须去掉单引号才行! if (StringUtil.isName(keys[i]) == false) { throw new IllegalArgumentException("@group:value 中 value里面用 , 分割的每一项都必须是1个单词!并且不要有空格!"); } } keys[i] = getKey(keys[i]); } return (hasPrefix ? " GROUP BY " : "") + StringUtil.concat(StringUtil.getString(keys), joinGroup, ", "); } @Override public String getHaving() { return having; } public AbstractSQLConfig setHaving(String... conditions) { return setHaving(StringUtil.getString(conditions)); } @Override public AbstractSQLConfig setHaving(String having) { this.having = having; return this; } /** * @return HAVING conditoin0 AND condition1 OR condition2 ... 
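	 *         added example: @having:"count(id)>=10" renders as count(`id`)>=10 (plus the table alias prefix when isKeyPrefix() is true)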
*/ @JSONField(serialize = false) public String getHavingString(boolean hasPrefix) { //加上子表的 having String joinHaving = ""; if (joinList != null) { SQLConfig cfg; String c; boolean first = true; for (Join j : joinList) { if (j.isAppJoin()) { continue; } cfg = j.isLeftOrRightJoin() ? j.getOutterConfig() : j.getJoinConfig(); cfg.setAlias(cfg.getTable()); c = ((AbstractSQLConfig) cfg).getHavingString(false); if (StringUtil.isEmpty(c, true) == false) { joinHaving += (first ? "" : ", ") + c; first = false; } } } having = StringUtil.getTrimedString(having); String[] keys = StringUtil.split(having, ";"); if (keys == null || keys.length <= 0) { return StringUtil.isEmpty(joinHaving, true) ? "" : (hasPrefix ? " HAVING " : "") + joinHaving; } String expression; String method; //暂时不允许 String prefix; String suffix; //fun0(arg0,arg1,...);fun1(arg0,arg1,...) for (int i = 0; i < keys.length; i++) { //fun(arg0,arg1,...) expression = keys[i]; int start = expression.indexOf("("); if (start < 0) { if (isPrepared() && PATTERN_HAVING.matcher(expression).matches() == false) { throw new UnsupportedOperationException("字符串 " + expression + " 不合法!" + "预编译模式下 @having:\"column?value;function(arg0,arg1,...)?value...\"" + " 中 column?value 必须符合正则表达式 ^[A-Za-z0-9%!=<>]+$ !不允许空格!"); } continue; } int end = expression.indexOf(")"); if (start >= end) { throw new IllegalArgumentException("字符 " + expression + " 不合法!" + "@having:value 中 value 里的 SQL函数必须为 function(arg0,arg1,...) 这种格式!"); } method = expression.substring(0, start); if (StringUtil.isName(method) == false) { throw new IllegalArgumentException("字符 " + method + " 不合法!" + "预编译模式下 @having:\"column?value;function(arg0,arg1,...)?value...\"" + " 中SQL函数名 function 必须符合正则表达式 ^[0-9a-zA-Z_]+$ !"); } suffix = expression.substring(end + 1, expression.length()); if (isPrepared() && PATTERN_HAVING_SUFFIX.matcher((String) suffix).matches() == false) { throw new UnsupportedOperationException("字符串 " + suffix + " 不合法!" + "预编译模式下 @having:\"column?value;function(arg0,arg1,...)?value...\"" + " 中 ?value 必须符合正则表达式 ^[0-9%!=<>]+$ !不允许空格!"); } String[] ckeys = StringUtil.split(expression.substring(start + 1, end)); if (ckeys != null) { for (int j = 0; j < ckeys.length; j++) { if (isPrepared() && (StringUtil.isName(ckeys[j]) == false || ckeys[j].startsWith("_"))) { throw new IllegalArgumentException("字符 " + ckeys[j] + " 不合法!" + "预编译模式下 @having:\"column?value;function(arg0,arg1,...)?value...\"" + " 中所有 arg 都必须是1个不以 _ 开头的单词!并且不要有空格!"); } ckeys[j] = getKey(ckeys[j]); } } keys[i] = method + "(" + StringUtil.getString(ckeys) + ")" + suffix; } //TODO 支持 OR, NOT 参考 @combine:"&key0,|key1,!key2" return (hasPrefix ? " HAVING " : "") + StringUtil.concat(StringUtil.getString(keys, AND), joinHaving, AND); } @Override public String getOrder() { return order; } public AbstractSQLConfig setOrder(String... conditions) { return setOrder(StringUtil.getString(conditions)); } @Override public AbstractSQLConfig setOrder(String order) { this.order = order; return this; } @JSONField(serialize = false) public String getOrderString(boolean hasPrefix) { //加上子表的 order String joinOrder = ""; if (joinList != null) { SQLConfig cfg; String c; boolean first = true; for (Join j : joinList) { if (j.isAppJoin()) { continue; } cfg = j.isLeftOrRightJoin() ? j.getOutterConfig() : j.getJoinConfig(); cfg.setAlias(cfg.getTable()); c = ((AbstractSQLConfig) cfg).getOrderString(false); if (StringUtil.isEmpty(c, true) == false) { joinOrder += (first ? 
"" : ", ") + c; first = false; } } } order = StringUtil.getTrimedString(order); if (order.contains("+")) {//replace没有包含的replacement会崩溃 order = order.replaceAll("\\+", " ASC "); } if (order.contains("-")) { order = order.replaceAll("-", " DESC "); } String[] keys = StringUtil.split(order); if (keys == null || keys.length <= 0) { return StringUtil.isEmpty(joinOrder, true) ? "" : (hasPrefix ? " ORDER BY " : "") + joinOrder; } String origin; String sort; int index; for (int i = 0; i < keys.length; i++) { index = keys[i].trim().endsWith(" ASC") ? keys[i].lastIndexOf(" ASC") : -1; //StringUtil.split返回数组中,子项不会有null if (index < 0) { index = keys[i].trim().endsWith(" DESC") ? keys[i].lastIndexOf(" DESC") : -1; sort = index <= 0 ? "" : " DESC "; } else { sort = " ASC "; } origin = index < 0 ? keys[i] : keys[i].substring(0, index); if (isPrepared()) { //不能通过 ? 来代替,SELECT 'id','name' 返回的就是 id:"id", name:"name",而不是数据库里的值! //这里既不对origin trim,也不对 ASC/DESC ignoreCase,希望前端严格传没有任何空格的字符串过来,减少传输数据量,节约服务器性能 if (StringUtil.isName(origin) == false) { throw new IllegalArgumentException("预编译模式下 @order:value 中 value里面用 , 分割的每一项" + " column+ / column- 中 column必须是1个单词!并且不要有多余的空格!"); } } keys[i] = getKey(origin) + sort; } return (hasPrefix ? " ORDER BY " : "") + StringUtil.concat(StringUtil.getString(keys), joinOrder, ", "); } @Override public Subquery getFrom() { return from; } @Override public AbstractSQLConfig setFrom(Subquery from) { this.from = from; return this; } @Override public List<String> getColumn() { return column; } @Override public AbstractSQLConfig setColumn(List<String> column) { this.column = column; return this; } @JSONField(serialize = false) public String getColumnString() throws Exception { return getColumnString(false); } @JSONField(serialize = false) public String getColumnString(boolean inSQLJoin) throws Exception { switch (getMethod()) { case HEAD: case HEADS: //StringUtil.isEmpty(column, true) || column.contains(",") 时SQL.count(column)会return "*" if (isPrepared() && column != null) { String origin; String alias; int index; for (String c : column) { index = c.lastIndexOf(":"); //StringUtil.split返回数组中,子项不会有null origin = index < 0 ? c : c.substring(0, index); alias = index < 0 ? null : c.substring(index + 1); if (StringUtil.isName(origin) == false || (alias != null && StringUtil.isName(alias) == false)) { throw new IllegalArgumentException("HEAD请求: 预编译模式下 @column:value 中 value里面用 , 分割的每一项" + " column:alias 中 column 必须是1个单词!如果有alias,则alias也必须为1个单词!并且不要有多余的空格!"); } } } return SQL.count(column != null && column.size() == 1 ? getKey(Pair.parseEntry(column.get(0), true).getKey()) : "*"); case POST: if (column == null || column.isEmpty()) { throw new IllegalArgumentException("POST 请求必须在Table内设置要保存的 key:value !"); } String s = ""; boolean pfirst = true; for (String c : column) { if (isPrepared() && StringUtil.isName(c) == false) { //不能通过 ? 来代替,SELECT 'id','name' 返回的就是 id:"id", name:"name",而不是数据库里的值! throw new IllegalArgumentException("POST请求: 每一个 key:value 中的key都必须是1个单词!"); } s += ((pfirst ? 
"" : ",") + getKey(c)); pfirst = false; } return "(" + s + ")"; case GET: case GETS: boolean isQuery = RequestMethod.isQueryMethod(method); //TODO 这个有啥用?上面应是 getMethod 的值 GET 和 GETS 了。 String joinColumn = ""; if (isQuery && joinList != null) { SQLConfig ecfg; SQLConfig cfg; String c; boolean first = true; for (Join j : joinList) { if (j.isAppJoin()) { continue; } ecfg = j.getOutterConfig(); if (ecfg != null && ecfg.getColumn() != null) { //优先级更高 cfg = ecfg; } else { cfg = j.getJoinConfig(); } cfg.setAlias(cfg.getTable()); c = ((AbstractSQLConfig) cfg).getColumnString(true); if (StringUtil.isEmpty(c, true) == false) { joinColumn += (first ? "" : ", ") + c; first = false; } inSQLJoin = true; } } String tableAlias = getAlias(); // String c = StringUtil.getString(column); //id,name;json_length(contactIdList):contactCount;... String[] keys = column == null ? null : column.toArray(new String[]{}); //StringUtil.split(c, ";"); if (keys == null || keys.length <= 0) { boolean noColumn = column != null && inSQLJoin; String mc = isKeyPrefix() == false ? (noColumn ? "" : "*") : (noColumn ? "" : tableAlias + ".*"); return StringUtil.concat(mc, joinColumn, ", ", true); } String expression; String method = null; //...;fun0(arg0,arg1,...):fun0;fun1(arg0,arg1,...):fun1;... for (int i = 0; i < keys.length; i++) { //fun(arg0,arg1,...) expression = keys[i]; int start = expression.indexOf("("); int end = 0; if (start >= 0) { end = expression.indexOf(")"); if (start >= end) { throw new IllegalArgumentException("字符 " + expression + " 不合法!" + "@having:value 中 value 里的 SQL函数必须为 function(arg0,arg1,...) 这种格式!"); } method = expression.substring(0, start); if (StringUtil.isName(method) == false) { throw new IllegalArgumentException("字符 " + method + " 不合法!" + "预编译模式下 @column:\"column0,column1:alias;function0(arg0,arg1,...);function1(...):alias...\"" + " 中SQL函数名 function 必须符合正则表达式 ^[0-9a-zA-Z_]+$ !"); } } boolean isColumn = start < 0; String[] ckeys = StringUtil.split(isColumn ? expression : expression.substring(start + 1, end)); String quote = getQuote(); // if (isPrepared()) { //不能通过 ? 来代替,SELECT 'id','name' 返回的就是 id:"id", name:"name",而不是数据库里的值! if (ckeys != null && ckeys.length > 0) { String origin; String alias; int index; for (int j = 0; j < ckeys.length; j++) { index = ckeys[j].lastIndexOf(":"); //StringUtil.split返回数组中,子项不会有null origin = index < 0 ? ckeys[j] : ckeys[j].substring(0, index); alias = index < 0 ? null : ckeys[j].substring(index + 1); if (isPrepared()) { if (isColumn) { if (StringUtil.isName(origin) == false || (alias != null && StringUtil.isName(alias) == false)) { throw new IllegalArgumentException("GET请求: 预编译模式下 @column:value 中 value里面用 , 分割的每一项" + " column:alias 中 column 必须是1个单词!如果有alias,则alias也必须为1个单词!并且不要有多余的空格!"); } } else { if ((StringUtil.isName(ckeys[j]) == false || ckeys[j].startsWith("_"))) { throw new IllegalArgumentException("字符 " + ckeys[j] + " 不合法!" + "预编译模式下 @column:\"column0,column1:alias;function0(arg0,arg1,...);function1(...):alias...\"" + " 中所有 arg 都必须是1个不以 _ 开头的单词!并且不要有空格!"); } } } //JOIN 副表不再在外层加副表名前缀 userId AS `Commet.userId`, 而是直接 userId AS `userId` origin = quote + origin + quote; if (isKeyPrefix()) { ckeys[j] = tableAlias + "." + origin; // if (isColumn) { // ckeys[j] += " AS " + quote + (isMain() ? "" : tableAlias + ".") + (StringUtil.isEmpty(alias, true) ? origin : alias) + quote; // } if (isColumn && StringUtil.isEmpty(alias, true) == false) { ckeys[j] += " AS " + quote + alias + quote; } } else { ckeys[j] = origin + (StringUtil.isEmpty(alias, true) ? 
"" : " AS " + quote + alias + quote); } } // } } if (isColumn) { keys[i] = StringUtil.getString(ckeys); } else { String suffix = expression.substring(end + 1, expression.length()); //:contactCount String alias = suffix.startsWith(":") ? suffix.substring(1) : null; //contactCount if (StringUtil.isEmpty(alias, true)) { if (suffix.isEmpty() == false) { throw new IllegalArgumentException("GET请求: 预编译模式下 @column:value 中 value里面用 ; 分割的每一项" + " function(arg0,arg1,...):alias 中 alias 如果有就必须是1个单词!并且不要有多余的空格!"); } } else { if (StringUtil.isEmpty(alias, true) == false && StringUtil.isName(alias) == false) { throw new IllegalArgumentException("GET请求: 预编译模式下 @column:value 中 value里面用 ; 分割的每一项" + " function(arg0,arg1,...):alias 中 alias 必须是1个单词!并且不要有多余的空格!"); } } String origin = method + "(" + StringUtil.getString(ckeys) + ")"; // if (isKeyPrefix()) { // keys[i] = origin + " AS " + quote + (isMain() ? "" : tableAlias + ".") + (StringUtil.isEmpty(alias, true) ? method : alias) + quote; // } // else { keys[i] = origin + (StringUtil.isEmpty(alias, true) ? "" : " AS " + quote + alias + quote); // } } } String c = StringUtil.getString(keys); return (c.contains(":") == false ? c : c.replaceAll(":", " AS ")) + (StringUtil.isEmpty(joinColumn, true) ? "" : ", " + joinColumn);//不能在这里改,后续还要用到: default: throw new UnsupportedOperationException( "服务器内部错误:getColumnString 不支持 " + RequestMethod.getName(getMethod()) + " 等 [GET,GETS,HEAD,HEADS,POST] 外的ReuqestMethod!" ); } } @Override public List<List<Object>> getValues() { return values; } @JSONField(serialize = false) public String getValuesString() { String s = ""; if (values != null && values.size() > 0) { Object[] items = new Object[values.size()]; List<Object> vs; for (int i = 0; i < values.size(); i++) { vs = values.get(i); if (vs == null) { continue; } items[i] = "("; for (int j = 0; j < vs.size(); j++) { items[i] += ((j <= 0 ? 
"" : ",") + getValue(vs.get(j))); } items[i] += ")"; } s = StringUtil.getString(items); } return s; } @Override public AbstractSQLConfig setValues(List<List<Object>> valuess) { this.values = valuess; return this; } @Override public Map<String, Object> getContent() { return content; } @Override public AbstractSQLConfig setContent(Map<String, Object> content) { this.content = content; return this; } @Override public int getCount() { return count; } @Override public AbstractSQLConfig setCount(int count) { this.count = count; return this; } @Override public int getPage() { return page; } @Override public AbstractSQLConfig setPage(int page) { this.page = page; return this; } @Override public int getPosition() { return position; } @Override public AbstractSQLConfig setPosition(int position) { this.position = position; return this; } @Override public int getQuery() { return query; } @Override public AbstractSQLConfig setQuery(int query) { this.query = query; return this; } @Override public int getType() { return type; } @Override public AbstractSQLConfig setType(int type) { this.type = type; return this; } @Override public int getCache() { return cache; } @Override public AbstractSQLConfig setCache(int cache) { this.cache = cache; return this; } public AbstractSQLConfig setCache(String cache) { int cache2; if (cache == null) { cache2 = JSONRequest.CACHE_ALL; } else { // if (isSubquery) { // throw new IllegalArgumentException("子查询内不支持传 " + JSONRequest.KEY_CACHE + "!"); // } switch (cache) { case "0": case JSONRequest.CACHE_ALL_STRING: cache2 = JSONRequest.CACHE_ALL; break; case "1": case JSONRequest.CACHE_ROM_STRING: cache2 = JSONRequest.CACHE_ROM; break; case "2": case JSONRequest.CACHE_RAM_STRING: cache2 = JSONRequest.CACHE_RAM; break; default: throw new IllegalArgumentException(getTable() + "/" + JSONRequest.KEY_CACHE + ":value 中 value 的值不合法!必须在 [0,1,2] 或 [ALL, ROM, RAM] 内 !"); } } setCache(cache2); return this; } @Override public boolean isExplain() { return explain; } @Override public AbstractSQLConfig setExplain(boolean explain) { this.explain = explain; return this; } @Override public List<Join> getJoinList() { return joinList; } @Override public SQLConfig setJoinList(List<Join> joinList) { this.joinList = joinList; return this; } @Override public boolean hasJoin() { return joinList != null && joinList.isEmpty() == false; } @Override public boolean isTest() { return test; } @Override public AbstractSQLConfig setTest(boolean test) { this.test = test; return this; } /**获取初始位置offset * @return */ @JSONField(serialize = false) public int getOffset() { return getOffset(getPage(), getCount()); } /**获取初始位置offset * @param page * @param count * @return */ public static int getOffset(int page, int count) { return page*count; } /**获取限制数量 * @return */ @JSONField(serialize = false) public String getLimitString() { return getLimitString(getPage(), getCount());// + 1); } /**获取限制数量 * @param limit * @return */ public static String getLimitString(int page, int count) { return count <= 0 ? "" : " LIMIT " + count + " OFFSET " + getOffset(page, count); } //WHERE <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< @Override public Map<String, Object> getWhere() { return where; } @Override public AbstractSQLConfig setWhere(Map<String, Object> where) { this.where = where; return this; } @NotNull @Override public Map<String, List<String>> getCombine() { List<String> andList = combine == null ? null : combine.get("&"); if (andList == null) { andList = where == null ? 
new ArrayList<String>() : new ArrayList<String>(where.keySet()); if (combine == null) { combine = new HashMap<>(); } combine.put("&", andList); } return combine; } @Override public AbstractSQLConfig setCombine(Map<String, List<String>> combine) { this.combine = combine; return this; } /** * noFunctionChar = false * @param key * @return */ @JSONField(serialize = false) @Override public Object getWhere(String key) { return getWhere(key, false); } /** * @param key * @param exactMatch * @return */ @JSONField(serialize = false) @Override public Object getWhere(String key, boolean exactMatch) { if (exactMatch) { return where == null ? null : where.get(key); } Set<String> set = key == null || where == null ? null : where.keySet(); if (set != null) { synchronized (where) { if (where != null) { int index; for (String k : set) { index = k.indexOf(key); if (index >= 0 && StringUtil.isName(k.substring(index)) == false) { return where.get(k); } } } } } return null; } @Override public AbstractSQLConfig putWhere(String key, Object value, boolean prior) { if (key != null) { if (where == null) { where = new LinkedHashMap<String, Object>(); } if (value == null) { where.remove(key); } else { where.put(key, value); } combine = getCombine(); List<String> andList = combine.get("&"); if (value == null) { andList.remove(key); } else if (andList == null || andList.contains(key) == false) { int i = 0; if (andList == null) { andList = new ArrayList<>(); } else if (prior && andList.isEmpty() == false) { String idKey = getIdKey(); String idInKey = idKey + "{}"; String userIdKey = getUserIdKey(); String userIdInKey = userIdKey + "{}"; if (andList.contains(idKey)) { i ++; } if (andList.contains(idInKey)) { i ++; } if (andList.contains(userIdKey)) { i ++; } if (andList.contains(userIdInKey)) { i ++; } } if (prior) { andList.add(i, key); //userId的优先级不能比id高 0, key); } else { andList.add(key); //AbstractSQLExecutor.onPutColumn里getSQL,要保证缓存的SQL和查询的SQL里 where 的 key:value 顺序一致 } } combine.put("&", andList); } return this; } /**获取WHERE * @return * @throws Exception */ @JSONField(serialize = false) @Override public String getWhereString(boolean hasPrefix) throws Exception { return getWhereString(hasPrefix, getMethod(), getWhere(), getCombine(), getJoinList(), ! isTest()); } /**获取WHERE * @param method * @param where * @return * @throws Exception */ @JSONField(serialize = false) public String getWhereString(boolean hasPrefix, RequestMethod method, Map<String, Object> where, Map<String, List<String>> combine, List<Join> joinList, boolean verifyName) throws Exception { Set<Entry<String, List<String>>> combineSet = combine == null ? null : combine.entrySet(); if (combineSet == null || combineSet.isEmpty()) { Log.w(TAG, "getWhereString combineSet == null || combineSet.isEmpty() >> return \"\";"); return ""; } List<String> keyList; String whereString = ""; boolean isCombineFirst = true; int logic; boolean isItemFirst; String c; String cs; for (Entry<String, List<String>> ce : combineSet) { keyList = ce == null ? null : ce.getValue(); if (keyList == null || keyList.isEmpty()) { continue; } if ("|".equals(ce.getKey())) { logic = Logic.TYPE_OR; } else if ("!".equals(ce.getKey())) { logic = Logic.TYPE_NOT; } else { logic = Logic.TYPE_AND; } isItemFirst = true; cs = ""; for (String key : keyList) { c = getWhereItem(key, where.get(key), method, verifyName); if (StringUtil.isEmpty(c, true)) {//避免SQL条件连接错误 continue; } cs += (isItemFirst ? "" : (Logic.isAnd(logic) ? 
AND : OR)) + "(" + c + ")"; isItemFirst = false; } whereString += (isCombineFirst ? "" : AND) + (Logic.isNot(logic) ? NOT : "") + " ( " + cs + " ) "; isCombineFirst = false; } if (joinList != null) { String newWs = ""; String ws = "" + whereString; List<Object> newPvl = new ArrayList<>(); List<Object> pvl = new ArrayList<>(preparedValueList); SQLConfig jc; String js; boolean changed = false; //各种 JOIN 没办法统一用 & | !连接,只能按优先级,和 @combine 一样? for (Join j : joinList) { switch (j.getJoinType()) { case "@": // APP JOIN case "<": // LEFT JOIN case ">": // RIGHT JOIN break; case "": // FULL JOIN case "|": // FULL JOIN 不支持 <>, [] ,避免太多符号 case "&": // INNER JOIN case "!": // OUTTER JOIN case "^": // SIDE JOIN case "*": // CROSS JOIN jc = j.getJoinConfig(); boolean isMain = jc.isMain(); jc.setMain(false).setPrepared(isPrepared()).setPreparedValueList(new ArrayList<Object>()); js = jc.getWhereString(false); jc.setMain(isMain); if (StringUtil.isEmpty(js, true)) { continue; } if (StringUtil.isEmpty(newWs, true) == false) { newWs += AND; } if ("^".equals(j.getJoinType())) { // (A & ! B) | (B & ! A) newWs += " ( ( " + ws + ( StringUtil.isEmpty(ws, true) ? "" : AND + NOT ) + " ( " + js + " ) ) " + OR + " ( " + js + AND + NOT + " ( " + ws + " ) ) ) "; newPvl.addAll(pvl); newPvl.addAll(jc.getPreparedValueList()); newPvl.addAll(jc.getPreparedValueList()); newPvl.addAll(pvl); } else { logic = Logic.getType(j.getJoinType()); newWs += " ( " + getCondition( Logic.isNot(logic), ws + ( StringUtil.isEmpty(ws, true) ? "" : (Logic.isAnd(logic) ? AND : OR) ) + " ( " + js + " ) " ) + " ) "; newPvl.addAll(pvl); newPvl.addAll(jc.getPreparedValueList()); } changed = true; break; default: throw new UnsupportedOperationException("join:value 中 value 里的 " + j.getJoinType() + "/" + j.getPath() + "错误!不支持 " + j.getJoinType() + " 等 [@ APP, < LEFT, > RIGHT, | FULL, & INNER, ! OUTTER, ^ SIDE, * CROSS] 之外的JOIN类型 !"); } } if (changed) { whereString = newWs; preparedValueList = newPvl; } } String s = whereString.isEmpty() ? "" : (hasPrefix ? " WHERE " : "") + whereString; if (s.isEmpty() && RequestMethod.isQueryMethod(method) == false) { throw new UnsupportedOperationException("写操作请求必须带条件!!!"); } return s; } /** * @param key * @param value * @param method * @param verifyName * @return * @throws Exception */ private String getWhereItem(String key, Object value , RequestMethod method, boolean verifyName) throws Exception { Log.d(TAG, "getWhereItem key = " + key); //避免筛选到全部 value = key == null ? null : where.get(key); if (key == null || value == null || key.startsWith("@") || key.endsWith("()")) {//关键字||方法, +或-直接报错 Log.d(TAG, "getWhereItem key == null || value == null" + " || key.startsWith(@) || key.endsWith(()) >> continue;"); return null; } if (key.endsWith("@")) {//引用 // key = key.substring(0, key.lastIndexOf("@")); throw new IllegalArgumentException(TAG + ".getWhereItem: 字符 " + key + " 不合法!"); } int keyType; if (key.endsWith("$")) { keyType = 1; } else if (key.endsWith("~") || key.endsWith("?")) { //TODO ?可能以后会被废弃,全用 ~ 和 *~ 替代,更接近 PostgreSQL 语法 keyType = key.charAt(key.length() - 2) == '*' ? -2 : 2; //FIXME StringIndexOutOfBoundsException } else if (key.endsWith("%")) { keyType = 3; } else if (key.endsWith("{}")) { keyType = 4; } else if (key.endsWith("}{")) { keyType = 5; } else if (key.endsWith("<>")) { keyType = 6; } else if (key.endsWith(">=")) { keyType = 7; } else if (key.endsWith("<=")) { keyType = 8; } else if (key.endsWith(">")) { keyType = 9; } else if (key.endsWith("<")) { keyType = 10; } else { //else绝对不能省,避免再次踩坑! 
keyType = 0; 写在for循环外面都没注意! keyType = 0; } key = getRealKey(method, key, false, true, verifyName, getQuote()); switch (keyType) { case 1: return getSearchString(key, value); case -2: case 2: return getRegExpString(key, value, keyType < 0); case 3: return getBetweenString(key, value); case 4: return getRangeString(key, value); case 5: return getExistsString(key, value); case 6: return getContainString(key, value); case 7: return getCompareString(key, value, ">="); case 8: return getCompareString(key, value, "<="); case 9: return getCompareString(key, value, ">"); case 10: return getCompareString(key, value, "<"); default: //TODO MySQL JSON类型的字段对比 key='[]' 会无结果! key LIKE '[1, 2, 3]' //TODO MySQL , 后面有空格! return getEqualString(key, value); } } @JSONField(serialize = false) public String getEqualString(String key, Object value) throws Exception { if (JSON.isBooleanOrNumberOrString(value) == false && value instanceof Subquery == false) { throw new IllegalArgumentException(key + ":value 中value不合法!非PUT请求只支持 [Boolean, Number, String] 内的类型 !"); } boolean not = key.endsWith("!"); // & | 没有任何意义,写法多了不好控制 if (not) { key = key.substring(0, key.length() - 1); } if (StringUtil.isName(key) == false) { throw new IllegalArgumentException(key + ":value 中key不合法!不支持 ! 以外的逻辑符 !"); } return getKey(key) + (not ? " != " : " = ") + (value instanceof Subquery ? getSubqueryString((Subquery) value) : getValue(value)); } @JSONField(serialize = false) public String getCompareString(String key, Object value, String type) throws Exception { if (JSON.isBooleanOrNumberOrString(value) == false && value instanceof Subquery == false) { throw new IllegalArgumentException(key + type + ":value 中value不合法!比较运算 [>, <, >=, <=] 只支持 [Boolean, Number, String] 内的类型 !"); } if (StringUtil.isName(key) == false) { throw new IllegalArgumentException(key + type + ":value 中key不合法!比较运算 [>, <, >=, <=] 不支持 [&, !, |] 中任何逻辑运算符 !"); } return getKey(key) + " " + type + " " + (value instanceof Subquery ? getSubqueryString((Subquery) value) : getValue(value)); } public String getKey(String key) { String q = getQuote(); return (isKeyPrefix() ? getAlias() + "." : "") + q + key + q; } /** * 使用prepareStatement预编译,值为 ? ,后续动态set进去 */ private List<Object> preparedValueList = new ArrayList<>(); private Object getValue(@NotNull Object value) { if (isPrepared()) { preparedValueList.add(value); return "?"; } // return (value instanceof Number || value instanceof Boolean) && DATABASE_POSTGRESQL.equals(getDatabase()) ? value : "'" + value + "'"; return (value instanceof Number || value instanceof Boolean) ? 
value : "'" + value + "'"; //MySQL 隐式转换用不了索引 } @Override public List<Object> getPreparedValueList() { return preparedValueList; } @Override public AbstractSQLConfig setPreparedValueList(List<Object> preparedValueList) { this.preparedValueList = preparedValueList; return this; } //$ search <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**search key match value * @param in * @return {@link #getSearchString(String, Object[], int)} * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getSearchString(String key, Object value) throws IllegalArgumentException { if (value == null) { return ""; } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getSearchString key = " + key); JSONArray arr = newJSONArray(value); if (arr.isEmpty()) { return ""; } return getSearchString(key, arr.toArray(), logic.getType()); } /**search key match values * @param in * @return LOGIC [ key LIKE 'values[i]' ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getSearchString(String key, Object[] values, int type) throws IllegalArgumentException { if (values == null || values.length <= 0) { return ""; } String condition = ""; for (int i = 0; i < values.length; i++) { if (values[i] instanceof String == false) { throw new IllegalArgumentException(key + "$:value 中value的类型只能为String或String[]!"); } condition += (i <= 0 ? "" : (Logic.isAnd(type) ? AND : OR)) + getLikeString(key, values[i]); } return getCondition(Logic.isNot(type), condition); } /**WHERE key LIKE 'value' * @param key * @param value * @return key LIKE 'value' */ @JSONField(serialize = false) public String getLikeString(String key, Object value) { return getKey(key) + " LIKE " + getValue(value); } //$ search >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //~ regexp <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**search key match RegExp values * @param key * @param value * @param ignoreCase * @return {@link #getRegExpString(String, Object[], int, boolean)} * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getRegExpString(String key, Object value, boolean ignoreCase) throws IllegalArgumentException { if (value == null) { return ""; } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getRegExpString key = " + key); JSONArray arr = newJSONArray(value); if (arr.isEmpty()) { return ""; } return getRegExpString(key, arr.toArray(), logic.getType(), ignoreCase); } /**search key match RegExp values * @param key * @param values * @param type * @param ignoreCase * @return LOGIC [ key REGEXP 'values[i]' ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getRegExpString(String key, Object[] values, int type, boolean ignoreCase) throws IllegalArgumentException { if (values == null || values.length <= 0) { return ""; } String condition = ""; for (int i = 0; i < values.length; i++) { if (values[i] instanceof String == false) { throw new IllegalArgumentException(key + "$:value 中value的类型只能为String或String[]!"); } condition += (i <= 0 ? "" : (Logic.isAnd(type) ? 
AND : OR)) + getRegExpString(key, (String) values[i], ignoreCase); } return getCondition(Logic.isNot(type), condition); } /**WHERE key REGEXP 'value' * @param key * @param value * @param ignoreCase * @return key REGEXP 'value' */ @JSONField(serialize = false) public String getRegExpString(String key, String value, boolean ignoreCase) { if (DATABASE_POSTGRESQL.equals(getDatabase())) { return getKey(key) + " ~" + (ignoreCase ? "* " : " ") + getValue(value); } return getKey(key) + " REGEXP " + (ignoreCase ? "" : "BINARY ") + getValue(value); } //~ regexp >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //% between <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**WHERE key BETWEEN 'start' AND 'end' * @param key * @param value 'start,end' * @return LOGIC [ key BETWEEN 'start' AND 'end' ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getBetweenString(String key, Object value) throws IllegalArgumentException { if (value == null) { return ""; } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getBetweenString key = " + key); JSONArray arr = newJSONArray(value); if (arr.isEmpty()) { return ""; } return getBetweenString(key, arr.toArray(), logic.getType()); } /**WHERE key BETWEEN 'start' AND 'end' * @param key * @param value 'start,end' TODO 在 '1,2' 和 ['1,2', '3,4'] 基础上新增支持 [1, 2] 和 [[1,2], [3,4]] ? * @return LOGIC [ key BETWEEN 'start' AND 'end' ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getBetweenString(String key, Object[] values, int type) throws IllegalArgumentException { if (values == null || values.length <= 0) { return ""; } String condition = ""; String[] vs; for (int i = 0; i < values.length; i++) { if (values[i] instanceof String == false) { throw new IllegalArgumentException(key + "%:value 中 value 的类型只能为 String 或 String[] !"); } vs = StringUtil.split((String) values[i]); if (vs == null || vs.length != 2) { throw new IllegalArgumentException(key + "%:value 中 value 不合法!类型为 String 时必须包括1个逗号 , 且左右两侧都有值!类型为 String[] 里面每个元素要符合前面类型为 String 的规则 !"); } condition += (i <= 0 ? "" : (Logic.isAnd(type) ? AND : OR)) + "(" + getBetweenString(key, vs[0], vs[1]) + ")"; } return getCondition(Logic.isNot(type), condition); } /**WHERE key BETWEEN 'start' AND 'end' * @param key * @param value 'start,end' TODO 在 '1,2' 和 ['1,2', '3,4'] 基础上新增支持 [1, 2] 和 [[1,2], [3,4]] ? * @return key BETWEEN 'start' AND 'end' * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getBetweenString(String key, Object start, Object end) throws IllegalArgumentException { if (JSON.isBooleanOrNumberOrString(start) == false || JSON.isBooleanOrNumberOrString(end) == false) { throw new IllegalArgumentException(key + "%:value 中 value 不合法!类型为 String 时必须包括1个逗号 , 且左右两侧都有值!类型为 String[] 里面每个元素要符合前面类型为 String 的规则 !"); } return getKey(key) + " BETWEEN " + getValue(start) + AND + getValue(end); } //% between >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //{} range <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< // * 和 / 不能同时出现,防止 /* */ 段注释! # 和 -- 不能出现,防止行注释! ; 不能出现,防止隔断SQL语句!空格不能出现,防止 CRUD,DROP,SHOW TABLES等语句! private static final Pattern PATTERN_RANGE; private static final Pattern PATTERN_HAVING; private static final Pattern PATTERN_HAVING_SUFFIX; static { PATTERN_RANGE = Pattern.compile("^[0-9%!=<>,]+$"); // ^[a-zA-Z0-9_*%!=<>(),"]+$ 导致 exists(select*from(Comment)) 通过! 
PATTERN_HAVING = Pattern.compile("^[A-Za-z0-9%!=<>]+$"); //TODO 改成更好的正则,校验前面为单词,中间为操作符,后面为值 PATTERN_HAVING_SUFFIX = Pattern.compile("^[0-9%!=<>]+$"); // ^[a-zA-Z0-9_*%!=<>(),"]+$ 导致 exists(select*from(Comment)) 通过! } /**WHERE key > 'key0' AND key <= 'key1' AND ... * @param key * @param range "condition0,condition1..." * @return key condition0 AND key condition1 AND ... * @throws Exception */ @JSONField(serialize = false) public String getRangeString(String key, Object range) throws Exception { Log.i(TAG, "getRangeString key = " + key); if (range == null) {//依赖的对象都没有给出有效值,这个存在无意义。如果是客户端传的,那就能在客户端确定了。 throw new NotExistException(TAG + "getRangeString(" + key + ", " + range + ") range == null"); } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getRangeString key = " + key); if (range instanceof List) { if (logic.isOr() || logic.isNot()) { return getKey(key) + getInString(key, ((List<?>) range).toArray(), logic.isNot()); } throw new IllegalArgumentException(key + "{}\":[] 中key末尾的逻辑运算符只能用'|','!'中的一种 !"); } else if (range instanceof String) {//非Number类型需要客户端拼接成 < 'value0', >= 'value1'这种 if (isPrepared() && PATTERN_RANGE.matcher((String) range).matches() == false) { throw new UnsupportedOperationException("字符串 " + range + " 不合法!预编译模式下 key{}:\"condition\" 中 condition 必须符合正则表达式 ^[0-9%!=<>,]+$ !不允许空格!"); } String[] conditions = StringUtil.split((String) range); String condition = ""; if (conditions != null) { int index; for (int i = 0; i < conditions.length; i++) {//对函数条件length(key)<=5这种不再在开头加key index = conditions[i] == null ? -1 : conditions[i].indexOf("("); condition += ((i <= 0 ? "" : (logic.isAnd() ? AND : OR))//连接方式 + (index >= 0 && index < conditions[i].indexOf(")") ? "" : getKey(key) + " ")//函数和非函数条件 + conditions[i]);//单个条件 } } if (condition.isEmpty()) { return ""; } return getCondition(logic.isNot(), condition); } else if (range instanceof Subquery) { //如果在 Parser 解析成 SQL 字符串再引用,没法保证安全性,毕竟可以再通过远程函数等方式来拼接再替代,最后引用的字符串就能注入 return getKey(key) + (logic.isNot() ? NOT : "") + " IN " + getSubqueryString((Subquery) range); } throw new IllegalArgumentException(key + "{}:range 类型为" + range.getClass().getSimpleName() + "!range 只能是 用','分隔条件的字符串 或者 可取选项JSONArray!"); } /**WHERE key IN ('key0', 'key1', ... ) * @param in * @return IN ('key0', 'key1', ... ) * @throws NotExistException */ @JSONField(serialize = false) public String getInString(String key, Object[] in, boolean not) throws NotExistException { String condition = ""; if (in != null) {//返回 "" 会导致 id:[] 空值时效果和没有筛选id一样! for (int i = 0; i < in.length; i++) { condition += ((i > 0 ? "," : "") + getValue(in[i])); } } if (condition.isEmpty()) {//条件如果存在必须执行,不能忽略。条件为空会导致出错,又很难保证条件不为空(@:条件),所以还是这样好 throw new NotExistException(TAG + ".getInString(" + key + ", [], " + not + ") >> condition.isEmpty() >> IN()"); } return (not ? NOT : "") + " IN (" + condition + ")"; } //{} range >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //}{ exists <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**WHERE EXISTS subquery * 如果合并到 getRangeString,一方面支持不了 [1,2,2] 和 ">1" (转成 EXISTS(SELECT IN ) 需要static newSQLConfig,但它不能传入子类实例,除非不是 static),另一方面多了子查询临时表性能会比 IN 差 * @param key * @param value * @return EXISTS ALL(SELECT ...) 
* @throws NotExistException */ @JSONField(serialize = false) public String getExistsString(String key, Object value) throws Exception { if (value == null) { return ""; } if (value instanceof Subquery == false) { throw new IllegalArgumentException(key + "}{:subquery 类型为" + value.getClass().getSimpleName() + "!subquery 只能是 子查询JSONObejct!"); } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getExistsString key = " + key); return (logic.isNot() ? NOT : "") + " EXISTS " + getSubqueryString((Subquery) value); } //}{ exists >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //<> contain <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**WHERE key contains value * @param key * @param value * @return {@link #getContainString(String, Object[], int)} * @throws NotExistException */ @JSONField(serialize = false) public String getContainString(String key, Object value) throws IllegalArgumentException { if (value == null) { return ""; } Logic logic = new Logic(key); key = logic.getKey(); Log.i(TAG, "getContainString key = " + key); return getContainString(key, newJSONArray(value).toArray(), logic.getType()); } /**WHERE key contains childs * @param key * @param childs null ? "" : (empty ? no child : contains childs) * @param type |, &, ! * @return LOGIC [ ( key LIKE '[" + childs[i] + "]' OR key LIKE '[" + childs[i] + ", %' * OR key LIKE '%, " + childs[i] + ", %' OR key LIKE '%, " + childs[i] + "]' ) ] * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getContainString(String key, Object[] childs, int type) throws IllegalArgumentException { boolean not = Logic.isNot(type); String condition = ""; if (childs != null) { for (int i = 0; i < childs.length; i++) { if (childs[i] != null) { if (childs[i] instanceof JSON) { throw new IllegalArgumentException(key + "<>:value 中value类型不能为JSON!"); } if (DATABASE_POSTGRESQL.equals(getDatabase())) { condition += (i <= 0 ? "" : (Logic.isAnd(type) ? AND : OR)) + getKey(key) + " @> " + getValue(newJSONArray(childs[i])); //operator does not exist: jsonb @> character varying "[" + childs[i] + "]"); } else { condition += (i <= 0 ? "" : (Logic.isAnd(type) ? AND : OR)) + "json_contains(" + getKey(key) + ", " + getValue(childs[i].toString()) + ")"; } } } if (condition.isEmpty()) { condition = (getKey(key) + SQL.isNull(true) + OR + getLikeString(key, "[]")); // key = '[]' 无结果! } else { condition = (getKey(key) + SQL.isNull(false) + AND + "(" + condition + ")"); } } if (condition.isEmpty()) { return ""; } return getCondition(not, condition); } //<> contain >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //key@:{} Subquery <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< @Override public String getSubqueryString(Subquery subquery) throws Exception { String range = subquery.getRange(); SQLConfig cfg = subquery.getConfig(); cfg.setPreparedValueList(new ArrayList<>()); String sql = (range == null || range.isEmpty() ? "" : range) + "(" + cfg.getSQL(isPrepared()) + ") "; preparedValueList.addAll(cfg.getPreparedValueList()); return sql; } //key@:{} Subquery >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> /**拼接条件 * @param not * @param condition * @return */ private static String getCondition(boolean not, String condition) { return not ? 
NOT + "(" + condition + ")" : condition; } /**转为JSONArray * @param tv * @return */ @NotNull public static JSONArray newJSONArray(Object obj) { JSONArray array = new JSONArray(); if (obj != null) { if (obj instanceof Collection) { array.addAll((Collection<?>) obj); } else { array.add(obj); } } return array; } //WHERE >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> //SET <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< /**获取SET * @return * @throws Exception */ @JSONField(serialize = false) public String getSetString() throws Exception { return getSetString(getMethod(), getContent(), ! isTest()); } /**获取SET * @param method * @param content * @return * @throws Exception */ @JSONField(serialize = false) public String getSetString(RequestMethod method, Map<String, Object> content, boolean verifyName) throws Exception { Set<String> set = content == null ? null : content.keySet(); String setString = ""; if (set != null && set.size() > 0) { String quote = getQuote(); boolean isFirst = true; int keyType = 0;// 0 - =; 1 - +, 2 - - Object value; String idKey = getIdKey(); for (String key : set) { //避免筛选到全部 value = key == null ? null : content.get(key); if (key == null || idKey.equals(key)) { continue; } if (key.endsWith("+")) { keyType = 1; } else if (key.endsWith("-")) { keyType = 2; } value = content.get(key); key = getRealKey(method, key, false, true, verifyName, quote); setString += (isFirst ? "" : ", ") + (getKey(key) + "=" + (keyType == 1 ? getAddString(key, value) : (keyType == 2 ? getRemoveString(key, value) : getValue(value)) ) ); isFirst = false; } } if (setString.isEmpty()) { throw new IllegalArgumentException("PUT 请求必须在Table内设置要修改的 key:value !"); } return " SET " + setString; } /**SET key = CONCAT (key, 'value') * @param key * @param value * @return CONCAT (key, 'value') * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getAddString(String key, Object value) throws IllegalArgumentException { if (value instanceof Number) { return getKey(key) + " + " + value; } if (value instanceof String) { return " CONCAT (" + getKey(key) + ", " + getValue(value) + ") "; } throw new IllegalArgumentException(key + "+ 对应的值 " + value + " 不是Number,String,Array中的任何一种!"); } /**SET key = replace(key, 'value', '') * @param key * @param value * @return REPLACE (key, 'value', '') * @throws IllegalArgumentException */ @JSONField(serialize = false) public String getRemoveString(String key, Object value) throws IllegalArgumentException { if (value instanceof Number) { return getKey(key) + " - " + value; } if (value instanceof String) { return SQL.replace(getKey(key), (String) getValue(value), "");// " replace(" + key + ", '" + value + "', '') "; } throw new IllegalArgumentException(key + "- 对应的值 " + value + " 不是Number,String,Array中的任何一种!"); } //SET >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> /** * @return * @throws Exception */ @JSONField(serialize = false) @Override public String getSQL(boolean prepared) throws Exception { return getSQL(this.setPrepared(prepared)); } /** * @param config * @return * @throws Exception */ public static String getSQL(AbstractSQLConfig config) throws Exception { if (config == null) { Log.i(TAG, "getSQL config == null >> return null;"); return null; } //TODO procedure 改为 List<Procedure> procedureList; behind : true; function: callFunction(); String key; ... // for (...) { Call procedure1();\n SQL \n; Call procedure2(); ... 
} // 貌似不需要,因为 ObjecParser 里就已经处理的顺序等,只是这里要解决下 Schema 问题。 String sch = config.getSQLSchema(config.getSQLTable()); if (StringUtil.isNotEmpty(config.getProcedure(), true)) { String q = config.getQuote(); return "CALL " + q + sch + q + "."+ config.getProcedure(); } String tablePath = config.getTablePath(); if (StringUtil.isNotEmpty(tablePath, true) == false) { Log.i(TAG, "getSQL StringUtil.isNotEmpty(tablePath, true) == false >> return null;"); return null; } switch (config.getMethod()) { case POST: return "INSERT INTO " + tablePath + config.getColumnString() + " VALUES" + config.getValuesString(); case PUT: return "UPDATE " + tablePath + config.getSetString() + config.getWhereString(true); case DELETE: return "DELETE FROM " + tablePath + config.getWhereString(true); default: config.setPreparedValueList(new ArrayList<Object>()); String column = config.getColumnString(); return (config.isExplain() ? "EXPLAIN " : "") + "SELECT " + (config.getCache() == JSONRequest.CACHE_RAM ? "SQL_NO_CACHE " : "") + column + " FROM " + getConditionString(column, tablePath, config); } } /**获取条件SQL字符串 * @param page * @param column * @param table * @param where * @return * @throws Exception */ private static String getConditionString(String column, String table, AbstractSQLConfig config) throws Exception { String where = config.getWhereString(true); Subquery from = config.getFrom(); if (from != null) { table = config.getSubqueryString(from) + " AS " + config.getAlias() + " "; //TODO Comment:c 转为 AS `Comment:c` } String condition = table + config.getJoinString() + where + ( RequestMethod.isGetMethod(config.getMethod(), true) == false ? "" : config.getGroupString(true) + config.getHavingString(true) + config.getOrderString(true) ) ; //+ config.getLimitString(); //no need to optimize // if (config.getPage() <= 0 || ID.equals(column.trim())) { return condition + config.getLimitString(); // } // // // //order: id+ -> id >= idOfStartIndex; id- -> id <= idOfStartIndex <<<<<<<<<<<<<<<<<<< // String order = StringUtil.getNoBlankString(config.getOrder()); // List<String> orderList = order.isEmpty() ? null : Arrays.asList(StringUtil.split(order)); // // int type = 0; // if (BaseModel.isEmpty(orderList) || BaseModel.isContain(orderList, ID+"+")) { // type = 1; // } // else if (BaseModel.isContain(orderList, ID+"-")) { // type = 2; // } // // if (type > 0) { // return condition.replace("WHERE", // "WHERE id " + (type == 1 ? ">=" : "<=") + " (SELECT id FROM " + table // + where + " ORDER BY id " + (type == 1 ? "ASC" : "DESC") + " LIMIT " + config.getOffset() + ", 1) AND" // ) // + " LIMIT " + config.getCount(); //子查询起始id不一定准确,只能作为最小可能! 
;// // } // //order: id+ -> id >= idOfStartIndex; id- -> id <= idOfStartIndex >>>>>>>>>>>>>>>>>> // // // //结果错误!SELECT * FROM User AS t0 INNER JOIN // (SELECT id FROM User ORDER BY date ASC LIMIT 20, 10) AS t1 ON t0.id = t1.id // //common case, inner join // condition += config.getLimitString(); // return table + " AS t0 INNER JOIN (SELECT id FROM " + condition + ") AS t1 ON t0.id = t1.id"; } private boolean keyPrefix; @Override public boolean isKeyPrefix() { return keyPrefix; } @Override public AbstractSQLConfig setKeyPrefix(boolean keyPrefix) { this.keyPrefix = keyPrefix; return this; } public String getJoinString() throws Exception { String joinOns = ""; if (joinList != null) { String quote = getQuote(); List<Object> pvl = new ArrayList<>(); boolean changed = false; String sql = null; SQLConfig jc; String jt; String tn; for (Join j : joinList) { if (j.isAppJoin()) { // APP JOIN,只是作为一个标记,执行完主表的查询后自动执行副表的查询 User.id IN($commentIdList) continue; } //LEFT JOIN sys.apijson_user AS User ON User.id = Moment.userId, 都是用 = ,通过relateType处理缓存 // <"INNER JOIN User ON User.id = Moment.userId", UserConfig> TODO AS 放 getSQLTable 内 jc = j.getJoinConfig(); jc.setPrepared(isPrepared()); jt = jc.getTable(); tn = j.getTargetName(); //如果要强制小写,则可在子类重写这个方法再 toLowerCase // if (DATABASE_POSTGRESQL.equals(getDatabase())) { // jt = jt.toLowerCase(); // tn = tn.toLowerCase(); // } switch (j.getJoinType()) { //TODO $ SELF JOIN // case "@": // APP JOIN // continue; case "<": // LEFT JOIN case ">": // RIGHT JOIN jc.setMain(true).setKeyPrefix(false); sql = ( ">".equals(j.getJoinType()) ? " RIGHT" : " LEFT") + " JOIN ( " + jc.getSQL(isPrepared()) + " ) AS " + quote + jt + quote + " ON " + quote + jt + quote + "." + quote + j.getKey() + quote + " = " + quote + tn + quote + "." + quote + j.getTargetKey() + quote; jc.setMain(false).setKeyPrefix(true); // preparedValueList.addAll(jc.getPreparedValueList()); pvl.addAll(jc.getPreparedValueList()); changed = true; break; case "": // FULL JOIN case "|": // FULL JOIN 不支持 <>, [] ,避免太多符号 case "&": // INNER JOIN case "!": // OUTTER JOIN case "^": // SIDE JOIN //场景少且性能差,默认禁用 case "*": // CROSS JOIN sql = ("*".equals(j.getJoinType()) ? " CROSS JOIN " : " INNER JOIN ") + jc.getTablePath() + " ON " + quote + jt + quote + "." + quote + j.getKey() + quote + " = " + quote + tn + quote + "." + quote + j.getTargetKey() + quote; break; default: throw new UnsupportedOperationException("join:value 中 value 里的 " + j.getJoinType() + "/" + j.getPath() + "错误!不支持 " + j.getJoinType() + " 等 [@ APP, < LEFT, > RIGHT, | FULL, & INNER, ! 
OUTTER, ^ SIDE, * CROSS] 之外的JOIN类型 !"); } joinOns += " \n " + sql; } if (changed) { pvl.addAll(preparedValueList); preparedValueList = pvl; } } return joinOns; } /**新建SQL配置 * @param table * @param request * @param joinList * @param isProcedure * @param callback * @return * @throws Exception */ public static AbstractSQLConfig newSQLConfig(RequestMethod method, String table, JSONObject request, List<Join> joinList, boolean isProcedure, Callback callback) throws Exception { if (request == null) { // User:{} 这种空内容在查询时也有效 throw new NullPointerException(TAG + ": newSQLConfig request == null!"); } AbstractSQLConfig config = callback.getSQLConfig(method, table); String database = request.getString(KEY_DATABASE); String schema = request.getString(KEY_SCHEMA); config.setDatabase(database); //不删,后面表对象还要用的,必须放在 parseJoin 前 config.setSchema(schema); //不删,后面表对象还要用的 //放后面会导致主表是空对象时 joinList 未解析 if (isProcedure == false) { config = parseJoin(method, config, joinList, callback); } if (request.isEmpty()) { // User:{} 这种空内容在查询时也有效 return config; //request.remove(key); 前都可以直接return,之后必须保证 put 回去 } if (isProcedure) { return config; } String idKey = callback.getIdKey(schema, table); String idInKey = idKey + "{}"; String userIdKey = callback.getUserIdKey(schema, table); String userIdInKey = userIdKey + "{}"; Object idIn = request.get(idInKey); //可能是 id{}:">0" if (method == POST) { if (idIn != null) { //不能在这里确定[]的长度,只能在外面传进来 if ((idIn instanceof List == false) || ((List<?>)idIn).isEmpty()) { // id{}:[] 表示同时插入多条记录 throw new IllegalArgumentException("POST请求,生成多条记录请用 id{}:[] ! [] 类型为JSONArray且不能为空!"); } } else if (request.get(idKey) == null) { request.put(idKey, callback.newId(method, table)); } } //对id和id{}处理,这两个一定会作为条件 Object id = request.get(idKey); if (id != null) { //null无效 if (id instanceof Number) { if (((Number) id).longValue() <= 0) { //一定没有值 throw new NotExistException(TAG + ": newSQLConfig " + table + ".id <= 0"); } } else if (id instanceof String) { if (StringUtil.isEmpty(id, true)) { //一定没有值 throw new NotExistException(TAG + ": newSQLConfig StringUtil.isEmpty(" + table + ".id, true)"); } } else if (id instanceof Subquery) {} else { throw new IllegalArgumentException(idKey + ":value 中 value 的类型只能是 Long , String 或 Subquery !"); } if (idIn instanceof List) { //共用idIn场景少性能差 boolean contains = false; List<?> ids = ((List<?>) idIn); Object d; for (int i = 0; i < ids.size(); i++) { //不用 idIn.contains(id) 因为 idIn 里存到很可能是 Integer,id 又是 Long! 
d = ids.get(i); if (d != null && id.toString().equals(d.toString())) { contains = true; break; } } if (contains == false) {//empty有效 BaseModel.isEmpty(idIn) == false) { throw new NotExistException(TAG + ": newSQLConfig idIn != null && (((List<?>) idIn).contains(id) == false"); } } } String role = request.getString(KEY_ROLE); boolean explain = request.getBooleanValue(KEY_EXPLAIN); String cache = request.getString(KEY_CACHE); String combine = request.getString(KEY_COMBINE); Subquery from = (Subquery) request.get(KEY_FROM); String column = request.getString(KEY_COLUMN); String group = request.getString(KEY_GROUP); String having = request.getString(KEY_HAVING); String order = request.getString(KEY_ORDER); //强制作为条件且放在最前面优化性能 request.remove(idKey); request.remove(idInKey); //关键词 request.remove(KEY_ROLE); request.remove(KEY_EXPLAIN); request.remove(KEY_CACHE); request.remove(KEY_DATABASE); request.remove(KEY_SCHEMA); request.remove(KEY_COMBINE); request.remove(KEY_FROM); request.remove(KEY_COLUMN); request.remove(KEY_GROUP); request.remove(KEY_HAVING); request.remove(KEY_ORDER); Map<String, Object> tableWhere = new LinkedHashMap<String, Object>();//保证顺序好优化 WHERE id > 1 AND name LIKE... //已经remove了id和id{},以及@key Set<String> set = request.keySet(); //前面已经判断request是否为空 if (method == POST) {//POST操作 if (set != null && set.isEmpty() == false) { //不能直接return,要走完下面的流程 List<Object> idList; if (id != null) { //单条记录 if (idIn != null) { throw new IllegalArgumentException("POST请求中 id 和 id{} 不能同时存在!"); } idList = new ArrayList<Object>(1); idList.add(id); } else { //多条记录 idList = new ArrayList<Object>((JSONArray) idIn); } //idIn不为空时,valuesString有多条,唯一的区别就是id String[] columns = set.toArray(new String[]{}); Collection<Object> valueCollection = request.values(); Object[] values = valueCollection == null ? null : valueCollection.toArray(); if (values == null || values.length != columns.length) { throw new Exception("服务器内部错误:\n" + TAG + " newSQLConfig values == null || values.length != columns.length !"); } column = idKey + "," + StringUtil.getString(columns); //set已经判断过不为空 final int size = columns.length + 1; //以key数量为准 List<List<Object>> valuess = new ArrayList<>(idList.size()); // [idList.size()][] List<Object> items; //(item0, item1, ...) for (int i = 0; i < idList.size(); i++) { items = new ArrayList<>(size); items.add(idList.get(i)); //第0个就是id for (int j = 1; j < size; j++) { items.add(values[j-1]); //从第1个开始,允许"null" } valuess.add(items); } config.setValues(valuess); } } else { //非POST操作 final boolean isWhere = method != PUT;//除了POST,PUT,其它全是条件!!! //条件<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< List<String> whereList = null; Map<String, List<String>> combineMap = new LinkedHashMap<>(); List<String> andList = new ArrayList<>(); List<String> orList = new ArrayList<>(); List<String> notList = new ArrayList<>(); //强制作为条件且放在最前面优化性能 if (id != null) { tableWhere.put(idKey, id); andList.add(idKey); } if (idIn != null) { tableWhere.put(idInKey, idIn); andList.add(idInKey); } String[] ws = StringUtil.split(combine); if (ws != null) { if (method == DELETE || method == GETS || method == HEADS) { throw new IllegalArgumentException("DELETE,GETS,HEADS 请求不允许传 @combine:\"conditons\" !"); } whereList = new ArrayList<>(); String w; for (int i = 0; i < ws.length; i++) { //去除 &,|,! 前缀 w = ws[i]; if (w != null) { if (w.startsWith("&")) { w = w.substring(1); andList.add(w); } else if (w.startsWith("|")) { if (method == PUT) { throw new IllegalArgumentException(table + ":{} 里的 @combine:value 中的value里条件 " + ws[i] + " 不合法!" 
+ "PUT请求的 @combine:\"key0,key1,...\" 不允许传 |key 或 !key !"); } w = w.substring(1); orList.add(w); } else if (w.startsWith("!")) { if (method == PUT) { throw new IllegalArgumentException(table + ":{} 里的 @combine:value 中的value里条件 " + ws[i] + " 不合法!" + "PUT请求的 @combine:\"key0,key1,...\" 不允许传 |key 或 !key !"); } w = w.substring(1); notList.add(w); } else { orList.add(w); } if (w.isEmpty()) { throw new IllegalArgumentException(table + ":{} 里的 @combine:value 中的value里条件 " + ws[i] + " 不合法!不允许为空值!"); } else { if (idKey.equals(w) || idInKey.equals(w) || userIdKey.equals(w) || userIdInKey.equals(w)) { throw new UnsupportedOperationException(table + ":{} 里的 @combine:value 中的value里 " + ws[i] + " 不合法!" + "不允许传 [" + idKey + ", " + idInKey + ", " + userIdKey + ", " + userIdInKey + "] 其中任何一个!"); } } whereList.add(w); } if (request.containsKey(w) == false) { throw new IllegalArgumentException(table + ":{} 里的 @combine:value 中的value里 " + ws[i] + " 对应的 " + w + " 不在它里面!"); } } } //条件>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> Map<String, Object> tableContent = new LinkedHashMap<String, Object>(); Object value; for (String key : set) { value = request.get(key); if (value instanceof Map) {//只允许常规Object throw new IllegalArgumentException("不允许 " + key + " 等任何key的value类型为 {JSONObject} !"); } //解决AccessVerifier新增userId没有作为条件,而是作为内容,导致PUT,DELETE出错 if (isWhere) { tableWhere.put(key, value); if (whereList == null || whereList.contains(key) == false) { andList.add(key); } } else if (whereList != null && whereList.contains(key)) { tableWhere.put(key, value); } else { tableContent.put(key, value);//一样 instanceof JSONArray ? JSON.toJSONString(value) : value); } } combineMap.put("&", andList); combineMap.put("|", orList); combineMap.put("!", notList); config.setCombine(combineMap); config.setContent(tableContent); } List<String> cs = new ArrayList<>(); String[] fks = StringUtil.split(column, ";"); // key0,key1;fun0(key0,...);fun1(key0,...);key3;fun2(key0,...) if (fks != null) { String[] ks; for (String fk : fks) { if (fk.contains("(")) { //fun0(key0,...) cs.add(fk); } else { //key0,key1... ks = StringUtil.split(fk); if (ks != null && ks.length > 0) { cs.addAll(Arrays.asList(ks)); } } } } config.setExplain(explain); config.setCache(cache); config.setFrom(from); config.setColumn(column == null ? 
null : cs); //解决总是 config.column != null,总是不能得到 * config.setWhere(tableWhere); config.setId(id); //在 tableWhere 第0个 config.setIdIn(idIn); config.setRole(role); config.setGroup(group); config.setHaving(having); config.setOrder(order); //TODO 解析JOIN,包括 @column,@group 等要合并 //后面还可能用到,要还原 //id或id{}条件 request.put(idKey, id); request.put(idInKey, idIn); //关键词 request.put(KEY_DATABASE, database); request.put(KEY_ROLE, role); request.put(KEY_EXPLAIN, explain); request.put(KEY_CACHE, cache); request.put(KEY_SCHEMA, schema); request.put(KEY_COMBINE, combine); request.put(KEY_FROM, from); request.put(KEY_COLUMN, column); request.put(KEY_GROUP, group); request.put(KEY_HAVING, having); request.put(KEY_ORDER, order); return config; } /** * @param method * @param config * @param joinList * @param callback * @return * @throws Exception */ public static AbstractSQLConfig parseJoin(RequestMethod method, AbstractSQLConfig config, List<Join> joinList, Callback callback) throws Exception { boolean isQuery = RequestMethod.isQueryMethod(method); config.setKeyPrefix(isQuery && config.isMain() == false); //TODO 解析出 SQLConfig 再合并 column, order, group 等 if (joinList == null || joinList.isEmpty() || RequestMethod.isQueryMethod(method) == false) { return config; } String name; for (Join j : joinList) { name = j.getName(); //JOIN子查询不能设置LIMIT,因为ON关系是在子查询后处理的,会导致结果会错误 SQLConfig joinConfig = newSQLConfig(method, name, j.getTable(), null, false, callback); SQLConfig cacheConfig = newSQLConfig(method, name, j.getTable(), null, false, callback).setCount(1); if (j.isAppJoin() == false) { //除了 @ APP JOIN,其它都是 SQL JOIN,则副表要这样配置 if (joinConfig.getDatabase() == null) { joinConfig.setDatabase(config.getDatabase()); //解决主表 JOIN 副表,引号不一致 } else if (joinConfig.getDatabase().equals(config.getDatabase()) == false) { throw new IllegalArgumentException("主表 " + config.getTable() + " 的 @database:" + config.getDatabase() + " 和它 SQL JOIN 的副表 " + name + " 的 @database:" + joinConfig.getDatabase() + " 不一致!"); } if (joinConfig.getSchema() == null) { joinConfig.setSchema(config.getSchema()); //主表 JOIN 副表,默认 schema 一致 } cacheConfig.setDatabase(joinConfig.getDatabase()).setSchema(joinConfig.getSchema()); //解决主表 JOIN 副表,引号不一致 if (isQuery) { config.setKeyPrefix(true); } joinConfig.setMain(false).setKeyPrefix(true); if (j.isLeftOrRightJoin()) { SQLConfig outterConfig = newSQLConfig(method, name, j.getOutter(), null, false, callback); outterConfig.setMain(false).setKeyPrefix(true).setDatabase(joinConfig.getDatabase()).setSchema(joinConfig.getSchema()); //解决主表 JOIN 副表,引号不一致 j.setOutterConfig(outterConfig); } } //解决 query: 1/2 查数量时报错 /* SELECT count(*) AS count FROM sys.Moment AS Moment LEFT JOIN ( SELECT count(*) AS count FROM sys.Comment ) AS Comment ON Comment.momentId = Moment.id LIMIT 1 OFFSET 0 */ if (RequestMethod.isHeadMethod(method, true)) { joinConfig.setMethod(GET); //子查询不能为 SELECT count(*) ,而应该是 SELECT momentId joinConfig.setColumn(Arrays.asList(j.getKey())); //优化性能,不取非必要的字段 cacheConfig.setMethod(GET); //子查询不能为 SELECT count(*) ,而应该是 SELECT momentId cacheConfig.setColumn(Arrays.asList(j.getKey())); //优化性能,不取非必要的字段 } j.setJoinConfig(joinConfig); j.setCacheConfig(cacheConfig); } config.setJoinList(joinList); return config; } /**获取客户端实际需要的key * verifyName = true * @param method * @param originKey * @param isTableKey * @param saveLogic 保留逻辑运算符 & | ! 
* @return */ public static String getRealKey(RequestMethod method, String originKey , boolean isTableKey, boolean saveLogic, String quote) throws Exception { return getRealKey(method, originKey, isTableKey, saveLogic, true, quote); } /**获取客户端实际需要的key * @param method * @param originKey * @param isTableKey * @param saveLogic 保留逻辑运算符 & | ! * @param verifyName 验证key名是否符合代码变量/常量名 * @return */ public static String getRealKey(RequestMethod method, String originKey , boolean isTableKey, boolean saveLogic, boolean verifyName, String quote) throws Exception { Log.i(TAG, "getRealKey saveLogic = " + saveLogic + "; originKey = " + originKey); if (originKey == null || originKey.startsWith(quote) || zuo.biao.apijson.JSONObject.isArrayKey(originKey)) { Log.w(TAG, "getRealKey originKey == null || originKey.startsWith(`)" + " || zuo.biao.apijson.JSONObject.isArrayKey(originKey) >> return originKey;"); return originKey; } String key = new String(originKey); if (key.endsWith("$")) {//搜索 LIKE,查询时处理 key = key.substring(0, key.length() - 1); } else if (key.endsWith("~") || key.endsWith("?")) {//匹配正则表达式 REGEXP,查询时处理 TODO ?可能以后会被废弃,全用 ~ 和 *~ 替代,更接近 PostgreSQL 语法 key = key.substring(0, key.length() - 1); if (key.endsWith("*")) {//忽略大小写 key = key.substring(0, key.length() - 1); } } else if (key.endsWith("%")) {//数字、文本、日期范围 BETWEEN AND key = key.substring(0, key.length() - 1); } else if (key.endsWith("{}")) {//被包含 IN,或者说key对应值处于value的范围内。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith("}{")) {//被包含 EXISTS,或者说key对应值处于value的范围内。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith("<>")) {//包含 json_contains,或者说value处于key对应值的范围内。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith("()")) {//方法,查询完后处理,先用一个Map<key,function>保存? key = key.substring(0, key.length() - 2); } else if (key.endsWith("@")) {//引用,引用对象查询完后处理。fillTarget中暂时不用处理,因为非GET请求都是由给定的id确定,不需要引用 key = key.substring(0, key.length() - 1); } else if (key.endsWith(">=")) {//比较。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith("<=")) {//比较。查询时处理 key = key.substring(0, key.length() - 2); } else if (key.endsWith(">")) {//比较。查询时处理 key = key.substring(0, key.length() - 1); } else if (key.endsWith("<")) {//比较。查询时处理 key = key.substring(0, key.length() - 1); } else if (key.endsWith("+")) {//延长,PUT查询时处理 if (method == PUT) {//不为PUT就抛异常 key = key.substring(0, key.length() - 1); } } else if (key.endsWith("-")) {//缩减,PUT查询时处理 if (method == PUT) {//不为PUT就抛异常 key = key.substring(0, key.length() - 1); } } String last = null;//不用Logic优化代码,否则 key 可能变为 key| 导致 key=value 变成 key|=value 而出错 if (RequestMethod.isQueryMethod(method)) {//逻辑运算符仅供GET,HEAD方法使用 last = key.isEmpty() ? "" : key.substring(key.length() - 1); if ("&".equals(last) || "|".equals(last) || "!".equals(last)) { key = key.substring(0, key.length() - 1); } else { last = null;//避免key + StringUtil.getString(last)错误延长 } } //"User:toUser":User转换"toUser":User, User为查询同名Table得到的JSONObject。交给客户端处理更好 if (isTableKey) {//不允许在column key中使用Type:key形式 key = Pair.parseEntry(key, true).getKey();//table以左边为准 } else { key = Pair.parseEntry(key).getValue();//column以右边为准 } if (verifyName && StringUtil.isName(key.startsWith("@") ? key.substring(1) : key) == false) { throw new IllegalArgumentException(method + "请求,字符 " + originKey + " 不合法!" 
+ " key:value 中的key只能关键词 '@key' 或 'key[逻辑符][条件符]' 或 PUT请求下的 'key+' / 'key-' !"); } if (saveLogic && last != null) { key = key + last; } Log.i(TAG, "getRealKey return key = " + key); return key; } public static interface Callback { /**获取 SQLConfig 的实例 * @param method * @param table * @return */ AbstractSQLConfig getSQLConfig(RequestMethod method, String table); /**为 post 请求新建 id, 只能是 Long 或 String * @param method * @param table * @return */ Object newId(RequestMethod method, String table); /**获取主键名 * @param schema * @param table * @return */ String getIdKey(String schema, String table); /**获取 User 的主键名 * @param schema * @param table * @return */ String getUserIdKey(String schema, String table); } public static abstract class SimpleCallback implements Callback { @Override public Object newId(RequestMethod method, String table) { return System.currentTimeMillis(); } @Override public String getIdKey(String schema, String table) { return KEY_ID; } @Override public String getUserIdKey(String schema, String table) { return KEY_USER_ID; } } }
Server: add support for the deduplication keyword DISTINCT
APIJSON-Java-Server/APIJSONORM/src/main/java/zuo/biao/apijson/server/AbstractSQLConfig.java
Server: add support for the deduplication keyword DISTINCT
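The commit message for this row (translated above) records that the server gained support for the SQL deduplication keyword DISTINCT. The diff itself is not part of this excerpt, so the snippet below only illustrates the SQL-level effect such a flag would have on the SELECT assembled by getSQL(); it does not assume anything about the request-side syntax or about how the flag is stored in AbstractSQLConfig.

```java
// Hypothetical flag and column values; only the SQL keyword semantics are being illustrated.
public class DistinctSketch {
    public static void main(String[] args) {
        boolean distinct = true;   // assumed flag, not taken from the shown sources
        String column = "userId";
        String table = "`sys`.`Comment`";
        String sql = "SELECT " + (distinct ? "DISTINCT " : "") + column + " FROM " + table;
        System.out.println(sql);   // SELECT DISTINCT userId FROM `sys`.`Comment`
    }
}
```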
Java
apache-2.0
3fa188426c563e423f0bfd230f6ab92e0b0a2422
0
ekumenlabs/AndroidStreamingClient,ekumenlabs/AndroidStreamingClient,creativa77/AndroidStreamingClient,creativa77/AndroidStreamingClient,ekumenlabs/AndroidStreamingClient,creativa77/AndroidStreamingClient
package com.c77.rtpmediaplayer.lib.rtp; import com.biasedbit.efflux.packet.DataPacket; import com.biasedbit.efflux.participant.RtpParticipantInfo; import com.biasedbit.efflux.session.RtpSession; import com.biasedbit.efflux.session.RtpSessionDataListener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.util.Collection; import java.util.Properties; import java.util.concurrent.ConcurrentSkipListMap; /** * Created by ashi on 1/13/15. */ public class RtpMediaBufferWithJitterAvoidance implements RtpSessionDataListener { public static final String DEBUGGING_PROPERTY = "DEBUGGING"; public static final java.lang.String FRAMES_WINDOW_PROPERTY = "FRAMES_WINDOW"; private State streamingState; private long lastTimestamp; private long maxTimeCycleTime = 0; private int counter = 0; private long sumTimeCycleTimes = 0; // Stream streamingState protected enum State { IDLE, // Just started. Didn't receive any packets yet CONFIGURING, // looking for frame delay STREAMING // Receiving packets } private static boolean DEBUGGING = false; private static long SENDING_DELAY = 30; private static long FRAMES_DELAY_MILLISECONDS = 500; private final RtpSessionDataListener upstream; private final DataPacketSenderThread dataPacketSenderThread; // frames sorted by their timestamp ConcurrentSkipListMap<Long, Frame> frames = new ConcurrentSkipListMap<Long, Frame>(); private Log log = LogFactory.getLog(RtpMediaBufferWithJitterAvoidance.class); private long downTimestampBound; private long upTimestampBound; RtpSession session; RtpParticipantInfo participant; public RtpMediaBufferWithJitterAvoidance(RtpSessionDataListener upstream) { this.upstream = upstream; streamingState = State.IDLE; dataPacketSenderThread = new DataPacketSenderThread(); } public RtpMediaBufferWithJitterAvoidance(RtpSessionDataListener upstream, Properties properties) { this.upstream = upstream; streamingState = State.IDLE; dataPacketSenderThread = new DataPacketSenderThread(); DEBUGGING = Boolean.parseBoolean(properties.getProperty(DEBUGGING_PROPERTY, "false")); FRAMES_DELAY_MILLISECONDS = Long.parseLong(properties.getProperty(FRAMES_WINDOW_PROPERTY, "800")); } @Override public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) { if (streamingState == State.IDLE) { this.session = session; this.participant = participant; lastTimestamp = getConvertedTimestamp(packet); downTimestampBound = lastTimestamp - FRAMES_DELAY_MILLISECONDS; upTimestampBound = downTimestampBound + SENDING_DELAY; streamingState = State.STREAMING; dataPacketSenderThread.start(); } // discard packets that are too late if (State.STREAMING == streamingState && getConvertedTimestamp(packet) < downTimestampBound) { if (DEBUGGING) { log.info("Discarded packet with timestamp " + getConvertedTimestamp(packet)); } return; } Frame frame = getFrameForPacket(packet); frames.put(new Long(frame.timestamp), frame); } public void logValues() { log.info("Average: " + sumTimeCycleTimes/counter); log.info("Max delay: " + maxTimeCycleTime); } private long getConvertedTimestamp(DataPacket packet) { return packet.getTimestamp() / 90; } private Frame getFrameForPacket(DataPacket packet) { Frame frame; long timestamp = getConvertedTimestamp(packet); if (frames.containsKey(timestamp)) { // if a frame with this timestamp already exists, add packet to it frame = frames.get(timestamp); // add packet to frame frame.addPacket(packet); } else { // if no frames with this timestamp exists, create a new one frame = new 
Frame(packet); } return frame; } private class Frame { private final long timestamp; // packets sorted by their sequence number ConcurrentSkipListMap<Integer, DataPacket> packets; /** * Create a frame from a packet * * @param packet */ public Frame(DataPacket packet) { packets = new ConcurrentSkipListMap<Integer, DataPacket>(); timestamp = getConvertedTimestamp(packet); packets.put(new Integer(packet.getSequenceNumber()), packet); } public void addPacket(DataPacket packet) { packets.put(new Integer(packet.getSequenceNumber()), packet); } public java.util.Collection<DataPacket> getPackets() { return packets.values(); } } public void stop() { if (dataPacketSenderThread != null) { dataPacketSenderThread.shutdown(); } } private class DataPacketSenderThread extends Thread { private boolean running = true; @Override public void run() { super.run(); long timeWhenCycleStarted; long delay; while (running) { if (RtpMediaBufferWithJitterAvoidance.State.STREAMING == streamingState) { timeWhenCycleStarted = System.currentTimeMillis(); // go through all the frames which timestamp is the range [downTimestampBound,upTimestampBound) for (ConcurrentSkipListMap.Entry<Long, Frame> entry : frames.entrySet()) { Frame frame = entry.getValue(); if (DEBUGGING) { log.info("Looking for frames between: [" + downTimestampBound + "," + upTimestampBound + ")"); } long timestamp = frame.timestamp; if (timestamp < upTimestampBound && timestamp >= downTimestampBound) { Collection<DataPacket> packets = frame.getPackets(); for (DataPacket packet : packets) { upstream.dataPacketReceived(session, participant, packet); } frames.remove(entry.getKey()); } else if (timestamp < downTimestampBound) { // remove old packages frames.remove(entry.getKey()); } } try { sleep(SENDING_DELAY); downTimestampBound = upTimestampBound; // use actual delay instead of SENDING_DELAY delay = (System.currentTimeMillis() - timeWhenCycleStarted); if (DEBUGGING) { log.info("actual delay: " + delay); maxTimeCycleTime = Math.max(delay, maxTimeCycleTime); sumTimeCycleTimes += delay; counter++; } upTimestampBound += delay; } catch (InterruptedException e) { log.error("Error while waiting to send next frame", e); } } if (DEBUGGING && counter == 100) { log.info(counter); logValues(); } } } public void shutdown() { running = false; } } }
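RtpMediaBufferWithJitterAvoidance above forwards only the frames whose converted timestamp falls inside the current window [downTimestampBound, upTimestampBound), drops anything older, and then slides the window forward by the measured cycle time rather than the nominal SENDING_DELAY. The stand-alone sketch below (hypothetical timestamps; JitterWindowSketch is not part of the library) replays that bookkeeping so the window arithmetic is easy to follow.

```java
import java.util.Map;
import java.util.TreeMap;

// Illustrative sketch of the timestamp window used by DataPacketSenderThread above.
// All numbers are made up; only the bookkeeping mirrors the buffer.
public class JitterWindowSketch {
    public static void main(String[] args) {
        long framesDelayMs = 500;       // FRAMES_DELAY_MILLISECONDS
        long sendingDelayMs = 30;       // SENDING_DELAY
        long firstTimestamp = 10_000;   // converted timestamp of the first packet

        long down = firstTimestamp - framesDelayMs;  // oldest timestamp still accepted
        long up = down + sendingDelayMs;             // exclusive upper bound of this cycle

        TreeMap<Long, String> frames = new TreeMap<>();
        frames.put(9_400L, "late frame");
        frames.put(9_510L, "frame inside the window");
        frames.put(9_600L, "future frame");

        for (Map.Entry<Long, String> e : frames.entrySet()) {
            long ts = e.getKey();
            if (ts >= down && ts < up) {
                System.out.println(ts + " forwarded upstream");
            } else if (ts < down) {
                System.out.println(ts + " dropped as too late");
            } else {
                System.out.println(ts + " kept for a later window");
            }
        }

        // After sleeping SENDING_DELAY, the buffer advances by the *measured* cycle time:
        long measuredCycleMs = 32;      // hypothetical System.currentTimeMillis() difference
        down = up;
        up += measuredCycleMs;
        System.out.println("next window: [" + down + ", " + up + ")");
    }
}
```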
rtpmediaplayer_lib/src/main/java/com/c77/rtpmediaplayer/lib/rtp/RtpMediaBufferWithJitterAvoidance.java
package com.c77.rtpmediaplayer.lib.rtp; import com.biasedbit.efflux.packet.DataPacket; import com.biasedbit.efflux.participant.RtpParticipantInfo; import com.biasedbit.efflux.session.RtpSession; import com.biasedbit.efflux.session.RtpSessionDataListener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.util.Collection; import java.util.Properties; import java.util.SortedMap; import java.util.TreeMap; /** * Created by ashi on 1/13/15. */ public class RtpMediaBufferWithJitterAvoidance implements RtpSessionDataListener { public static final String DEBUGGING_PROPERTY = "DEBUGGING"; public static final java.lang.String FRAMES_WINDOW_PROPERTY = "FRAMES_WINDOW"; private State streamingState; private long lastTimestamp; private long maxTimeCycleTime = 0; private int counter = 0; private long sumTimeCycleTimes = 0; // Stream streamingState protected enum State { IDLE, // Just started. Didn't receive any packets yet CONFIGURING, // looking for frame delay STREAMING // Receiving packets } private static boolean DEBUGGING = false; private static long SENDING_DELAY = 30; private static long FRAMES_DELAY_MILLISECONDS = 500; private final RtpSessionDataListener upstream; private final DataPacketSenderThread dataPacketSenderThread; // frames sorted by their timestamp SortedMap<Long, Frame> frames = new TreeMap<Long, Frame>(); private Log log = LogFactory.getLog(RtpMediaBufferWithJitterAvoidance.class); private long downTimestampBound; private long upTimestampBound; RtpSession session; RtpParticipantInfo participant; public RtpMediaBufferWithJitterAvoidance(RtpSessionDataListener upstream) { this.upstream = upstream; streamingState = State.IDLE; dataPacketSenderThread = new DataPacketSenderThread(); } public RtpMediaBufferWithJitterAvoidance(RtpSessionDataListener upstream, Properties properties) { this.upstream = upstream; streamingState = State.IDLE; dataPacketSenderThread = new DataPacketSenderThread(); DEBUGGING = Boolean.parseBoolean(properties.getProperty(DEBUGGING_PROPERTY, "false")); FRAMES_DELAY_MILLISECONDS = Long.parseLong(properties.getProperty(FRAMES_WINDOW_PROPERTY, "800")); } @Override public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) { if (streamingState == State.IDLE) { this.session = session; this.participant = participant; lastTimestamp = getConvertedTimestamp(packet); downTimestampBound = lastTimestamp - FRAMES_DELAY_MILLISECONDS; upTimestampBound = downTimestampBound + SENDING_DELAY; streamingState = State.STREAMING; dataPacketSenderThread.start(); } // discard packets that are too late if (State.STREAMING == streamingState && getConvertedTimestamp(packet) < downTimestampBound) { if (DEBUGGING) { log.info("Discarded packet with timestamp " + getConvertedTimestamp(packet)); } return; } synchronized (frames) { Frame frame = getFrameForPacket(packet); frames.put(new Long(frame.timestamp), frame); } } public void logValues() { log.info("Average: " + sumTimeCycleTimes/counter); log.info("Max delay: " + maxTimeCycleTime); } private long getConvertedTimestamp(DataPacket packet) { return packet.getTimestamp() / 90; } private Frame getFrameForPacket(DataPacket packet) { Frame frame; long timestamp = getConvertedTimestamp(packet); if (frames.containsKey(timestamp)) { // if a frame with this timestamp already exists, add packet to it frame = frames.get(timestamp); // add packet to frame frame.addPacket(packet); } else { // if no frames with this timestamp exists, create a new one frame = new 
Frame(packet); } return frame; } private class Frame { private final long timestamp; // packets sorted by their sequence number SortedMap<Integer, DataPacket> packets; /** * Create a frame from a packet * * @param packet */ public Frame(DataPacket packet) { packets = new TreeMap<Integer, DataPacket>(); timestamp = getConvertedTimestamp(packet); packets.put(new Integer(packet.getSequenceNumber()), packet); } public void addPacket(DataPacket packet) { packets.put(new Integer(packet.getSequenceNumber()), packet); } public java.util.Collection<DataPacket> getPackets() { return packets.values(); } } public void stop() { if (dataPacketSenderThread != null) { dataPacketSenderThread.shutdown(); } } private class DataPacketSenderThread extends Thread { private boolean running = true; @Override public void run() { super.run(); long timeWhenCycleStarted; long delay; while (running) { if (RtpMediaBufferWithJitterAvoidance.State.STREAMING == streamingState) { timeWhenCycleStarted = System.currentTimeMillis(); // go through all the frames which timestamp is the range [downTimestampBound,upTimestampBound) SortedMap<Long, Frame> copy = new TreeMap<Long, Frame>(); synchronized (frames) { if (DEBUGGING) { log.info("Copying #" + frames.size() + " frames"); } copy.putAll(frames); } for (SortedMap.Entry<Long, Frame> entry : copy.entrySet()) { Frame frame = entry.getValue(); if (DEBUGGING) { log.info("Looking for frames between: [" + downTimestampBound + "," + upTimestampBound + ")"); } long timestamp = frame.timestamp; synchronized (frames) { if (timestamp < upTimestampBound && timestamp >= downTimestampBound) { Collection<DataPacket> packets = frame.getPackets(); for (DataPacket packet : packets) { upstream.dataPacketReceived(session, participant, packet); } frames.remove(entry.getKey()); } else if (timestamp < downTimestampBound) { // remove old packages frames.remove(entry.getKey()); } } } try { sleep(SENDING_DELAY); downTimestampBound = upTimestampBound; // use actual delay instead of SENDING_DELAY delay = (System.currentTimeMillis() - timeWhenCycleStarted); if (DEBUGGING) { log.info("actual delay: " + delay); maxTimeCycleTime = Math.max(delay, maxTimeCycleTime); sumTimeCycleTimes += delay; counter++; } upTimestampBound += delay; } catch (InterruptedException e) { log.error("Error while waiting to send next frame", e); } } if (DEBUGGING && counter == 100) { logValues(); } } } public void shutdown() { running = false; } } }
structures changed
rtpmediaplayer_lib/src/main/java/com/c77/rtpmediaplayer/lib/rtp/RtpMediaBufferWithJitterAvoidance.java
structures changed
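The "structures changed" commit swaps the externally synchronized TreeMap/SortedMap buffers of the old_contents version for ConcurrentSkipListMap in the new_contents version, which lets the receiver thread insert frames while the sender thread iterates and removes them without explicit locks or a defensive copy. A minimal sketch of that property is below; keys and values are made up and the class is not taken from the project.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentSkipListMap;

// ConcurrentSkipListMap iterators are weakly consistent, so removing entries while iterating
// (and while another thread inserts) does not throw ConcurrentModificationException,
// unlike an unsynchronized TreeMap.
public class SkipListBufferSketch {
    public static void main(String[] args) throws InterruptedException {
        ConcurrentSkipListMap<Long, String> frames = new ConcurrentSkipListMap<>();
        for (long ts = 0; ts < 10; ts++) {
            frames.put(ts, "frame-" + ts);
        }

        Thread producer = new Thread(() -> {
            for (long ts = 10; ts < 20; ts++) {
                frames.put(ts, "frame-" + ts);   // concurrent inserts are safe
            }
        });
        producer.start();

        long cutoff = 5;                          // everything older than this is "too late"
        for (Map.Entry<Long, String> e : frames.entrySet()) {
            if (e.getKey() < cutoff) {
                frames.remove(e.getKey());        // safe during iteration
            }
        }

        producer.join();
        System.out.println("remaining frames: " + frames.size());  // 15
    }
}
```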
Java
apache-2.0
e42a0133abcec8090aa787226270896ad730faf2
0
henrichg/PhoneProfilesPlus
package com.noob.noobcameraflash.Utilities; import android.annotation.TargetApi; import android.content.Context; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraManager; import android.os.Build; import androidx.annotation.NonNull; /** * Created by Abhishek on 28-11-2015. */ @SuppressWarnings("ConstantConditions") @TargetApi(Build.VERSION_CODES.M) public class CameraUtilMarshMallow extends BaseCameraUtil { private CameraManager mCameraManager; private CameraManager.TorchCallback mTorchCallback; public CameraUtilMarshMallow(Context context) throws CameraAccessException { super(context); openCamera(); } private void openCamera() throws CameraAccessException { if (mCameraManager == null) mCameraManager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE); if (isFlashAvailable()) { mTorchCallback = new CameraManager.TorchCallback() { @Override public void onTorchModeUnavailable(@NonNull String cameraId) { super.onTorchModeUnavailable(cameraId); onCameraTorchModeChanged(TorchMode.Unavailable); } @Override public void onTorchModeChanged(@NonNull String cameraId, boolean enabled) { super.onTorchModeChanged(cameraId, enabled); if (enabled) setTorchMode(TorchMode.SwitchedOn); else setTorchMode(TorchMode.SwitchedOff); } }; mCameraManager.registerTorchCallback(mTorchCallback, null); } } private boolean isFlashAvailable() throws CameraAccessException { CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics("0"); return cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); } @Override public void turnOnFlash() throws CameraAccessException { String[] cameraIds = getCameraManager().getCameraIdList(); for (String id : cameraIds) { CameraCharacteristics characteristics = getCameraManager().getCameraCharacteristics(id); if (characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) { // added facing check - allowed is only back flash Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING); if ((facing != null) && (facing == CameraCharacteristics.LENS_FACING_BACK)) { getCameraManager().setTorchMode(id, true); setTorchMode(TorchMode.SwitchedOn); } } } } @Override public void turnOffFlash() throws CameraAccessException { String[] cameraIds = getCameraManager().getCameraIdList(); for (String id : cameraIds) { CameraCharacteristics characteristics = getCameraManager().getCameraCharacteristics(id); if (characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) { // added facing check - allowed is only back flash Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING); if ((facing != null) && (facing == CameraCharacteristics.LENS_FACING_BACK)) { getCameraManager().setTorchMode(id, false); setTorchMode(TorchMode.SwitchedOff); } } } } @Override public void release() { if (mCameraManager != null) { mCameraManager.unregisterTorchCallback(mTorchCallback); mCameraManager = null; } } //region Accessors private CameraManager getCameraManager() throws CameraAccessException { if (mCameraManager == null) { openCamera(); } return mCameraManager; } //endregion }
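CameraUtilMarshMallow above drives the Android 6+ torch through CameraManager.setTorchMode and, in this new_contents version, only touches back-facing cameras (LENS_FACING_BACK). A hypothetical caller could look like the sketch below; BaseCameraUtil's Context handling and TorchMode callbacks are not shown in this row, so the wrapper class here is an assumption and only uses the public methods visible above.

```java
import android.content.Context;
import android.hardware.camera2.CameraAccessException;

import com.noob.noobcameraflash.Utilities.CameraUtilMarshMallow;

// Hypothetical caller sketch; error handling and lifecycle wiring are left to the host app.
public class TorchToggleSketch {
    private final CameraUtilMarshMallow torch;

    public TorchToggleSketch(Context context) throws CameraAccessException {
        // Registers an internal TorchCallback and checks that camera "0" has a flash unit.
        torch = new CameraUtilMarshMallow(context);
    }

    public void setTorch(boolean on) throws CameraAccessException {
        if (on) {
            torch.turnOnFlash();    // only back-facing cameras are switched on
        } else {
            torch.turnOffFlash();
        }
    }

    public void close() {
        torch.release();            // unregister the TorchCallback when the feature is torn down
    }
}
```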
noobcameraflash/src/main/java/com/noob/noobcameraflash/Utilities/CameraUtilMarshMallow.java
package com.noob.noobcameraflash.Utilities; import android.annotation.TargetApi; import android.content.Context; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraManager; import android.os.Build; import androidx.annotation.NonNull; /** * Created by Abhishek on 28-11-2015. */ @SuppressWarnings("ConstantConditions") @TargetApi(Build.VERSION_CODES.M) public class CameraUtilMarshMallow extends BaseCameraUtil { private CameraManager mCameraManager; private CameraManager.TorchCallback mTorchCallback; public CameraUtilMarshMallow(Context context) throws CameraAccessException { super(context); openCamera(); } private void openCamera() throws CameraAccessException { if (mCameraManager == null) mCameraManager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE); if (isFlashAvailable()) { mTorchCallback = new CameraManager.TorchCallback() { @Override public void onTorchModeUnavailable(@NonNull String cameraId) { super.onTorchModeUnavailable(cameraId); onCameraTorchModeChanged(TorchMode.Unavailable); } @Override public void onTorchModeChanged(@NonNull String cameraId, boolean enabled) { super.onTorchModeChanged(cameraId, enabled); if (enabled) setTorchMode(TorchMode.SwitchedOn); else setTorchMode(TorchMode.SwitchedOff); } }; mCameraManager.registerTorchCallback(mTorchCallback, null); } } private boolean isFlashAvailable() throws CameraAccessException { CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics("0"); return cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); } @Override public void turnOnFlash() throws CameraAccessException { String[] cameraIds = getCameraManager().getCameraIdList(); for (String id : cameraIds) { CameraCharacteristics characteristics = getCameraManager().getCameraCharacteristics(id); if (characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) { getCameraManager().setTorchMode(id, true); setTorchMode(TorchMode.SwitchedOn); } } } @Override public void turnOffFlash() throws CameraAccessException { String[] cameraIds = getCameraManager().getCameraIdList(); for (String id : cameraIds) { CameraCharacteristics characteristics = getCameraManager().getCameraCharacteristics(id); if (characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) { getCameraManager().setTorchMode(id, false); setTorchMode(TorchMode.SwitchedOff); } } } @Override public void release() { if (mCameraManager != null) { mCameraManager.unregisterTorchCallback(mTorchCallback); mCameraManager = null; } } //region Accessors private CameraManager getCameraManager() throws CameraAccessException { if (mCameraManager == null) { openCamera(); } return mCameraManager; } //endregion }
Added camera flashlight switch into Profile preferences. (6)
noobcameraflash/src/main/java/com/noob/noobcameraflash/Utilities/CameraUtilMarshMallow.java
Added camera flashlight switch into Profile preferences. (6)
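The diff in this record adds one guard to both turnOnFlash() and turnOffFlash(): in addition to FLASH_INFO_AVAILABLE, the camera's LENS_FACING characteristic must be LENS_FACING_BACK before setTorchMode(id, ...) is called, so only the back flash is toggled. The plain-Java sketch below mirrors that selection rule without the Android dependencies; CameraInfo and the numeric facing constants are stand-ins invented for illustration, not part of the library.

import java.util.List;

// Stand-alone sketch of the per-camera selection rule; CameraInfo is a made-up stand-in
// for the fields the commit reads from CameraCharacteristics.
public class TorchSelectionSketch {
    record CameraInfo(String id, boolean flashAvailable, Integer facing) {}

    // Stand-in constants; only their distinctness matters for the sketch.
    static final int FACING_FRONT = 0;
    static final int FACING_BACK = 1;

    public static void main(String[] args) {
        List<CameraInfo> cameras = List.of(
                new CameraInfo("0", true, FACING_BACK),   // back camera with flash -> torch toggled
                new CameraInfo("1", true, FACING_FRONT),  // front camera -> now skipped
                new CameraInfo("2", false, FACING_BACK)); // no flash unit -> skipped as before

        for (CameraInfo c : cameras) {
            boolean toggle = c.flashAvailable()
                    && c.facing() != null
                    && c.facing() == FACING_BACK;
            System.out.println("camera " + c.id() + " -> " + (toggle ? "setTorchMode" : "skip"));
        }
    }
}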
Java
apache-2.0
8835168282d6b5e14bb583d06fc4651562cd6c27
0
FHannes/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,signed/intellij-community,apixandru/intellij-community,allotria/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.application.options.editor; import com.intellij.codeInsight.daemon.*; import com.intellij.ide.plugins.IdeaPluginDescriptor; import com.intellij.lang.LanguageExtensionPoint; import com.intellij.openapi.extensions.ExtensionPoint; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.extensions.PluginDescriptor; import com.intellij.openapi.options.Configurable; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.util.Comparing; import com.intellij.ui.CheckBoxList; import com.intellij.ui.SeparatorWithText; import com.intellij.util.Function; import com.intellij.util.NullableFunction; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import com.intellij.util.ui.EmptyIcon; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.util.*; import java.util.List; /** * @author Dmitry Avdeev */ public class GutterIconsConfigurable implements Configurable, Configurable.NoScroll { private JPanel myPanel; private CheckBoxList<GutterIconDescriptor> myList; private List<GutterIconDescriptor> myDescriptors; private Map<GutterIconDescriptor, PluginDescriptor> myFirstDescriptors = new HashMap<GutterIconDescriptor, PluginDescriptor>(); @Nls @Override public String getDisplayName() { return "Gutter Icons"; } @Nullable @Override public String getHelpTopic() { return "reference.settings.editor.gutter.icons"; } @Nullable @Override public JComponent createComponent() { ExtensionPoint<LineMarkerProvider> point = Extensions.getRootArea().getExtensionPoint(LineMarkerProviders.EP_NAME); @SuppressWarnings("unchecked") LanguageExtensionPoint<LineMarkerProvider>[] extensions = (LanguageExtensionPoint<LineMarkerProvider>[])point.getExtensions(); NullableFunction<LanguageExtensionPoint<LineMarkerProvider>, PluginDescriptor> function = new NullableFunction<LanguageExtensionPoint<LineMarkerProvider>, PluginDescriptor>() { @Nullable @Override public PluginDescriptor fun(LanguageExtensionPoint<LineMarkerProvider> point) { LineMarkerProvider instance = point.getInstance(); return instance instanceof LineMarkerProviderDescriptor && ((LineMarkerProviderDescriptor)instance).getName() != null ? 
point.getPluginDescriptor() : null; } }; MultiMap<PluginDescriptor, LanguageExtensionPoint<LineMarkerProvider>> map = ContainerUtil.groupBy(Arrays.asList(extensions), function); Map<GutterIconDescriptor, PluginDescriptor> pluginDescriptorMap = ContainerUtil.newHashMap(); myDescriptors = new ArrayList<GutterIconDescriptor>(); for (final PluginDescriptor descriptor : map.keySet()) { Collection<LanguageExtensionPoint<LineMarkerProvider>> points = map.get(descriptor); for (LanguageExtensionPoint<LineMarkerProvider> extensionPoint : points) { GutterIconDescriptor instance = (GutterIconDescriptor)extensionPoint.getInstance(); if (instance.getOptions().length > 0) { for (GutterIconDescriptor option : instance.getOptions()) { myDescriptors.add(option); pluginDescriptorMap.put(option, descriptor); } } else { myDescriptors.add(instance); pluginDescriptorMap.put(instance, descriptor); } } } List<GutterIconDescriptor> options = new ArrayList<GutterIconDescriptor>(); for (Iterator<GutterIconDescriptor> iterator = myDescriptors.iterator(); iterator.hasNext(); ) { GutterIconDescriptor descriptor = iterator.next(); if (descriptor.getOptions().length > 0) { options.addAll(Arrays.asList(descriptor.getOptions())); iterator.remove(); } } myDescriptors.addAll(options); myDescriptors.sort(new Comparator<GutterIconDescriptor>() { @Override public int compare(GutterIconDescriptor o1, GutterIconDescriptor o2) { if (pluginDescriptorMap.get(o1) != pluginDescriptorMap.get(o2)) return 0; return Comparing.compare(o1.getName(), o2.getName()); } }); PluginDescriptor current = null; for (GutterIconDescriptor descriptor : myDescriptors) { PluginDescriptor pluginDescriptor = pluginDescriptorMap.get(descriptor); if (pluginDescriptor != current) { myFirstDescriptors.put(descriptor, pluginDescriptor); current = pluginDescriptor; } } myList.setItems(myDescriptors, new Function<GutterIconDescriptor, String>() { @Override public String fun(GutterIconDescriptor descriptor) { return descriptor.getName(); } }); return myPanel; } @Override public boolean isModified() { for (GutterIconDescriptor descriptor : myDescriptors) { if (myList.isItemSelected(descriptor) != LineMarkerSettings.getSettings().isEnabled(descriptor)) { return true; } } return false; } @Override public void apply() throws ConfigurationException { for (GutterIconDescriptor descriptor : myDescriptors) { LineMarkerSettings.getSettings().setEnabled(descriptor, myList.isItemSelected(descriptor)); } for (Project project : ProjectManager.getInstance().getOpenProjects()) { DaemonCodeAnalyzer.getInstance(project).restart(); } } @Override public void reset() { for (GutterIconDescriptor descriptor : myDescriptors) { myList.setItemSelected(descriptor, LineMarkerSettings.getSettings().isEnabled(descriptor)); } } @Override public void disposeUIResources() { } private void createUIComponents() { myList = new CheckBoxList<GutterIconDescriptor>() { @Override protected JComponent adjustRendering(JComponent rootComponent, JCheckBox checkBox, int index, boolean selected, boolean hasFocus) { JPanel panel = new JPanel(new BorderLayout()); panel.setBorder(BorderFactory.createEmptyBorder()); GutterIconDescriptor descriptor = myList.getItemAt(index); Icon icon = descriptor == null ? null : descriptor.getIcon(); JLabel label = new JLabel(icon == null ? 
EmptyIcon.ICON_16 : icon); label.setOpaque(true); label.setPreferredSize(new Dimension(25, -1)); label.setHorizontalAlignment(SwingConstants.CENTER); panel.add(label, BorderLayout.WEST); panel.add(checkBox, BorderLayout.CENTER); panel.setBackground(getBackground(false)); label.setBackground(getBackground(selected)); if (!checkBox.isOpaque()) { checkBox.setOpaque(true); } checkBox.setBorder(null); PluginDescriptor pluginDescriptor = myFirstDescriptors.get(descriptor); if (pluginDescriptor instanceof IdeaPluginDescriptor) { SeparatorWithText separator = new SeparatorWithText(); String name = ((IdeaPluginDescriptor)pluginDescriptor).getName(); separator.setCaption("IDEA CORE".equals(name) ? "Common" : name); panel.add(separator, BorderLayout.NORTH); } return panel; } @Nullable @Override protected Point findPointRelativeToCheckBox(int x, int y, @NotNull JCheckBox checkBox, int index) { return super.findPointRelativeToCheckBoxWithAdjustedRendering(x, y, checkBox, index); } }; myList.setBorder(BorderFactory.createEmptyBorder()); } }
platform/lang-impl/src/com/intellij/application/options/editor/GutterIconsConfigurable.java
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.application.options.editor; import com.intellij.codeInsight.daemon.*; import com.intellij.ide.plugins.IdeaPluginDescriptor; import com.intellij.lang.LanguageExtensionPoint; import com.intellij.openapi.extensions.ExtensionPoint; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.extensions.PluginDescriptor; import com.intellij.openapi.options.Configurable; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.util.Comparing; import com.intellij.ui.CheckBoxList; import com.intellij.ui.SeparatorWithText; import com.intellij.util.Function; import com.intellij.util.NullableFunction; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import com.intellij.util.ui.EmptyIcon; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.util.*; import java.util.List; /** * @author Dmitry Avdeev */ public class GutterIconsConfigurable implements Configurable, Configurable.NoScroll { private JPanel myPanel; private CheckBoxList<GutterIconDescriptor> myList; private List<GutterIconDescriptor> myDescriptors; private Map<GutterIconDescriptor, PluginDescriptor> myFirstDescriptors = new HashMap<GutterIconDescriptor, PluginDescriptor>(); @Nls @Override public String getDisplayName() { return "Gutter Icons"; } @Nullable @Override public String getHelpTopic() { return "reference.settings.editor.gutter.icons"; } @Nullable @Override public JComponent createComponent() { ExtensionPoint<LineMarkerProvider> point = Extensions.getRootArea().getExtensionPoint(LineMarkerProviders.EP_NAME); @SuppressWarnings("unchecked") LanguageExtensionPoint<LineMarkerProvider>[] extensions = (LanguageExtensionPoint<LineMarkerProvider>[])point.getExtensions(); NullableFunction<LanguageExtensionPoint<LineMarkerProvider>, PluginDescriptor> function = new NullableFunction<LanguageExtensionPoint<LineMarkerProvider>, PluginDescriptor>() { @Nullable @Override public PluginDescriptor fun(LanguageExtensionPoint<LineMarkerProvider> point) { LineMarkerProvider instance = point.getInstance(); return instance instanceof LineMarkerProviderDescriptor && ((LineMarkerProviderDescriptor)instance).getName() != null ? 
point.getPluginDescriptor() : null; } }; MultiMap<PluginDescriptor, LanguageExtensionPoint<LineMarkerProvider>> map = ContainerUtil.groupBy(Arrays.asList(extensions), function); Map<GutterIconDescriptor, PluginDescriptor> pluginDescriptorMap = ContainerUtil.newHashMap(); myDescriptors = new ArrayList<GutterIconDescriptor>(); for (final PluginDescriptor descriptor : map.keySet()) { Collection<LanguageExtensionPoint<LineMarkerProvider>> points = map.get(descriptor); for (LanguageExtensionPoint<LineMarkerProvider> extensionPoint : points) { GutterIconDescriptor instance = (GutterIconDescriptor)extensionPoint.getInstance(); if (instance.getOptions().length > 0) { for (GutterIconDescriptor option : instance.getOptions()) { myDescriptors.add(option); pluginDescriptorMap.put(option, descriptor); } } else { myDescriptors.add(instance); pluginDescriptorMap.put(instance, descriptor); } } } List<GutterIconDescriptor> options = new ArrayList<GutterIconDescriptor>(); for (Iterator<GutterIconDescriptor> iterator = myDescriptors.iterator(); iterator.hasNext(); ) { GutterIconDescriptor descriptor = iterator.next(); if (descriptor.getOptions().length > 0) { options.addAll(Arrays.asList(descriptor.getOptions())); iterator.remove(); } } myDescriptors.addAll(options); myDescriptors.sort(new Comparator<GutterIconDescriptor>() { @Override public int compare(GutterIconDescriptor o1, GutterIconDescriptor o2) { if (pluginDescriptorMap.get(o1) != pluginDescriptorMap.get(o2)) return 0; return Comparing.compare(o1.getName(), o2.getName()); } }); PluginDescriptor current = null; for (GutterIconDescriptor descriptor : myDescriptors) { PluginDescriptor pluginDescriptor = pluginDescriptorMap.get(descriptor); if (pluginDescriptor != current) { myFirstDescriptors.put(descriptor, pluginDescriptor); current = pluginDescriptor; } } myList.setItems(myDescriptors, new Function<GutterIconDescriptor, String>() { @Override public String fun(GutterIconDescriptor descriptor) { return descriptor.getName(); } }); return myPanel; } @Override public boolean isModified() { for (GutterIconDescriptor descriptor : myDescriptors) { if (myList.isItemSelected(descriptor) != LineMarkerSettings.getSettings().isEnabled(descriptor)) { return true; } } return false; } @Override public void apply() throws ConfigurationException { for (GutterIconDescriptor descriptor : myDescriptors) { LineMarkerSettings.getSettings().setEnabled(descriptor, myList.isItemSelected(descriptor)); } for (Project project : ProjectManager.getInstance().getOpenProjects()) { DaemonCodeAnalyzer.getInstance(project).restart(); } } @Override public void reset() { for (GutterIconDescriptor descriptor : myDescriptors) { myList.setItemSelected(descriptor, LineMarkerSettings.getSettings().isEnabled(descriptor)); } } @Override public void disposeUIResources() { } private void createUIComponents() { myList = new CheckBoxList<GutterIconDescriptor>() { @Override protected JComponent adjustRendering(JComponent rootComponent, JCheckBox checkBox, int index, boolean selected, boolean hasFocus) { JPanel panel = new JPanel(new BorderLayout()); panel.setBorder(BorderFactory.createEmptyBorder()); GutterIconDescriptor descriptor = myList.getItemAt(index); Icon icon = descriptor == null ? null : descriptor.getIcon(); JLabel label = new JLabel(icon == null ? 
EmptyIcon.ICON_16 : icon); label.setOpaque(true); label.setPreferredSize(new Dimension(25, -1)); label.setHorizontalAlignment(SwingConstants.CENTER); panel.add(label, BorderLayout.WEST); panel.add(checkBox, BorderLayout.CENTER); panel.setBackground(getBackground(false)); label.setBackground(getBackground(selected)); checkBox.setBorder(null); PluginDescriptor pluginDescriptor = myFirstDescriptors.get(descriptor); if (pluginDescriptor instanceof IdeaPluginDescriptor) { SeparatorWithText separator = new SeparatorWithText(); String name = ((IdeaPluginDescriptor)pluginDescriptor).getName(); separator.setCaption("IDEA CORE".equals(name) ? "Common" : name); panel.add(separator, BorderLayout.NORTH); } return panel; } @Nullable @Override protected Point findPointRelativeToCheckBox(int x, int y, @NotNull JCheckBox checkBox, int index) { return super.findPointRelativeToCheckBoxWithAdjustedRendering(x, y, checkBox, index); } }; myList.setBorder(BorderFactory.createEmptyBorder()); } }
checkbox should be opaque in gutter icons renderer
platform/lang-impl/src/com/intellij/application/options/editor/GutterIconsConfigurable.java
checkbox should be opaque in gutter icons renderer
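Apart from the copyright-year bump, the only change in this record is the guard inserted into adjustRendering(): when the checkbox reports itself non-opaque it is switched to opaque, so it paints its own background (for example the list selection colour) instead of letting the renderer panel show through. A minimal Swing sketch of that idiom, not the IDE's renderer itself:

import javax.swing.JCheckBox;
import javax.swing.UIManager;

// Minimal sketch of the opacity guard from the commit above; the colour lookup is
// only here to show why opacity matters for a renderer component.
public class OpaqueCheckBoxSketch {
    public static void main(String[] args) {
        JCheckBox checkBox = new JCheckBox("line marker");
        System.out.println("opaque by default: " + checkBox.isOpaque()); // depends on the look and feel

        if (!checkBox.isOpaque()) {
            checkBox.setOpaque(true); // same idiom the commit adds to adjustRendering()
        }
        // An opaque checkbox actually fills this background when painted in a list cell.
        checkBox.setBackground(UIManager.getColor("List.selectionBackground"));
        System.out.println("opaque now: " + checkBox.isOpaque());
    }
}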
Java
apache-2.0
c7d83d0f0bad02b2c84ac06cba6aefd919497dc2
0
apache/mina-sshd
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sshd.sftp.common; import java.io.EOFException; import java.io.FileNotFoundException; import java.net.UnknownServiceException; import java.nio.channels.OverlappingFileLockException; import java.nio.charset.StandardCharsets; import java.nio.file.AccessDeniedException; import java.nio.file.DirectoryNotEmptyException; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystemLoopException; import java.nio.file.InvalidPathException; import java.nio.file.NoSuchFileException; import java.nio.file.NotDirectoryException; import java.nio.file.attribute.AclEntry; import java.nio.file.attribute.AclEntryFlag; import java.nio.file.attribute.AclEntryPermission; import java.nio.file.attribute.AclEntryType; import java.nio.file.attribute.FileTime; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; import java.nio.file.attribute.UserPrincipal; import java.nio.file.attribute.UserPrincipalNotFoundException; import java.security.Principal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Objects; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.TimeUnit; import org.apache.sshd.common.PropertyResolver; import org.apache.sshd.common.SshConstants; import org.apache.sshd.common.util.GenericUtils; import org.apache.sshd.common.util.MapEntryUtils; import org.apache.sshd.common.util.OsUtils; import org.apache.sshd.common.util.ValidateUtils; import org.apache.sshd.common.util.buffer.Buffer; import org.apache.sshd.common.util.buffer.BufferUtils; import org.apache.sshd.common.util.buffer.ByteArrayBuffer; import org.apache.sshd.common.util.io.IoUtils; import org.apache.sshd.sftp.SftpModuleProperties; import org.apache.sshd.sftp.client.SftpClient.Attribute; import org.apache.sshd.sftp.client.SftpClient.Attributes; import org.apache.sshd.sftp.server.DefaultGroupPrincipal; import org.apache.sshd.sftp.server.InvalidHandleException; import org.apache.sshd.sftp.server.UnixDateFormat; /** * @author <a href="mailto:dev@mina.apache.org">Apache MINA SSHD Project</a> */ public final class SftpHelper { public static final Map<Integer, String> DEFAULT_SUBSTATUS_MESSAGE; static { Map<Integer, String> map = new TreeMap<>(Comparator.naturalOrder()); map.put(SftpConstants.SSH_FX_OK, "Success"); map.put(SftpConstants.SSH_FX_EOF, "End of file"); map.put(SftpConstants.SSH_FX_NO_SUCH_FILE, "No such file or directory"); map.put(SftpConstants.SSH_FX_PERMISSION_DENIED, "Permission denied"); map.put(SftpConstants.SSH_FX_FAILURE, "General failure"); 
map.put(SftpConstants.SSH_FX_BAD_MESSAGE, "Bad message data"); map.put(SftpConstants.SSH_FX_NO_CONNECTION, "No connection to server"); map.put(SftpConstants.SSH_FX_CONNECTION_LOST, "Connection lost"); map.put(SftpConstants.SSH_FX_OP_UNSUPPORTED, "Unsupported operation requested"); map.put(SftpConstants.SSH_FX_INVALID_HANDLE, "Invalid handle value"); map.put(SftpConstants.SSH_FX_NO_SUCH_PATH, "No such path"); map.put(SftpConstants.SSH_FX_FILE_ALREADY_EXISTS, "File/Directory already exists"); map.put(SftpConstants.SSH_FX_WRITE_PROTECT, "File/Directory is write-protected"); map.put(SftpConstants.SSH_FX_NO_MEDIA, "No such meadia"); map.put(SftpConstants.SSH_FX_NO_SPACE_ON_FILESYSTEM, "No space left on device"); map.put(SftpConstants.SSH_FX_QUOTA_EXCEEDED, "Quota exceeded"); map.put(SftpConstants.SSH_FX_UNKNOWN_PRINCIPAL, "Unknown user/group"); map.put(SftpConstants.SSH_FX_LOCK_CONFLICT, "Lock conflict"); map.put(SftpConstants.SSH_FX_DIR_NOT_EMPTY, "Directory not empty"); map.put(SftpConstants.SSH_FX_NOT_A_DIRECTORY, "Accessed location is not a directory"); map.put(SftpConstants.SSH_FX_INVALID_FILENAME, "Invalid filename"); map.put(SftpConstants.SSH_FX_LINK_LOOP, "Link loop"); map.put(SftpConstants.SSH_FX_CANNOT_DELETE, "Cannot remove"); map.put(SftpConstants.SSH_FX_INVALID_PARAMETER, "Invalid parameter"); map.put(SftpConstants.SSH_FX_FILE_IS_A_DIRECTORY, "Accessed location is a directory"); map.put(SftpConstants.SSH_FX_BYTE_RANGE_LOCK_CONFLICT, "Range lock conflict"); map.put(SftpConstants.SSH_FX_BYTE_RANGE_LOCK_REFUSED, "Range lock refused"); map.put(SftpConstants.SSH_FX_DELETE_PENDING, "Delete pending"); map.put(SftpConstants.SSH_FX_FILE_CORRUPT, "Corrupted file/directory"); map.put(SftpConstants.SSH_FX_OWNER_INVALID, "Invalid file/directory owner"); map.put(SftpConstants.SSH_FX_GROUP_INVALID, "Invalid file/directory group"); map.put(SftpConstants.SSH_FX_NO_MATCHING_BYTE_RANGE_LOCK, "No matching byte range lock"); DEFAULT_SUBSTATUS_MESSAGE = Collections.unmodifiableMap(map); } private SftpHelper() { throw new UnsupportedOperationException("No instance allowed"); } /** * Retrieves the end-of-file indicator for {@code SSH_FXP_DATA} responses, provided the version is at least 6, and * the buffer has enough available data * * @param buffer The {@link Buffer} to retrieve the data from * @param version The SFTP version being used * @return The indicator value - {@code null} if none retrieved * @see <A HREF="https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#section-9.3">SFTP v6 - section * 9.3</A> */ public static Boolean getEndOfFileIndicatorValue(Buffer buffer, int version) { return (version < SftpConstants.SFTP_V6) || (buffer.available() < 1) ? null : buffer.getBoolean(); } /** * Retrieves the end-of-list indicator for {@code SSH_FXP_NAME} responses, provided the version is at least 6, and * the buffer has enough available data * * @param buffer The {@link Buffer} to retrieve the data from * @param version The SFTP version being used * @return The indicator value - {@code null} if none retrieved * @see <A HREF="https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#section-9.4">SFTP v6 - section * 9.4</A> * @see #indicateEndOfNamesList(Buffer, int, PropertyResolver, boolean) */ public static Boolean getEndOfListIndicatorValue(Buffer buffer, int version) { return (version < SftpConstants.SFTP_V6) || (buffer.available() < 1) ? 
null : buffer.getBoolean(); } /** * Appends the end-of-list={@code TRUE} indicator for {@code SSH_FXP_NAME} responses, provided the version is at * least 6 and the feature is enabled * * @param buffer The {@link Buffer} to append the indicator * @param version The SFTP version being used * @param resolver The {@link PropertyResolver} to query whether to enable the feature * @return The actual indicator value used - {@code null} if none appended * @see #indicateEndOfNamesList(Buffer, int, PropertyResolver, boolean) */ public static Boolean indicateEndOfNamesList(Buffer buffer, int version, PropertyResolver resolver) { return indicateEndOfNamesList(buffer, version, resolver, true); } /** * Appends the end-of-list indicator for {@code SSH_FXP_NAME} responses, provided the version is at least 6, the * feature is enabled and the indicator value is not {@code null} * * @param buffer The {@link Buffer} to append the indicator * @param version The SFTP version being used * @param resolver The {@link PropertyResolver} to query whether to enable the feature * @param indicatorValue The indicator value - {@code null} means don't append the indicator * @return The actual indicator value used - {@code null} if none appended * @see <A HREF="https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#section-9.4">SFTP v6 - * section 9.4</A> * @see SftpModuleProperties#APPEND_END_OF_LIST_INDICATOR */ public static Boolean indicateEndOfNamesList( Buffer buffer, int version, PropertyResolver resolver, boolean indicatorValue) { if (version < SftpConstants.SFTP_V6) { return null; } if (!SftpModuleProperties.APPEND_END_OF_LIST_INDICATOR.getRequired(resolver)) { return null; } buffer.putBoolean(indicatorValue); return indicatorValue; } /** * Writes a file / folder's attributes to a buffer * * @param <B> Type of {@link Buffer} being updated * @param buffer The target buffer instance * @param version The output encoding version * @param attributes The {@link Map} of attributes * @return The updated buffer * @see #writeAttrsV3(Buffer, int, Map) * @see #writeAttrsV4(Buffer, int, Map) */ public static <B extends Buffer> B writeAttrs(B buffer, int version, Map<String, ?> attributes) { if (version == SftpConstants.SFTP_V3) { return writeAttrsV3(buffer, version, attributes); } else if (version >= SftpConstants.SFTP_V4) { return writeAttrsV4(buffer, version, attributes); } else { throw new IllegalStateException("Unsupported SFTP version: " + version); } } /** * Writes the retrieved file / directory attributes in V3 format * * @param <B> Type of {@link Buffer} being updated * @param buffer The target buffer instance * @param version The actual version - must be {@link SftpConstants#SFTP_V3} * @param attributes The {@link Map} of attributes * @return The updated buffer */ public static <B extends Buffer> B writeAttrsV3(B buffer, int version, Map<String, ?> attributes) { ValidateUtils.checkTrue(version == SftpConstants.SFTP_V3, "Illegal version: %d", version); boolean isReg = getBool((Boolean) attributes.get(IoUtils.REGFILE_VIEW_ATTR)); boolean isDir = getBool((Boolean) attributes.get(IoUtils.DIRECTORY_VIEW_ATTR)); boolean isLnk = getBool((Boolean) attributes.get(IoUtils.SYMLINK_VIEW_ATTR)); @SuppressWarnings("unchecked") Collection<PosixFilePermission> perms = (Collection<PosixFilePermission>) attributes.get(IoUtils.PERMISSIONS_VIEW_ATTR); Number size = (Number) attributes.get(IoUtils.SIZE_VIEW_ATTR); FileTime lastModifiedTime = (FileTime) attributes.get(IoUtils.LASTMOD_TIME_VIEW_ATTR); FileTime lastAccessTime = (FileTime) 
attributes.get(IoUtils.LASTACC_TIME_VIEW_ATTR); Map<?, ?> extensions = (Map<?, ?>) attributes.get(IoUtils.EXTENDED_VIEW_ATTR); int flags = ((isReg || isLnk) && (size != null) ? SftpConstants.SSH_FILEXFER_ATTR_SIZE : 0) | (attributes.containsKey(IoUtils.USERID_VIEW_ATTR) && attributes.containsKey(IoUtils.GROUPID_VIEW_ATTR) ? SftpConstants.SSH_FILEXFER_ATTR_UIDGID : 0) | ((perms != null) ? SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS : 0) | (((lastModifiedTime != null) && (lastAccessTime != null)) ? SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME : 0) | ((extensions != null) ? SftpConstants.SSH_FILEXFER_ATTR_EXTENDED : 0); buffer.putInt(flags); if ((flags & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { buffer.putLong(size.longValue()); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_UIDGID) != 0) { buffer.putInt(((Number) attributes.get(IoUtils.USERID_VIEW_ATTR)).intValue()); buffer.putInt(((Number) attributes.get(IoUtils.GROUPID_VIEW_ATTR)).intValue()); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { buffer.putInt(attributesToPermissions(isReg, isDir, isLnk, perms)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME) != 0) { buffer = writeTime(buffer, version, flags, lastAccessTime); buffer = writeTime(buffer, version, flags, lastModifiedTime); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_EXTENDED) != 0) { buffer = writeExtensions(buffer, extensions); } return buffer; } /** * Writes the retrieved file / directory attributes in V4+ format * * @param <B> Type of {@link Buffer} being updated * @param buffer The target buffer instance * @param version The actual version - must be at least {@link SftpConstants#SFTP_V4} * @param attributes The {@link Map} of attributes * @return The updated buffer */ public static <B extends Buffer> B writeAttrsV4(B buffer, int version, Map<String, ?> attributes) { ValidateUtils.checkTrue(version >= SftpConstants.SFTP_V4, "Illegal version: %d", version); boolean isReg = getBool((Boolean) attributes.get(IoUtils.REGFILE_VIEW_ATTR)); boolean isDir = getBool((Boolean) attributes.get(IoUtils.DIRECTORY_VIEW_ATTR)); boolean isLnk = getBool((Boolean) attributes.get(IoUtils.SYMLINK_VIEW_ATTR)); @SuppressWarnings("unchecked") Collection<PosixFilePermission> perms = (Collection<PosixFilePermission>) attributes.get(IoUtils.PERMISSIONS_VIEW_ATTR); Number size = (Number) attributes.get(IoUtils.SIZE_VIEW_ATTR); FileTime lastModifiedTime = (FileTime) attributes.get(IoUtils.LASTMOD_TIME_VIEW_ATTR); FileTime lastAccessTime = (FileTime) attributes.get(IoUtils.LASTACC_TIME_VIEW_ATTR); FileTime creationTime = (FileTime) attributes.get(IoUtils.CREATE_TIME_VIEW_ATTR); @SuppressWarnings("unchecked") Collection<AclEntry> acl = (Collection<AclEntry>) attributes.get(IoUtils.ACL_VIEW_ATTR); Map<?, ?> extensions = (Map<?, ?>) attributes.get(IoUtils.EXTENDED_VIEW_ATTR); int flags = (((isReg || isLnk) && (size != null)) ? SftpConstants.SSH_FILEXFER_ATTR_SIZE : 0) | ((attributes.containsKey(IoUtils.OWNER_VIEW_ATTR) && attributes.containsKey(IoUtils.GROUP_VIEW_ATTR)) ? SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP : 0) | ((perms != null) ? SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS : 0) | ((lastModifiedTime != null) ? SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME : 0) | ((creationTime != null) ? SftpConstants.SSH_FILEXFER_ATTR_CREATETIME : 0) | ((lastAccessTime != null) ? SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME : 0) | ((acl != null) ? SftpConstants.SSH_FILEXFER_ATTR_ACL : 0) | ((extensions != null) ? 
SftpConstants.SSH_FILEXFER_ATTR_EXTENDED : 0); buffer.putInt(flags); buffer.putByte((byte) (isReg ? SftpConstants.SSH_FILEXFER_TYPE_REGULAR : isDir ? SftpConstants.SSH_FILEXFER_TYPE_DIRECTORY : isLnk ? SftpConstants.SSH_FILEXFER_TYPE_SYMLINK : SftpConstants.SSH_FILEXFER_TYPE_UNKNOWN)); if ((flags & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { buffer.putLong(size.longValue()); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP) != 0) { buffer.putString( Objects.toString(attributes.get(IoUtils.OWNER_VIEW_ATTR), SftpUniversalOwnerAndGroup.Owner.getName())); buffer.putString( Objects.toString(attributes.get(IoUtils.GROUP_VIEW_ATTR), SftpUniversalOwnerAndGroup.Group.getName())); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { buffer.putInt(attributesToPermissions(isReg, isDir, isLnk, perms)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME) != 0) { buffer = writeTime(buffer, version, flags, lastAccessTime); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_CREATETIME) != 0) { buffer = writeTime(buffer, version, flags, creationTime); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME) != 0) { buffer = writeTime(buffer, version, flags, lastModifiedTime); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACL) != 0) { buffer = writeACLs(buffer, version, acl); } // TODO: ctime // TODO: bits if ((flags & SftpConstants.SSH_FILEXFER_ATTR_EXTENDED) != 0) { buffer = writeExtensions(buffer, extensions); } return buffer; } public static <B extends Buffer> B writeAttributes(B buffer, Attributes attributes, int sftpVersion) { int flagsMask = 0; Collection<Attribute> flags = Objects.requireNonNull(attributes, "No attributes").getFlags(); if (sftpVersion == SftpConstants.SFTP_V3) { for (Attribute a : flags) { switch (a) { case Size: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_SIZE; break; case UidGid: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_UIDGID; break; case Perms: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS; break; case AccessTime: if (flags.contains(Attribute.ModifyTime)) { flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME; } break; case ModifyTime: if (flags.contains(Attribute.AccessTime)) { flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME; } break; case Extensions: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_EXTENDED; break; default: // do nothing } } buffer.putInt(flagsMask); if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { buffer.putLong(attributes.getSize()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_UIDGID) != 0) { buffer.putInt(attributes.getUserId()); buffer.putInt(attributes.getGroupId()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { buffer.putInt(attributes.getPermissions()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME) != 0) { buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getAccessTime()); buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getModifyTime()); } } else if (sftpVersion >= SftpConstants.SFTP_V4) { for (Attribute a : flags) { switch (a) { case Size: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_SIZE; break; case OwnerGroup: { /* * According to * https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/draft-ietf-secsh-filexfer-13.txt * section 7.5 * * If either the owner or group field is zero length, the field should be considered absent, and no * change should be made to that specific field during a modification operation. 
*/ String owner = attributes.getOwner(); String group = attributes.getGroup(); if (GenericUtils.isNotEmpty(owner) && GenericUtils.isNotEmpty(group)) { flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP; } break; } case Perms: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS; break; case AccessTime: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME; break; case ModifyTime: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME; break; case CreateTime: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_CREATETIME; break; case Acl: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_ACL; break; case Extensions: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_EXTENDED; break; default: // do nothing } } buffer.putInt(flagsMask); buffer.putByte((byte) attributes.getType()); if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { buffer.putLong(attributes.getSize()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP) != 0) { String owner = attributes.getOwner(); buffer.putString(owner); String group = attributes.getGroup(); buffer.putString(group); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { buffer.putInt(attributes.getPermissions()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME) != 0) { buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getAccessTime()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_CREATETIME) != 0) { buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getCreateTime()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME) != 0) { buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getModifyTime()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_ACL) != 0) { buffer = SftpHelper.writeACLs(buffer, sftpVersion, attributes.getAcl()); } // TODO: for v5 ? 6? add CTIME (see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#page-16 - v6) } else { throw new UnsupportedOperationException("writeAttributes(" + attributes + ") unsupported version: " + sftpVersion); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_EXTENDED) != 0) { buffer = SftpHelper.writeExtensions(buffer, attributes.getExtensions()); } return buffer; } /** * @param bool The {@link Boolean} value * @return {@code true} it the argument is non-{@code null} and its {@link Boolean#booleanValue()} is * {@code true} */ public static boolean getBool(Boolean bool) { return bool != null && bool; } /** * Converts a file / folder's attributes into a mask * * @param isReg {@code true} if this is a normal file * @param isDir {@code true} if this is a directory * @param isLnk {@code true} if this is a symbolic link * @param perms The file / folder's access {@link PosixFilePermission}s * @return A mask encoding the file / folder's attributes */ public static int attributesToPermissions( boolean isReg, boolean isDir, boolean isLnk, Collection<PosixFilePermission> perms) { int pf = 0; if (perms != null) { for (PosixFilePermission p : perms) { switch (p) { case OWNER_READ: pf |= SftpConstants.S_IRUSR; break; case OWNER_WRITE: pf |= SftpConstants.S_IWUSR; break; case OWNER_EXECUTE: pf |= SftpConstants.S_IXUSR; break; case GROUP_READ: pf |= SftpConstants.S_IRGRP; break; case GROUP_WRITE: pf |= SftpConstants.S_IWGRP; break; case GROUP_EXECUTE: pf |= SftpConstants.S_IXGRP; break; case OTHERS_READ: pf |= SftpConstants.S_IROTH; break; case OTHERS_WRITE: pf |= SftpConstants.S_IWOTH; break; case OTHERS_EXECUTE: pf |= SftpConstants.S_IXOTH; break; default: // ignored } } } pf |= isReg ? 
SftpConstants.S_IFREG : 0; pf |= isDir ? SftpConstants.S_IFDIR : 0; pf |= isLnk ? SftpConstants.S_IFLNK : 0; return pf; } /** * Converts a POSIX permissions mask to a file type value * * @param perms The POSIX permissions mask * @return The file type - see {@code SSH_FILEXFER_TYPE_xxx} values */ public static int permissionsToFileType(int perms) { if ((SftpConstants.S_IFLNK & perms) == SftpConstants.S_IFLNK) { return SftpConstants.SSH_FILEXFER_TYPE_SYMLINK; } else if ((SftpConstants.S_IFREG & perms) == SftpConstants.S_IFREG) { return SftpConstants.SSH_FILEXFER_TYPE_REGULAR; } else if ((SftpConstants.S_IFDIR & perms) == SftpConstants.S_IFDIR) { return SftpConstants.SSH_FILEXFER_TYPE_DIRECTORY; } else if ((SftpConstants.S_IFSOCK & perms) == SftpConstants.S_IFSOCK) { return SftpConstants.SSH_FILEXFER_TYPE_SOCKET; } else if ((SftpConstants.S_IFBLK & perms) == SftpConstants.S_IFBLK) { return SftpConstants.SSH_FILEXFER_TYPE_BLOCK_DEVICE; } else if ((SftpConstants.S_IFCHR & perms) == SftpConstants.S_IFCHR) { return SftpConstants.SSH_FILEXFER_TYPE_CHAR_DEVICE; } else if ((SftpConstants.S_IFIFO & perms) == SftpConstants.S_IFIFO) { return SftpConstants.SSH_FILEXFER_TYPE_FIFO; } else { return SftpConstants.SSH_FILEXFER_TYPE_UNKNOWN; } } /** * Converts a file type into a POSIX permission mask value * * @param type File type - see {@code SSH_FILEXFER_TYPE_xxx} values * @return The matching POSIX permission mask value */ public static int fileTypeToPermission(int type) { switch (type) { case SftpConstants.SSH_FILEXFER_TYPE_REGULAR: return SftpConstants.S_IFREG; case SftpConstants.SSH_FILEXFER_TYPE_DIRECTORY: return SftpConstants.S_IFDIR; case SftpConstants.SSH_FILEXFER_TYPE_SYMLINK: return SftpConstants.S_IFLNK; case SftpConstants.SSH_FILEXFER_TYPE_SOCKET: return SftpConstants.S_IFSOCK; case SftpConstants.SSH_FILEXFER_TYPE_BLOCK_DEVICE: return SftpConstants.S_IFBLK; case SftpConstants.SSH_FILEXFER_TYPE_CHAR_DEVICE: return SftpConstants.S_IFCHR; case SftpConstants.SSH_FILEXFER_TYPE_FIFO: return SftpConstants.S_IFIFO; default: return 0; } } /** * Translates a mask of permissions into its enumeration values equivalents * * @param perms The permissions mask * @return A {@link Set} of the equivalent {@link PosixFilePermission}s */ public static Set<PosixFilePermission> permissionsToAttributes(int perms) { Set<PosixFilePermission> p = EnumSet.noneOf(PosixFilePermission.class); if ((perms & SftpConstants.S_IRUSR) != 0) { p.add(PosixFilePermission.OWNER_READ); } if ((perms & SftpConstants.S_IWUSR) != 0) { p.add(PosixFilePermission.OWNER_WRITE); } if ((perms & SftpConstants.S_IXUSR) != 0) { p.add(PosixFilePermission.OWNER_EXECUTE); } if ((perms & SftpConstants.S_IRGRP) != 0) { p.add(PosixFilePermission.GROUP_READ); } if ((perms & SftpConstants.S_IWGRP) != 0) { p.add(PosixFilePermission.GROUP_WRITE); } if ((perms & SftpConstants.S_IXGRP) != 0) { p.add(PosixFilePermission.GROUP_EXECUTE); } if ((perms & SftpConstants.S_IROTH) != 0) { p.add(PosixFilePermission.OTHERS_READ); } if ((perms & SftpConstants.S_IWOTH) != 0) { p.add(PosixFilePermission.OTHERS_WRITE); } if ((perms & SftpConstants.S_IXOTH) != 0) { p.add(PosixFilePermission.OTHERS_EXECUTE); } return p; } /** * Returns the most adequate sub-status for the provided exception * * @param t The thrown {@link Throwable} * @return The matching sub-status */ @SuppressWarnings("checkstyle:ReturnCount") public static int resolveSubstatus(Throwable t) { if ((t instanceof NoSuchFileException) || (t instanceof FileNotFoundException)) { return 
SftpConstants.SSH_FX_NO_SUCH_FILE; } else if (t instanceof InvalidHandleException) { return SftpConstants.SSH_FX_INVALID_HANDLE; } else if (t instanceof FileAlreadyExistsException) { return SftpConstants.SSH_FX_FILE_ALREADY_EXISTS; } else if (t instanceof DirectoryNotEmptyException) { return SftpConstants.SSH_FX_DIR_NOT_EMPTY; } else if (t instanceof NotDirectoryException) { return SftpConstants.SSH_FX_NOT_A_DIRECTORY; } else if (t instanceof AccessDeniedException) { return SftpConstants.SSH_FX_PERMISSION_DENIED; } else if (t instanceof EOFException) { return SftpConstants.SSH_FX_EOF; } else if (t instanceof OverlappingFileLockException) { return SftpConstants.SSH_FX_LOCK_CONFLICT; } else if ((t instanceof UnsupportedOperationException) || (t instanceof UnknownServiceException)) { return SftpConstants.SSH_FX_OP_UNSUPPORTED; } else if (t instanceof InvalidPathException) { return SftpConstants.SSH_FX_INVALID_FILENAME; } else if (t instanceof IllegalArgumentException) { return SftpConstants.SSH_FX_INVALID_PARAMETER; } else if (t instanceof UserPrincipalNotFoundException) { return SftpConstants.SSH_FX_UNKNOWN_PRINCIPAL; } else if (t instanceof FileSystemLoopException) { return SftpConstants.SSH_FX_LINK_LOOP; } else if (t instanceof SftpException) { return ((SftpException) t).getStatus(); } else { return SftpConstants.SSH_FX_FAILURE; } } public static String resolveStatusMessage(int subStatus) { String message = DEFAULT_SUBSTATUS_MESSAGE.get(subStatus); return GenericUtils.isEmpty(message) ? ("Unknown error: " + subStatus) : message; } public static NavigableMap<String, Object> readAttrs(Buffer buffer, int version) { NavigableMap<String, Object> attrs = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); int flags = buffer.getInt(); if (version >= SftpConstants.SFTP_V4) { int type = buffer.getUByte(); switch (type) { case SftpConstants.SSH_FILEXFER_TYPE_REGULAR: attrs.put(IoUtils.REGFILE_VIEW_ATTR, Boolean.TRUE); break; case SftpConstants.SSH_FILEXFER_TYPE_DIRECTORY: attrs.put(IoUtils.DIRECTORY_VIEW_ATTR, Boolean.TRUE); break; case SftpConstants.SSH_FILEXFER_TYPE_SYMLINK: attrs.put(IoUtils.SYMLINK_VIEW_ATTR, Boolean.TRUE); break; case SftpConstants.SSH_FILEXFER_TYPE_SOCKET: case SftpConstants.SSH_FILEXFER_TYPE_CHAR_DEVICE: case SftpConstants.SSH_FILEXFER_TYPE_BLOCK_DEVICE: case SftpConstants.SSH_FILEXFER_TYPE_FIFO: attrs.put(IoUtils.OTHERFILE_VIEW_ATTR, Boolean.TRUE); break; default: // ignored } } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { attrs.put(IoUtils.SIZE_VIEW_ATTR, buffer.getLong()); } if (version == SftpConstants.SFTP_V3) { if ((flags & SftpConstants.SSH_FILEXFER_ATTR_UIDGID) != 0) { attrs.put(IoUtils.USERID_VIEW_ATTR, buffer.getInt()); attrs.put(IoUtils.GROUPID_VIEW_ATTR, buffer.getInt()); } } else { if ((version >= SftpConstants.SFTP_V6) && ((flags & SftpConstants.SSH_FILEXFER_ATTR_ALLOCATION_SIZE) != 0)) { @SuppressWarnings("unused") long allocSize = buffer.getLong(); // TODO handle allocation size } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP) != 0) { attrs.put(IoUtils.OWNER_VIEW_ATTR, new DefaultGroupPrincipal(buffer.getString())); attrs.put(IoUtils.GROUP_VIEW_ATTR, new DefaultGroupPrincipal(buffer.getString())); } } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { attrs.put(IoUtils.PERMISSIONS_VIEW_ATTR, permissionsToAttributes(buffer.getInt())); } if (version == SftpConstants.SFTP_V3) { if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME) != 0) { attrs.put(IoUtils.LASTACC_TIME_VIEW_ATTR, readTime(buffer, version, flags)); 
attrs.put(IoUtils.LASTMOD_TIME_VIEW_ATTR, readTime(buffer, version, flags)); } } else if (version >= SftpConstants.SFTP_V4) { if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME) != 0) { attrs.put(IoUtils.LASTACC_TIME_VIEW_ATTR, readTime(buffer, version, flags)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_CREATETIME) != 0) { attrs.put(IoUtils.CREATE_TIME_VIEW_ATTR, readTime(buffer, version, flags)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME) != 0) { attrs.put(IoUtils.LASTMOD_TIME_VIEW_ATTR, readTime(buffer, version, flags)); } // modification time sub-seconds if ((version >= SftpConstants.SFTP_V6) && (flags & SftpConstants.SSH_FILEXFER_ATTR_CTIME) != 0) { attrs.put("ctime", readTime(buffer, version, flags)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACL) != 0) { attrs.put(IoUtils.ACL_VIEW_ATTR, readACLs(buffer, version)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_BITS) != 0) { @SuppressWarnings("unused") int bits = buffer.getInt(); @SuppressWarnings("unused") int valid = 0xffffffff; if (version >= SftpConstants.SFTP_V6) { valid = buffer.getInt(); } // TODO: handle attrib bits } if (version >= SftpConstants.SFTP_V6) { if ((flags & SftpConstants.SSH_FILEXFER_ATTR_TEXT_HINT) != 0) { @SuppressWarnings("unused") boolean text = buffer.getBoolean(); // TODO: handle text } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_MIME_TYPE) != 0) { @SuppressWarnings("unused") String mimeType = buffer.getString(); // TODO: handle mime-type } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_LINK_COUNT) != 0) { @SuppressWarnings("unused") int nlink = buffer.getInt(); // TODO: handle link-count } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_UNTRANSLATED_NAME) != 0) { @SuppressWarnings("unused") String untranslated = buffer.getString(); // TODO: handle untranslated-name } } } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_EXTENDED) != 0) { attrs.put(IoUtils.EXTENDED_VIEW_ATTR, readExtensions(buffer)); } return attrs; } public static NavigableMap<String, byte[]> readExtensions(Buffer buffer) { int count = buffer.getInt(); // Protect against malicious or malformed packets if ((count < 0) || (count > SshConstants.SSH_REQUIRED_PAYLOAD_PACKET_LENGTH_SUPPORT)) { throw new IndexOutOfBoundsException("Illogical extensions count: " + count); } // NOTE NavigableMap<String, byte[]> extended = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (int i = 1; i <= count; i++) { String key = buffer.getString(); byte[] val = buffer.getBytes(); byte[] prev = extended.put(key, val); ValidateUtils.checkTrue(prev == null, "Duplicate values for extended key=%s", key); } return extended; } public static <B extends Buffer> B writeExtensions(B buffer, Map<?, ?> extensions) { int numExtensions = MapEntryUtils.size(extensions); buffer.putInt(numExtensions); if (numExtensions <= 0) { return buffer; } for (Map.Entry<?, ?> ee : extensions.entrySet()) { Object key = Objects.requireNonNull(ee.getKey(), "No extension type"); Object value = Objects.requireNonNull(ee.getValue(), "No extension value"); buffer.putString(key.toString()); if (value instanceof byte[]) { buffer.putBytes((byte[]) value); } else { buffer.putString(value.toString()); } } return buffer; } public static NavigableMap<String, String> toStringExtensions(Map<String, ?> extensions) { if (MapEntryUtils.isEmpty(extensions)) { return Collections.emptyNavigableMap(); } // NOTE: even though extensions are probably case sensitive we do not allow duplicate name that differs only in // case NavigableMap<String, String> map = new 
TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (Map.Entry<?, ?> ee : extensions.entrySet()) { Object key = Objects.requireNonNull(ee.getKey(), "No extension type"); Object value = ValidateUtils.checkNotNull(ee.getValue(), "No value for extension=%s", key); String prev = map.put(key.toString(), (value instanceof byte[]) ? new String((byte[]) value, StandardCharsets.UTF_8) : value.toString()); ValidateUtils.checkTrue(prev == null, "Multiple values for extension=%s", key); } return map; } public static NavigableMap<String, byte[]> toBinaryExtensions(Map<String, String> extensions) { if (MapEntryUtils.isEmpty(extensions)) { return Collections.emptyNavigableMap(); } // NOTE: even though extensions are probably case sensitive we do not allow duplicate name that differs only in // case NavigableMap<String, byte[]> map = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); extensions.forEach((key, value) -> { ValidateUtils.checkNotNull(value, "No value for extension=%s", key); byte[] prev = map.put(key, value.getBytes(StandardCharsets.UTF_8)); ValidateUtils.checkTrue(prev == null, "Multiple values for extension=%s", key); }); return map; } // for v4,5 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-05#page-15 // for v6 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#page-21 public static List<AclEntry> readACLs(Buffer buffer, int version) { int aclSize = buffer.getInt(); // Protect against malicious or malformed packets if ((aclSize < 0) || (aclSize > (2 * SshConstants.SSH_REQUIRED_PAYLOAD_PACKET_LENGTH_SUPPORT))) { throw new IndexOutOfBoundsException("Illogical ACL entries size: " + aclSize); } int startPos = buffer.rpos(); Buffer aclBuffer = new ByteArrayBuffer(buffer.array(), startPos, aclSize, true); List<AclEntry> acl = decodeACLs(aclBuffer, version); buffer.rpos(startPos + aclSize); return acl; } public static List<AclEntry> decodeACLs(Buffer buffer, int version) { @SuppressWarnings("unused") int aclFlags = 0; // TODO handle ACL flags if (version >= SftpConstants.SFTP_V6) { aclFlags = buffer.getInt(); } int count = buffer.getInt(); /* * NOTE: although the value is defined as UINT32 we do not expected a count greater than several hundreds + * protect against malicious or corrupted packets */ if ((count < 0) || (count > SshConstants.SSH_REQUIRED_PAYLOAD_PACKET_LENGTH_SUPPORT)) { throw new IndexOutOfBoundsException("Illogical ACL entries count: " + count); } ValidateUtils.checkTrue(count >= 0, "Invalid ACL entries count: %d", count); if (count == 0) { return Collections.emptyList(); } List<AclEntry> acls = new ArrayList<>(count); for (int i = 1; i <= count; i++) { int aclType = buffer.getInt(); int aclFlag = buffer.getInt(); int aclMask = buffer.getInt(); String aclWho = buffer.getString(); acls.add(buildAclEntry(aclType, aclFlag, aclMask, aclWho)); } return acls; } public static AclEntry buildAclEntry(int aclType, int aclFlag, int aclMask, String aclWho) { UserPrincipal who = new DefaultGroupPrincipal(aclWho); return AclEntry.newBuilder() .setType(ValidateUtils.checkNotNull(decodeAclEntryType(aclType), "Unknown ACL type: %d", aclType)) .setFlags(decodeAclFlags(aclFlag)) .setPermissions(decodeAclMask(aclMask)) .setPrincipal(who) .build(); } /** * @param aclType The {@code ACE4_ACCESS_xxx_ACE_TYPE} value * @return The matching {@link AclEntryType} or {@code null} if unknown value */ public static AclEntryType decodeAclEntryType(int aclType) { switch (aclType) { case SftpConstants.ACE4_ACCESS_ALLOWED_ACE_TYPE: return AclEntryType.ALLOW; case 
SftpConstants.ACE4_ACCESS_DENIED_ACE_TYPE: return AclEntryType.DENY; case SftpConstants.ACE4_SYSTEM_AUDIT_ACE_TYPE: return AclEntryType.AUDIT; case SftpConstants.ACE4_SYSTEM_ALARM_ACE_TYPE: return AclEntryType.ALARM; default: return null; } } public static Set<AclEntryFlag> decodeAclFlags(int aclFlag) { Set<AclEntryFlag> flags = EnumSet.noneOf(AclEntryFlag.class); if ((aclFlag & SftpConstants.ACE4_FILE_INHERIT_ACE) != 0) { flags.add(AclEntryFlag.FILE_INHERIT); } if ((aclFlag & SftpConstants.ACE4_DIRECTORY_INHERIT_ACE) != 0) { flags.add(AclEntryFlag.DIRECTORY_INHERIT); } if ((aclFlag & SftpConstants.ACE4_NO_PROPAGATE_INHERIT_ACE) != 0) { flags.add(AclEntryFlag.NO_PROPAGATE_INHERIT); } if ((aclFlag & SftpConstants.ACE4_INHERIT_ONLY_ACE) != 0) { flags.add(AclEntryFlag.INHERIT_ONLY); } return flags; } public static Set<AclEntryPermission> decodeAclMask(int aclMask) { Set<AclEntryPermission> mask = EnumSet.noneOf(AclEntryPermission.class); if ((aclMask & SftpConstants.ACE4_READ_DATA) != 0) { mask.add(AclEntryPermission.READ_DATA); } if ((aclMask & SftpConstants.ACE4_LIST_DIRECTORY) != 0) { mask.add(AclEntryPermission.LIST_DIRECTORY); } if ((aclMask & SftpConstants.ACE4_WRITE_DATA) != 0) { mask.add(AclEntryPermission.WRITE_DATA); } if ((aclMask & SftpConstants.ACE4_ADD_FILE) != 0) { mask.add(AclEntryPermission.ADD_FILE); } if ((aclMask & SftpConstants.ACE4_APPEND_DATA) != 0) { mask.add(AclEntryPermission.APPEND_DATA); } if ((aclMask & SftpConstants.ACE4_ADD_SUBDIRECTORY) != 0) { mask.add(AclEntryPermission.ADD_SUBDIRECTORY); } if ((aclMask & SftpConstants.ACE4_READ_NAMED_ATTRS) != 0) { mask.add(AclEntryPermission.READ_NAMED_ATTRS); } if ((aclMask & SftpConstants.ACE4_WRITE_NAMED_ATTRS) != 0) { mask.add(AclEntryPermission.WRITE_NAMED_ATTRS); } if ((aclMask & SftpConstants.ACE4_EXECUTE) != 0) { mask.add(AclEntryPermission.EXECUTE); } if ((aclMask & SftpConstants.ACE4_DELETE_CHILD) != 0) { mask.add(AclEntryPermission.DELETE_CHILD); } if ((aclMask & SftpConstants.ACE4_READ_ATTRIBUTES) != 0) { mask.add(AclEntryPermission.READ_ATTRIBUTES); } if ((aclMask & SftpConstants.ACE4_WRITE_ATTRIBUTES) != 0) { mask.add(AclEntryPermission.WRITE_ATTRIBUTES); } if ((aclMask & SftpConstants.ACE4_DELETE) != 0) { mask.add(AclEntryPermission.DELETE); } if ((aclMask & SftpConstants.ACE4_READ_ACL) != 0) { mask.add(AclEntryPermission.READ_ACL); } if ((aclMask & SftpConstants.ACE4_WRITE_ACL) != 0) { mask.add(AclEntryPermission.WRITE_ACL); } if ((aclMask & SftpConstants.ACE4_WRITE_OWNER) != 0) { mask.add(AclEntryPermission.WRITE_OWNER); } if ((aclMask & SftpConstants.ACE4_SYNCHRONIZE) != 0) { mask.add(AclEntryPermission.SYNCHRONIZE); } return mask; } public static <B extends Buffer> B writeACLs(B buffer, int version, Collection<? extends AclEntry> acl) { int lenPos = buffer.wpos(); buffer.putInt(0); // length placeholder buffer = encodeACLs(buffer, version, acl); BufferUtils.updateLengthPlaceholder(buffer, lenPos); return buffer; } public static <B extends Buffer> B encodeACLs(B buffer, int version, Collection<? 
extends AclEntry> acl) { Objects.requireNonNull(acl, "No ACL"); if (version >= SftpConstants.SFTP_V6) { buffer.putInt(0); // TODO handle ACL flags } int numEntries = GenericUtils.size(acl); buffer.putInt(numEntries); if (numEntries > 0) { for (AclEntry e : acl) { buffer = writeAclEntry(buffer, e); } } return buffer; } public static <B extends Buffer> B writeAclEntry(B buffer, AclEntry acl) { Objects.requireNonNull(acl, "No ACL"); AclEntryType type = acl.type(); int aclType = encodeAclEntryType(type); ValidateUtils.checkTrue(aclType >= 0, "Unknown ACL type: %s", type); buffer.putInt(aclType); buffer.putInt(encodeAclFlags(acl.flags())); buffer.putInt(encodeAclMask(acl.permissions())); Principal user = acl.principal(); buffer.putString(user.getName()); return buffer; } /** * Returns the equivalent SFTP value for the ACL type * * @param type The {@link AclEntryType} * @return The equivalent {@code ACE_SYSTEM_xxx_TYPE} or negative if {@code null} or unknown type */ public static int encodeAclEntryType(AclEntryType type) { if (type == null) { return Integer.MIN_VALUE; } switch (type) { case ALARM: return SftpConstants.ACE4_SYSTEM_ALARM_ACE_TYPE; case ALLOW: return SftpConstants.ACE4_ACCESS_ALLOWED_ACE_TYPE; case AUDIT: return SftpConstants.ACE4_SYSTEM_AUDIT_ACE_TYPE; case DENY: return SftpConstants.ACE4_ACCESS_DENIED_ACE_TYPE; default: return -1; } } public static long encodeAclFlags(Collection<AclEntryFlag> flags) { if (GenericUtils.isEmpty(flags)) { return 0L; } long aclFlag = 0L; if (flags.contains(AclEntryFlag.FILE_INHERIT)) { aclFlag |= SftpConstants.ACE4_FILE_INHERIT_ACE; } if (flags.contains(AclEntryFlag.DIRECTORY_INHERIT)) { aclFlag |= SftpConstants.ACE4_DIRECTORY_INHERIT_ACE; } if (flags.contains(AclEntryFlag.NO_PROPAGATE_INHERIT)) { aclFlag |= SftpConstants.ACE4_NO_PROPAGATE_INHERIT_ACE; } if (flags.contains(AclEntryFlag.INHERIT_ONLY)) { aclFlag |= SftpConstants.ACE4_INHERIT_ONLY_ACE; } return aclFlag; } public static long encodeAclMask(Collection<AclEntryPermission> mask) { if (GenericUtils.isEmpty(mask)) { return 0L; } long aclMask = 0L; if (mask.contains(AclEntryPermission.READ_DATA)) { aclMask |= SftpConstants.ACE4_READ_DATA; } if (mask.contains(AclEntryPermission.LIST_DIRECTORY)) { aclMask |= SftpConstants.ACE4_LIST_DIRECTORY; } if (mask.contains(AclEntryPermission.WRITE_DATA)) { aclMask |= SftpConstants.ACE4_WRITE_DATA; } if (mask.contains(AclEntryPermission.ADD_FILE)) { aclMask |= SftpConstants.ACE4_ADD_FILE; } if (mask.contains(AclEntryPermission.APPEND_DATA)) { aclMask |= SftpConstants.ACE4_APPEND_DATA; } if (mask.contains(AclEntryPermission.ADD_SUBDIRECTORY)) { aclMask |= SftpConstants.ACE4_ADD_SUBDIRECTORY; } if (mask.contains(AclEntryPermission.READ_NAMED_ATTRS)) { aclMask |= SftpConstants.ACE4_READ_NAMED_ATTRS; } if (mask.contains(AclEntryPermission.WRITE_NAMED_ATTRS)) { aclMask |= SftpConstants.ACE4_WRITE_NAMED_ATTRS; } if (mask.contains(AclEntryPermission.EXECUTE)) { aclMask |= SftpConstants.ACE4_EXECUTE; } if (mask.contains(AclEntryPermission.DELETE_CHILD)) { aclMask |= SftpConstants.ACE4_DELETE_CHILD; } if (mask.contains(AclEntryPermission.READ_ATTRIBUTES)) { aclMask |= SftpConstants.ACE4_READ_ATTRIBUTES; } if (mask.contains(AclEntryPermission.WRITE_ATTRIBUTES)) { aclMask |= SftpConstants.ACE4_WRITE_ATTRIBUTES; } if (mask.contains(AclEntryPermission.DELETE)) { aclMask |= SftpConstants.ACE4_DELETE; } if (mask.contains(AclEntryPermission.READ_ACL)) { aclMask |= SftpConstants.ACE4_READ_ACL; } if (mask.contains(AclEntryPermission.WRITE_ACL)) { aclMask |= 
SftpConstants.ACE4_WRITE_ACL; } if (mask.contains(AclEntryPermission.WRITE_OWNER)) { aclMask |= SftpConstants.ACE4_WRITE_OWNER; } if (mask.contains(AclEntryPermission.SYNCHRONIZE)) { aclMask |= SftpConstants.ACE4_SYNCHRONIZE; } return aclMask; } /** * Encodes a {@link FileTime} value into a buffer * * @param <B> Type of {@link Buffer} being updated * @param buffer The target buffer instance * @param version The encoding version * @param flags The encoding flags * @param time The value to encode * @return The updated buffer */ public static <B extends Buffer> B writeTime(B buffer, int version, int flags, FileTime time) { // for v3 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-02#page-8 // for v6 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#page-16 if (version >= SftpConstants.SFTP_V4) { buffer.putLong(time.to(TimeUnit.SECONDS)); if ((flags & SftpConstants.SSH_FILEXFER_ATTR_SUBSECOND_TIMES) != 0) { long nanos = time.to(TimeUnit.NANOSECONDS); nanos = nanos % TimeUnit.SECONDS.toNanos(1); buffer.putInt((int) nanos); } } else { buffer.putInt(time.to(TimeUnit.SECONDS)); } return buffer; } /** * Decodes a {@link FileTime} value from a buffer * * @param buffer The source {@link Buffer} * @param version The encoding version * @param flags The encoding flags * @return The decoded value */ public static FileTime readTime(Buffer buffer, int version, int flags) { // for v3 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-02#page-8 // for v6 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#page-16 long secs = (version >= SftpConstants.SFTP_V4) ? buffer.getLong() : buffer.getUInt(); long millis = TimeUnit.SECONDS.toMillis(secs); if ((version >= SftpConstants.SFTP_V4) && ((flags & SftpConstants.SSH_FILEXFER_ATTR_SUBSECOND_TIMES) != 0)) { long nanoseconds = buffer.getUInt(); millis += TimeUnit.NANOSECONDS.toMillis(nanoseconds); } return FileTime.from(millis, TimeUnit.MILLISECONDS); } /** * Creates an &quot;ls -l&quot; compatible long name string * * @param shortName The short file name - can also be &quot;.&quot; or &quot;..&quot; * @param attributes The file's attributes - e.g., size, owner, permissions, etc. 
* @return A {@link String} representing the &quot;long&quot; file name as per * <A HREF="https://tools.ietf.org/html/draft-ietf-secsh-filexfer-02">SFTP version 3 - section * 7</A> */ public static String getLongName(String shortName, Map<String, ?> attributes) { String owner = Objects.toString(attributes.get(IoUtils.OWNER_VIEW_ATTR), null); String username = OsUtils.getCanonicalUser(owner); if (GenericUtils.isEmpty(username)) { username = SftpUniversalOwnerAndGroup.Owner.getName(); } String group = Objects.toString(attributes.get(IoUtils.GROUP_VIEW_ATTR), null); group = OsUtils.resolveCanonicalGroup(group, owner); if (GenericUtils.isEmpty(group)) { group = SftpUniversalOwnerAndGroup.Group.getName(); } Number length = (Number) attributes.get(IoUtils.SIZE_VIEW_ATTR); if (length == null) { length = 0L; } String lengthString = String.format("%1$8s", length); String linkCount = Objects.toString(attributes.get(IoUtils.NUMLINKS_VIEW_ATTR), null); if (GenericUtils.isEmpty(linkCount)) { linkCount = "1"; } Boolean isDirectory = (Boolean) attributes.get(IoUtils.DIRECTORY_VIEW_ATTR); Boolean isLink = (Boolean) attributes.get(IoUtils.SYMLINK_VIEW_ATTR); @SuppressWarnings("unchecked") Set<PosixFilePermission> perms = (Set<PosixFilePermission>) attributes.get(IoUtils.PERMISSIONS_VIEW_ATTR); if (perms == null) { perms = EnumSet.noneOf(PosixFilePermission.class); } String permsString = PosixFilePermissions.toString(perms); String timeStamp = UnixDateFormat.getUnixDate((FileTime) attributes.get(IoUtils.LASTMOD_TIME_VIEW_ATTR)); StringBuilder sb = new StringBuilder( GenericUtils.length(linkCount) + GenericUtils.length(username) + GenericUtils.length(group) + GenericUtils.length(timeStamp) + GenericUtils.length(lengthString) + GenericUtils.length(permsString) + GenericUtils.length(shortName) + Integer.SIZE); sb.append(SftpHelper.getBool(isDirectory) ? 'd' : (SftpHelper.getBool(isLink) ? 'l' : '-')).append(permsString); sb.append(' '); for (int index = linkCount.length(); index < 3; index++) { sb.append(' '); } sb.append(linkCount); sb.append(' ').append(username); for (int index = username.length(); index < 8; index++) { sb.append(' '); } sb.append(' ').append(group); for (int index = group.length(); index < 8; index++) { sb.append(' '); } sb.append(' ').append(lengthString).append(' ').append(timeStamp).append(' ').append(shortName); return sb.toString(); } }
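The file above ends with SftpHelper.getLongName(...), which assembles an "ls -l" style listing line from a map of view attributes. A hypothetical, self-contained usage sketch follows; it is not part of the commit, the class name LongNameDemo and the sample values are invented for illustration, and it only assumes the constants and the method signature that appear in the file itself.

// Hypothetical illustration of getLongName(); requires sshd-sftp on the classpath.
// The attribute keys are the IoUtils view-attribute constants used by readAttrs() above.
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Map;
import java.util.TreeMap;
import org.apache.sshd.common.util.io.IoUtils;
import org.apache.sshd.sftp.common.SftpHelper;

public class LongNameDemo {
    public static void main(String[] args) {
        Map<String, Object> attrs = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        attrs.put(IoUtils.SIZE_VIEW_ATTR, 1024L);
        attrs.put(IoUtils.PERMISSIONS_VIEW_ATTR, PosixFilePermissions.fromString("rw-r--r--"));
        attrs.put(IoUtils.LASTMOD_TIME_VIEW_ATTR, FileTime.fromMillis(System.currentTimeMillis()));
        // Owner/group are deliberately omitted, so the universal defaults from
        // SftpUniversalOwnerAndGroup are substituted by getLongName().
        System.out.println(SftpHelper.getLongName("report.txt", attrs));
        // Prints roughly: -rw-r--r--   1 OWNER@   GROUP@       1024 <date> report.txt
    }
}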
sshd-sftp/src/main/java/org/apache/sshd/sftp/common/SftpHelper.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sshd.sftp.common; import java.io.EOFException; import java.io.FileNotFoundException; import java.net.UnknownServiceException; import java.nio.channels.OverlappingFileLockException; import java.nio.charset.StandardCharsets; import java.nio.file.AccessDeniedException; import java.nio.file.DirectoryNotEmptyException; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystemLoopException; import java.nio.file.InvalidPathException; import java.nio.file.NoSuchFileException; import java.nio.file.NotDirectoryException; import java.nio.file.attribute.AclEntry; import java.nio.file.attribute.AclEntryFlag; import java.nio.file.attribute.AclEntryPermission; import java.nio.file.attribute.AclEntryType; import java.nio.file.attribute.FileTime; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; import java.nio.file.attribute.UserPrincipal; import java.nio.file.attribute.UserPrincipalNotFoundException; import java.security.Principal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Objects; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.TimeUnit; import org.apache.sshd.common.PropertyResolver; import org.apache.sshd.common.SshConstants; import org.apache.sshd.common.util.GenericUtils; import org.apache.sshd.common.util.MapEntryUtils; import org.apache.sshd.common.util.OsUtils; import org.apache.sshd.common.util.ValidateUtils; import org.apache.sshd.common.util.buffer.Buffer; import org.apache.sshd.common.util.buffer.BufferUtils; import org.apache.sshd.common.util.buffer.ByteArrayBuffer; import org.apache.sshd.common.util.io.IoUtils; import org.apache.sshd.sftp.SftpModuleProperties; import org.apache.sshd.sftp.client.SftpClient.Attribute; import org.apache.sshd.sftp.client.SftpClient.Attributes; import org.apache.sshd.sftp.server.DefaultGroupPrincipal; import org.apache.sshd.sftp.server.InvalidHandleException; import org.apache.sshd.sftp.server.UnixDateFormat; /** * @author <a href="mailto:dev@mina.apache.org">Apache MINA SSHD Project</a> */ public final class SftpHelper { public static final Map<Integer, String> DEFAULT_SUBSTATUS_MESSAGE; static { Map<Integer, String> map = new TreeMap<>(Comparator.naturalOrder()); map.put(SftpConstants.SSH_FX_OK, "Success"); map.put(SftpConstants.SSH_FX_EOF, "End of file"); map.put(SftpConstants.SSH_FX_NO_SUCH_FILE, "No such file or directory"); map.put(SftpConstants.SSH_FX_PERMISSION_DENIED, "Permission denied"); map.put(SftpConstants.SSH_FX_FAILURE, "General failure"); 
map.put(SftpConstants.SSH_FX_BAD_MESSAGE, "Bad message data"); map.put(SftpConstants.SSH_FX_NO_CONNECTION, "No connection to server"); map.put(SftpConstants.SSH_FX_CONNECTION_LOST, "Connection lost"); map.put(SftpConstants.SSH_FX_OP_UNSUPPORTED, "Unsupported operation requested"); map.put(SftpConstants.SSH_FX_INVALID_HANDLE, "Invalid handle value"); map.put(SftpConstants.SSH_FX_NO_SUCH_PATH, "No such path"); map.put(SftpConstants.SSH_FX_FILE_ALREADY_EXISTS, "File/Directory already exists"); map.put(SftpConstants.SSH_FX_WRITE_PROTECT, "File/Directory is write-protected"); map.put(SftpConstants.SSH_FX_NO_MEDIA, "No such meadia"); map.put(SftpConstants.SSH_FX_NO_SPACE_ON_FILESYSTEM, "No space left on device"); map.put(SftpConstants.SSH_FX_QUOTA_EXCEEDED, "Quota exceeded"); map.put(SftpConstants.SSH_FX_UNKNOWN_PRINCIPAL, "Unknown user/group"); map.put(SftpConstants.SSH_FX_LOCK_CONFLICT, "Lock conflict"); map.put(SftpConstants.SSH_FX_DIR_NOT_EMPTY, "Directory not empty"); map.put(SftpConstants.SSH_FX_NOT_A_DIRECTORY, "Accessed location is not a directory"); map.put(SftpConstants.SSH_FX_INVALID_FILENAME, "Invalid filename"); map.put(SftpConstants.SSH_FX_LINK_LOOP, "Link loop"); map.put(SftpConstants.SSH_FX_CANNOT_DELETE, "Cannot remove"); map.put(SftpConstants.SSH_FX_INVALID_PARAMETER, "Invalid parameter"); map.put(SftpConstants.SSH_FX_FILE_IS_A_DIRECTORY, "Accessed location is a directory"); map.put(SftpConstants.SSH_FX_BYTE_RANGE_LOCK_CONFLICT, "Range lock conflict"); map.put(SftpConstants.SSH_FX_BYTE_RANGE_LOCK_REFUSED, "Range lock refused"); map.put(SftpConstants.SSH_FX_DELETE_PENDING, "Delete pending"); map.put(SftpConstants.SSH_FX_FILE_CORRUPT, "Corrupted file/directory"); map.put(SftpConstants.SSH_FX_OWNER_INVALID, "Invalid file/directory owner"); map.put(SftpConstants.SSH_FX_GROUP_INVALID, "Invalid file/directory group"); map.put(SftpConstants.SSH_FX_NO_MATCHING_BYTE_RANGE_LOCK, "No matching byte range lock"); DEFAULT_SUBSTATUS_MESSAGE = Collections.unmodifiableMap(map); } private SftpHelper() { throw new UnsupportedOperationException("No instance allowed"); } /** * Retrieves the end-of-file indicator for {@code SSH_FXP_DATA} responses, provided the version is at least 6, and * the buffer has enough available data * * @param buffer The {@link Buffer} to retrieve the data from * @param version The SFTP version being used * @return The indicator value - {@code null} if none retrieved * @see <A HREF="https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#section-9.3">SFTP v6 - section * 9.3</A> */ public static Boolean getEndOfFileIndicatorValue(Buffer buffer, int version) { return (version < SftpConstants.SFTP_V6) || (buffer.available() < 1) ? null : buffer.getBoolean(); } /** * Retrieves the end-of-list indicator for {@code SSH_FXP_NAME} responses, provided the version is at least 6, and * the buffer has enough available data * * @param buffer The {@link Buffer} to retrieve the data from * @param version The SFTP version being used * @return The indicator value - {@code null} if none retrieved * @see <A HREF="https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#section-9.4">SFTP v6 - section * 9.4</A> * @see #indicateEndOfNamesList(Buffer, int, PropertyResolver, boolean) */ public static Boolean getEndOfListIndicatorValue(Buffer buffer, int version) { return (version < SftpConstants.SFTP_V6) || (buffer.available() < 1) ? 
null : buffer.getBoolean(); } /** * Appends the end-of-list={@code TRUE} indicator for {@code SSH_FXP_NAME} responses, provided the version is at * least 6 and the feature is enabled * * @param buffer The {@link Buffer} to append the indicator * @param version The SFTP version being used * @param resolver The {@link PropertyResolver} to query whether to enable the feature * @return The actual indicator value used - {@code null} if none appended * @see #indicateEndOfNamesList(Buffer, int, PropertyResolver, boolean) */ public static Boolean indicateEndOfNamesList(Buffer buffer, int version, PropertyResolver resolver) { return indicateEndOfNamesList(buffer, version, resolver, true); } /** * Appends the end-of-list indicator for {@code SSH_FXP_NAME} responses, provided the version is at least 6, the * feature is enabled and the indicator value is not {@code null} * * @param buffer The {@link Buffer} to append the indicator * @param version The SFTP version being used * @param resolver The {@link PropertyResolver} to query whether to enable the feature * @param indicatorValue The indicator value - {@code null} means don't append the indicator * @return The actual indicator value used - {@code null} if none appended * @see <A HREF="https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#section-9.4">SFTP v6 - * section 9.4</A> * @see SftpModuleProperties#APPEND_END_OF_LIST_INDICATOR */ public static Boolean indicateEndOfNamesList( Buffer buffer, int version, PropertyResolver resolver, boolean indicatorValue) { if (version < SftpConstants.SFTP_V6) { return null; } if (!SftpModuleProperties.APPEND_END_OF_LIST_INDICATOR.getRequired(resolver)) { return null; } buffer.putBoolean(indicatorValue); return indicatorValue; } /** * Writes a file / folder's attributes to a buffer * * @param <B> Type of {@link Buffer} being updated * @param buffer The target buffer instance * @param version The output encoding version * @param attributes The {@link Map} of attributes * @return The updated buffer * @see #writeAttrsV3(Buffer, int, Map) * @see #writeAttrsV4(Buffer, int, Map) */ public static <B extends Buffer> B writeAttrs(B buffer, int version, Map<String, ?> attributes) { if (version == SftpConstants.SFTP_V3) { return writeAttrsV3(buffer, version, attributes); } else if (version >= SftpConstants.SFTP_V4) { return writeAttrsV4(buffer, version, attributes); } else { throw new IllegalStateException("Unsupported SFTP version: " + version); } } /** * Writes the retrieved file / directory attributes in V3 format * * @param <B> Type of {@link Buffer} being updated * @param buffer The target buffer instance * @param version The actual version - must be {@link SftpConstants#SFTP_V3} * @param attributes The {@link Map} of attributes * @return The updated buffer */ public static <B extends Buffer> B writeAttrsV3(B buffer, int version, Map<String, ?> attributes) { ValidateUtils.checkTrue(version == SftpConstants.SFTP_V3, "Illegal version: %d", version); boolean isReg = getBool((Boolean) attributes.get(IoUtils.REGFILE_VIEW_ATTR)); boolean isDir = getBool((Boolean) attributes.get(IoUtils.DIRECTORY_VIEW_ATTR)); boolean isLnk = getBool((Boolean) attributes.get(IoUtils.SYMLINK_VIEW_ATTR)); @SuppressWarnings("unchecked") Collection<PosixFilePermission> perms = (Collection<PosixFilePermission>) attributes.get(IoUtils.PERMISSIONS_VIEW_ATTR); Number size = (Number) attributes.get(IoUtils.SIZE_VIEW_ATTR); FileTime lastModifiedTime = (FileTime) attributes.get(IoUtils.LASTMOD_TIME_VIEW_ATTR); FileTime lastAccessTime = (FileTime) 
attributes.get(IoUtils.LASTACC_TIME_VIEW_ATTR); Map<?, ?> extensions = (Map<?, ?>) attributes.get(IoUtils.EXTENDED_VIEW_ATTR); int flags = ((isReg || isLnk) && (size != null) ? SftpConstants.SSH_FILEXFER_ATTR_SIZE : 0) | (attributes.containsKey(IoUtils.USERID_VIEW_ATTR) && attributes.containsKey(IoUtils.GROUPID_VIEW_ATTR) ? SftpConstants.SSH_FILEXFER_ATTR_UIDGID : 0) | ((perms != null) ? SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS : 0) | (((lastModifiedTime != null) && (lastAccessTime != null)) ? SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME : 0) | ((extensions != null) ? SftpConstants.SSH_FILEXFER_ATTR_EXTENDED : 0); buffer.putInt(flags); if ((flags & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { buffer.putLong(size.longValue()); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_UIDGID) != 0) { buffer.putInt(((Number) attributes.get(IoUtils.USERID_VIEW_ATTR)).intValue()); buffer.putInt(((Number) attributes.get(IoUtils.GROUPID_VIEW_ATTR)).intValue()); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { buffer.putInt(attributesToPermissions(isReg, isDir, isLnk, perms)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME) != 0) { buffer = writeTime(buffer, version, flags, lastAccessTime); buffer = writeTime(buffer, version, flags, lastModifiedTime); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_EXTENDED) != 0) { buffer = writeExtensions(buffer, extensions); } return buffer; } /** * Writes the retrieved file / directory attributes in V4+ format * * @param <B> Type of {@link Buffer} being updated * @param buffer The target buffer instance * @param version The actual version - must be at least {@link SftpConstants#SFTP_V4} * @param attributes The {@link Map} of attributes * @return The updated buffer */ public static <B extends Buffer> B writeAttrsV4(B buffer, int version, Map<String, ?> attributes) { ValidateUtils.checkTrue(version >= SftpConstants.SFTP_V4, "Illegal version: %d", version); boolean isReg = getBool((Boolean) attributes.get(IoUtils.REGFILE_VIEW_ATTR)); boolean isDir = getBool((Boolean) attributes.get(IoUtils.DIRECTORY_VIEW_ATTR)); boolean isLnk = getBool((Boolean) attributes.get(IoUtils.SYMLINK_VIEW_ATTR)); @SuppressWarnings("unchecked") Collection<PosixFilePermission> perms = (Collection<PosixFilePermission>) attributes.get(IoUtils.PERMISSIONS_VIEW_ATTR); Number size = (Number) attributes.get(IoUtils.SIZE_VIEW_ATTR); FileTime lastModifiedTime = (FileTime) attributes.get(IoUtils.LASTMOD_TIME_VIEW_ATTR); FileTime lastAccessTime = (FileTime) attributes.get(IoUtils.LASTACC_TIME_VIEW_ATTR); FileTime creationTime = (FileTime) attributes.get(IoUtils.CREATE_TIME_VIEW_ATTR); @SuppressWarnings("unchecked") Collection<AclEntry> acl = (Collection<AclEntry>) attributes.get(IoUtils.ACL_VIEW_ATTR); Map<?, ?> extensions = (Map<?, ?>) attributes.get(IoUtils.EXTENDED_VIEW_ATTR); int flags = (((isReg || isLnk) && (size != null)) ? SftpConstants.SSH_FILEXFER_ATTR_SIZE : 0) | ((attributes.containsKey(IoUtils.OWNER_VIEW_ATTR) && attributes.containsKey(IoUtils.GROUP_VIEW_ATTR)) ? SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP : 0) | ((perms != null) ? SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS : 0) | ((lastModifiedTime != null) ? SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME : 0) | ((creationTime != null) ? SftpConstants.SSH_FILEXFER_ATTR_CREATETIME : 0) | ((lastAccessTime != null) ? SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME : 0) | ((acl != null) ? SftpConstants.SSH_FILEXFER_ATTR_ACL : 0) | ((extensions != null) ? 
SftpConstants.SSH_FILEXFER_ATTR_EXTENDED : 0); buffer.putInt(flags); buffer.putByte((byte) (isReg ? SftpConstants.SSH_FILEXFER_TYPE_REGULAR : isDir ? SftpConstants.SSH_FILEXFER_TYPE_DIRECTORY : isLnk ? SftpConstants.SSH_FILEXFER_TYPE_SYMLINK : SftpConstants.SSH_FILEXFER_TYPE_UNKNOWN)); if ((flags & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { buffer.putLong(size.longValue()); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP) != 0) { buffer.putString( Objects.toString(attributes.get(IoUtils.OWNER_VIEW_ATTR), SftpUniversalOwnerAndGroup.Owner.getName())); buffer.putString( Objects.toString(attributes.get(IoUtils.GROUP_VIEW_ATTR), SftpUniversalOwnerAndGroup.Group.getName())); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { buffer.putInt(attributesToPermissions(isReg, isDir, isLnk, perms)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME) != 0) { buffer = writeTime(buffer, version, flags, lastAccessTime); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_CREATETIME) != 0) { buffer = writeTime(buffer, version, flags, lastAccessTime); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME) != 0) { buffer = writeTime(buffer, version, flags, lastModifiedTime); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACL) != 0) { buffer = writeACLs(buffer, version, acl); } // TODO: ctime // TODO: bits if ((flags & SftpConstants.SSH_FILEXFER_ATTR_EXTENDED) != 0) { buffer = writeExtensions(buffer, extensions); } return buffer; } public static <B extends Buffer> B writeAttributes(B buffer, Attributes attributes, int sftpVersion) { int flagsMask = 0; Collection<Attribute> flags = Objects.requireNonNull(attributes, "No attributes").getFlags(); if (sftpVersion == SftpConstants.SFTP_V3) { for (Attribute a : flags) { switch (a) { case Size: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_SIZE; break; case UidGid: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_UIDGID; break; case Perms: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS; break; case AccessTime: if (flags.contains(Attribute.ModifyTime)) { flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME; } break; case ModifyTime: if (flags.contains(Attribute.AccessTime)) { flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME; } break; case Extensions: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_EXTENDED; break; default: // do nothing } } buffer.putInt(flagsMask); if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { buffer.putLong(attributes.getSize()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_UIDGID) != 0) { buffer.putInt(attributes.getUserId()); buffer.putInt(attributes.getGroupId()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { buffer.putInt(attributes.getPermissions()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME) != 0) { buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getAccessTime()); buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getModifyTime()); } } else if (sftpVersion >= SftpConstants.SFTP_V4) { for (Attribute a : flags) { switch (a) { case Size: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_SIZE; break; case OwnerGroup: { /* * According to * https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/draft-ietf-secsh-filexfer-13.txt * section 7.5 * * If either the owner or group field is zero length, the field should be considered absent, and no * change should be made to that specific field during a modification operation. 
*/ String owner = attributes.getOwner(); String group = attributes.getGroup(); if (GenericUtils.isNotEmpty(owner) && GenericUtils.isNotEmpty(group)) { flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP; } break; } case Perms: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS; break; case AccessTime: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME; break; case ModifyTime: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME; break; case CreateTime: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_CREATETIME; break; case Acl: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_ACL; break; case Extensions: flagsMask |= SftpConstants.SSH_FILEXFER_ATTR_EXTENDED; break; default: // do nothing } } buffer.putInt(flagsMask); buffer.putByte((byte) attributes.getType()); if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { buffer.putLong(attributes.getSize()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP) != 0) { String owner = attributes.getOwner(); buffer.putString(owner); String group = attributes.getGroup(); buffer.putString(group); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { buffer.putInt(attributes.getPermissions()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME) != 0) { buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getAccessTime()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_CREATETIME) != 0) { buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getCreateTime()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME) != 0) { buffer = SftpHelper.writeTime(buffer, sftpVersion, flagsMask, attributes.getModifyTime()); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_ACL) != 0) { buffer = SftpHelper.writeACLs(buffer, sftpVersion, attributes.getAcl()); } // TODO: for v5 ? 6? add CTIME (see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#page-16 - v6) } else { throw new UnsupportedOperationException("writeAttributes(" + attributes + ") unsupported version: " + sftpVersion); } if ((flagsMask & SftpConstants.SSH_FILEXFER_ATTR_EXTENDED) != 0) { buffer = SftpHelper.writeExtensions(buffer, attributes.getExtensions()); } return buffer; } /** * @param bool The {@link Boolean} value * @return {@code true} it the argument is non-{@code null} and its {@link Boolean#booleanValue()} is * {@code true} */ public static boolean getBool(Boolean bool) { return bool != null && bool; } /** * Converts a file / folder's attributes into a mask * * @param isReg {@code true} if this is a normal file * @param isDir {@code true} if this is a directory * @param isLnk {@code true} if this is a symbolic link * @param perms The file / folder's access {@link PosixFilePermission}s * @return A mask encoding the file / folder's attributes */ public static int attributesToPermissions( boolean isReg, boolean isDir, boolean isLnk, Collection<PosixFilePermission> perms) { int pf = 0; if (perms != null) { for (PosixFilePermission p : perms) { switch (p) { case OWNER_READ: pf |= SftpConstants.S_IRUSR; break; case OWNER_WRITE: pf |= SftpConstants.S_IWUSR; break; case OWNER_EXECUTE: pf |= SftpConstants.S_IXUSR; break; case GROUP_READ: pf |= SftpConstants.S_IRGRP; break; case GROUP_WRITE: pf |= SftpConstants.S_IWGRP; break; case GROUP_EXECUTE: pf |= SftpConstants.S_IXGRP; break; case OTHERS_READ: pf |= SftpConstants.S_IROTH; break; case OTHERS_WRITE: pf |= SftpConstants.S_IWOTH; break; case OTHERS_EXECUTE: pf |= SftpConstants.S_IXOTH; break; default: // ignored } } } pf |= isReg ? 
SftpConstants.S_IFREG : 0; pf |= isDir ? SftpConstants.S_IFDIR : 0; pf |= isLnk ? SftpConstants.S_IFLNK : 0; return pf; } /** * Converts a POSIX permissions mask to a file type value * * @param perms The POSIX permissions mask * @return The file type - see {@code SSH_FILEXFER_TYPE_xxx} values */ public static int permissionsToFileType(int perms) { if ((SftpConstants.S_IFLNK & perms) == SftpConstants.S_IFLNK) { return SftpConstants.SSH_FILEXFER_TYPE_SYMLINK; } else if ((SftpConstants.S_IFREG & perms) == SftpConstants.S_IFREG) { return SftpConstants.SSH_FILEXFER_TYPE_REGULAR; } else if ((SftpConstants.S_IFDIR & perms) == SftpConstants.S_IFDIR) { return SftpConstants.SSH_FILEXFER_TYPE_DIRECTORY; } else if ((SftpConstants.S_IFSOCK & perms) == SftpConstants.S_IFSOCK) { return SftpConstants.SSH_FILEXFER_TYPE_SOCKET; } else if ((SftpConstants.S_IFBLK & perms) == SftpConstants.S_IFBLK) { return SftpConstants.SSH_FILEXFER_TYPE_BLOCK_DEVICE; } else if ((SftpConstants.S_IFCHR & perms) == SftpConstants.S_IFCHR) { return SftpConstants.SSH_FILEXFER_TYPE_CHAR_DEVICE; } else if ((SftpConstants.S_IFIFO & perms) == SftpConstants.S_IFIFO) { return SftpConstants.SSH_FILEXFER_TYPE_FIFO; } else { return SftpConstants.SSH_FILEXFER_TYPE_UNKNOWN; } } /** * Converts a file type into a POSIX permission mask value * * @param type File type - see {@code SSH_FILEXFER_TYPE_xxx} values * @return The matching POSIX permission mask value */ public static int fileTypeToPermission(int type) { switch (type) { case SftpConstants.SSH_FILEXFER_TYPE_REGULAR: return SftpConstants.S_IFREG; case SftpConstants.SSH_FILEXFER_TYPE_DIRECTORY: return SftpConstants.S_IFDIR; case SftpConstants.SSH_FILEXFER_TYPE_SYMLINK: return SftpConstants.S_IFLNK; case SftpConstants.SSH_FILEXFER_TYPE_SOCKET: return SftpConstants.S_IFSOCK; case SftpConstants.SSH_FILEXFER_TYPE_BLOCK_DEVICE: return SftpConstants.S_IFBLK; case SftpConstants.SSH_FILEXFER_TYPE_CHAR_DEVICE: return SftpConstants.S_IFCHR; case SftpConstants.SSH_FILEXFER_TYPE_FIFO: return SftpConstants.S_IFIFO; default: return 0; } } /** * Translates a mask of permissions into its enumeration values equivalents * * @param perms The permissions mask * @return A {@link Set} of the equivalent {@link PosixFilePermission}s */ public static Set<PosixFilePermission> permissionsToAttributes(int perms) { Set<PosixFilePermission> p = EnumSet.noneOf(PosixFilePermission.class); if ((perms & SftpConstants.S_IRUSR) != 0) { p.add(PosixFilePermission.OWNER_READ); } if ((perms & SftpConstants.S_IWUSR) != 0) { p.add(PosixFilePermission.OWNER_WRITE); } if ((perms & SftpConstants.S_IXUSR) != 0) { p.add(PosixFilePermission.OWNER_EXECUTE); } if ((perms & SftpConstants.S_IRGRP) != 0) { p.add(PosixFilePermission.GROUP_READ); } if ((perms & SftpConstants.S_IWGRP) != 0) { p.add(PosixFilePermission.GROUP_WRITE); } if ((perms & SftpConstants.S_IXGRP) != 0) { p.add(PosixFilePermission.GROUP_EXECUTE); } if ((perms & SftpConstants.S_IROTH) != 0) { p.add(PosixFilePermission.OTHERS_READ); } if ((perms & SftpConstants.S_IWOTH) != 0) { p.add(PosixFilePermission.OTHERS_WRITE); } if ((perms & SftpConstants.S_IXOTH) != 0) { p.add(PosixFilePermission.OTHERS_EXECUTE); } return p; } /** * Returns the most adequate sub-status for the provided exception * * @param t The thrown {@link Throwable} * @return The matching sub-status */ @SuppressWarnings("checkstyle:ReturnCount") public static int resolveSubstatus(Throwable t) { if ((t instanceof NoSuchFileException) || (t instanceof FileNotFoundException)) { return 
SftpConstants.SSH_FX_NO_SUCH_FILE; } else if (t instanceof InvalidHandleException) { return SftpConstants.SSH_FX_INVALID_HANDLE; } else if (t instanceof FileAlreadyExistsException) { return SftpConstants.SSH_FX_FILE_ALREADY_EXISTS; } else if (t instanceof DirectoryNotEmptyException) { return SftpConstants.SSH_FX_DIR_NOT_EMPTY; } else if (t instanceof NotDirectoryException) { return SftpConstants.SSH_FX_NOT_A_DIRECTORY; } else if (t instanceof AccessDeniedException) { return SftpConstants.SSH_FX_PERMISSION_DENIED; } else if (t instanceof EOFException) { return SftpConstants.SSH_FX_EOF; } else if (t instanceof OverlappingFileLockException) { return SftpConstants.SSH_FX_LOCK_CONFLICT; } else if ((t instanceof UnsupportedOperationException) || (t instanceof UnknownServiceException)) { return SftpConstants.SSH_FX_OP_UNSUPPORTED; } else if (t instanceof InvalidPathException) { return SftpConstants.SSH_FX_INVALID_FILENAME; } else if (t instanceof IllegalArgumentException) { return SftpConstants.SSH_FX_INVALID_PARAMETER; } else if (t instanceof UserPrincipalNotFoundException) { return SftpConstants.SSH_FX_UNKNOWN_PRINCIPAL; } else if (t instanceof FileSystemLoopException) { return SftpConstants.SSH_FX_LINK_LOOP; } else if (t instanceof SftpException) { return ((SftpException) t).getStatus(); } else { return SftpConstants.SSH_FX_FAILURE; } } public static String resolveStatusMessage(int subStatus) { String message = DEFAULT_SUBSTATUS_MESSAGE.get(subStatus); return GenericUtils.isEmpty(message) ? ("Unknown error: " + subStatus) : message; } public static NavigableMap<String, Object> readAttrs(Buffer buffer, int version) { NavigableMap<String, Object> attrs = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); int flags = buffer.getInt(); if (version >= SftpConstants.SFTP_V4) { int type = buffer.getUByte(); switch (type) { case SftpConstants.SSH_FILEXFER_TYPE_REGULAR: attrs.put(IoUtils.REGFILE_VIEW_ATTR, Boolean.TRUE); break; case SftpConstants.SSH_FILEXFER_TYPE_DIRECTORY: attrs.put(IoUtils.DIRECTORY_VIEW_ATTR, Boolean.TRUE); break; case SftpConstants.SSH_FILEXFER_TYPE_SYMLINK: attrs.put(IoUtils.SYMLINK_VIEW_ATTR, Boolean.TRUE); break; case SftpConstants.SSH_FILEXFER_TYPE_SOCKET: case SftpConstants.SSH_FILEXFER_TYPE_CHAR_DEVICE: case SftpConstants.SSH_FILEXFER_TYPE_BLOCK_DEVICE: case SftpConstants.SSH_FILEXFER_TYPE_FIFO: attrs.put(IoUtils.OTHERFILE_VIEW_ATTR, Boolean.TRUE); break; default: // ignored } } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_SIZE) != 0) { attrs.put(IoUtils.SIZE_VIEW_ATTR, buffer.getLong()); } if (version == SftpConstants.SFTP_V3) { if ((flags & SftpConstants.SSH_FILEXFER_ATTR_UIDGID) != 0) { attrs.put(IoUtils.USERID_VIEW_ATTR, buffer.getInt()); attrs.put(IoUtils.GROUPID_VIEW_ATTR, buffer.getInt()); } } else { if ((version >= SftpConstants.SFTP_V6) && ((flags & SftpConstants.SSH_FILEXFER_ATTR_ALLOCATION_SIZE) != 0)) { @SuppressWarnings("unused") long allocSize = buffer.getLong(); // TODO handle allocation size } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_OWNERGROUP) != 0) { attrs.put(IoUtils.OWNER_VIEW_ATTR, new DefaultGroupPrincipal(buffer.getString())); attrs.put(IoUtils.GROUP_VIEW_ATTR, new DefaultGroupPrincipal(buffer.getString())); } } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_PERMISSIONS) != 0) { attrs.put(IoUtils.PERMISSIONS_VIEW_ATTR, permissionsToAttributes(buffer.getInt())); } if (version == SftpConstants.SFTP_V3) { if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACMODTIME) != 0) { attrs.put(IoUtils.LASTACC_TIME_VIEW_ATTR, readTime(buffer, version, flags)); 
attrs.put(IoUtils.LASTMOD_TIME_VIEW_ATTR, readTime(buffer, version, flags)); } } else if (version >= SftpConstants.SFTP_V4) { if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACCESSTIME) != 0) { attrs.put(IoUtils.LASTACC_TIME_VIEW_ATTR, readTime(buffer, version, flags)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_CREATETIME) != 0) { attrs.put(IoUtils.CREATE_TIME_VIEW_ATTR, readTime(buffer, version, flags)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_MODIFYTIME) != 0) { attrs.put(IoUtils.LASTMOD_TIME_VIEW_ATTR, readTime(buffer, version, flags)); } // modification time sub-seconds if ((version >= SftpConstants.SFTP_V6) && (flags & SftpConstants.SSH_FILEXFER_ATTR_CTIME) != 0) { attrs.put("ctime", readTime(buffer, version, flags)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_ACL) != 0) { attrs.put(IoUtils.ACL_VIEW_ATTR, readACLs(buffer, version)); } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_BITS) != 0) { @SuppressWarnings("unused") int bits = buffer.getInt(); @SuppressWarnings("unused") int valid = 0xffffffff; if (version >= SftpConstants.SFTP_V6) { valid = buffer.getInt(); } // TODO: handle attrib bits } if (version >= SftpConstants.SFTP_V6) { if ((flags & SftpConstants.SSH_FILEXFER_ATTR_TEXT_HINT) != 0) { @SuppressWarnings("unused") boolean text = buffer.getBoolean(); // TODO: handle text } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_MIME_TYPE) != 0) { @SuppressWarnings("unused") String mimeType = buffer.getString(); // TODO: handle mime-type } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_LINK_COUNT) != 0) { @SuppressWarnings("unused") int nlink = buffer.getInt(); // TODO: handle link-count } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_UNTRANSLATED_NAME) != 0) { @SuppressWarnings("unused") String untranslated = buffer.getString(); // TODO: handle untranslated-name } } } if ((flags & SftpConstants.SSH_FILEXFER_ATTR_EXTENDED) != 0) { attrs.put(IoUtils.EXTENDED_VIEW_ATTR, readExtensions(buffer)); } return attrs; } public static NavigableMap<String, byte[]> readExtensions(Buffer buffer) { int count = buffer.getInt(); // Protect against malicious or malformed packets if ((count < 0) || (count > SshConstants.SSH_REQUIRED_PAYLOAD_PACKET_LENGTH_SUPPORT)) { throw new IndexOutOfBoundsException("Illogical extensions count: " + count); } // NOTE NavigableMap<String, byte[]> extended = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (int i = 1; i <= count; i++) { String key = buffer.getString(); byte[] val = buffer.getBytes(); byte[] prev = extended.put(key, val); ValidateUtils.checkTrue(prev == null, "Duplicate values for extended key=%s", key); } return extended; } public static <B extends Buffer> B writeExtensions(B buffer, Map<?, ?> extensions) { int numExtensions = MapEntryUtils.size(extensions); buffer.putInt(numExtensions); if (numExtensions <= 0) { return buffer; } for (Map.Entry<?, ?> ee : extensions.entrySet()) { Object key = Objects.requireNonNull(ee.getKey(), "No extension type"); Object value = Objects.requireNonNull(ee.getValue(), "No extension value"); buffer.putString(key.toString()); if (value instanceof byte[]) { buffer.putBytes((byte[]) value); } else { buffer.putString(value.toString()); } } return buffer; } public static NavigableMap<String, String> toStringExtensions(Map<String, ?> extensions) { if (MapEntryUtils.isEmpty(extensions)) { return Collections.emptyNavigableMap(); } // NOTE: even though extensions are probably case sensitive we do not allow duplicate name that differs only in // case NavigableMap<String, String> map = new 
TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (Map.Entry<?, ?> ee : extensions.entrySet()) { Object key = Objects.requireNonNull(ee.getKey(), "No extension type"); Object value = ValidateUtils.checkNotNull(ee.getValue(), "No value for extension=%s", key); String prev = map.put(key.toString(), (value instanceof byte[]) ? new String((byte[]) value, StandardCharsets.UTF_8) : value.toString()); ValidateUtils.checkTrue(prev == null, "Multiple values for extension=%s", key); } return map; } public static NavigableMap<String, byte[]> toBinaryExtensions(Map<String, String> extensions) { if (MapEntryUtils.isEmpty(extensions)) { return Collections.emptyNavigableMap(); } // NOTE: even though extensions are probably case sensitive we do not allow duplicate name that differs only in // case NavigableMap<String, byte[]> map = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); extensions.forEach((key, value) -> { ValidateUtils.checkNotNull(value, "No value for extension=%s", key); byte[] prev = map.put(key, value.getBytes(StandardCharsets.UTF_8)); ValidateUtils.checkTrue(prev == null, "Multiple values for extension=%s", key); }); return map; } // for v4,5 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-05#page-15 // for v6 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#page-21 public static List<AclEntry> readACLs(Buffer buffer, int version) { int aclSize = buffer.getInt(); // Protect against malicious or malformed packets if ((aclSize < 0) || (aclSize > (2 * SshConstants.SSH_REQUIRED_PAYLOAD_PACKET_LENGTH_SUPPORT))) { throw new IndexOutOfBoundsException("Illogical ACL entries size: " + aclSize); } int startPos = buffer.rpos(); Buffer aclBuffer = new ByteArrayBuffer(buffer.array(), startPos, aclSize, true); List<AclEntry> acl = decodeACLs(aclBuffer, version); buffer.rpos(startPos + aclSize); return acl; } public static List<AclEntry> decodeACLs(Buffer buffer, int version) { @SuppressWarnings("unused") int aclFlags = 0; // TODO handle ACL flags if (version >= SftpConstants.SFTP_V6) { aclFlags = buffer.getInt(); } int count = buffer.getInt(); /* * NOTE: although the value is defined as UINT32 we do not expected a count greater than several hundreds + * protect against malicious or corrupted packets */ if ((count < 0) || (count > SshConstants.SSH_REQUIRED_PAYLOAD_PACKET_LENGTH_SUPPORT)) { throw new IndexOutOfBoundsException("Illogical ACL entries count: " + count); } ValidateUtils.checkTrue(count >= 0, "Invalid ACL entries count: %d", count); if (count == 0) { return Collections.emptyList(); } List<AclEntry> acls = new ArrayList<>(count); for (int i = 1; i <= count; i++) { int aclType = buffer.getInt(); int aclFlag = buffer.getInt(); int aclMask = buffer.getInt(); String aclWho = buffer.getString(); acls.add(buildAclEntry(aclType, aclFlag, aclMask, aclWho)); } return acls; } public static AclEntry buildAclEntry(int aclType, int aclFlag, int aclMask, String aclWho) { UserPrincipal who = new DefaultGroupPrincipal(aclWho); return AclEntry.newBuilder() .setType(ValidateUtils.checkNotNull(decodeAclEntryType(aclType), "Unknown ACL type: %d", aclType)) .setFlags(decodeAclFlags(aclFlag)) .setPermissions(decodeAclMask(aclMask)) .setPrincipal(who) .build(); } /** * @param aclType The {@code ACE4_ACCESS_xxx_ACE_TYPE} value * @return The matching {@link AclEntryType} or {@code null} if unknown value */ public static AclEntryType decodeAclEntryType(int aclType) { switch (aclType) { case SftpConstants.ACE4_ACCESS_ALLOWED_ACE_TYPE: return AclEntryType.ALLOW; case 
SftpConstants.ACE4_ACCESS_DENIED_ACE_TYPE: return AclEntryType.DENY; case SftpConstants.ACE4_SYSTEM_AUDIT_ACE_TYPE: return AclEntryType.AUDIT; case SftpConstants.ACE4_SYSTEM_ALARM_ACE_TYPE: return AclEntryType.ALARM; default: return null; } } public static Set<AclEntryFlag> decodeAclFlags(int aclFlag) { Set<AclEntryFlag> flags = EnumSet.noneOf(AclEntryFlag.class); if ((aclFlag & SftpConstants.ACE4_FILE_INHERIT_ACE) != 0) { flags.add(AclEntryFlag.FILE_INHERIT); } if ((aclFlag & SftpConstants.ACE4_DIRECTORY_INHERIT_ACE) != 0) { flags.add(AclEntryFlag.DIRECTORY_INHERIT); } if ((aclFlag & SftpConstants.ACE4_NO_PROPAGATE_INHERIT_ACE) != 0) { flags.add(AclEntryFlag.NO_PROPAGATE_INHERIT); } if ((aclFlag & SftpConstants.ACE4_INHERIT_ONLY_ACE) != 0) { flags.add(AclEntryFlag.INHERIT_ONLY); } return flags; } public static Set<AclEntryPermission> decodeAclMask(int aclMask) { Set<AclEntryPermission> mask = EnumSet.noneOf(AclEntryPermission.class); if ((aclMask & SftpConstants.ACE4_READ_DATA) != 0) { mask.add(AclEntryPermission.READ_DATA); } if ((aclMask & SftpConstants.ACE4_LIST_DIRECTORY) != 0) { mask.add(AclEntryPermission.LIST_DIRECTORY); } if ((aclMask & SftpConstants.ACE4_WRITE_DATA) != 0) { mask.add(AclEntryPermission.WRITE_DATA); } if ((aclMask & SftpConstants.ACE4_ADD_FILE) != 0) { mask.add(AclEntryPermission.ADD_FILE); } if ((aclMask & SftpConstants.ACE4_APPEND_DATA) != 0) { mask.add(AclEntryPermission.APPEND_DATA); } if ((aclMask & SftpConstants.ACE4_ADD_SUBDIRECTORY) != 0) { mask.add(AclEntryPermission.ADD_SUBDIRECTORY); } if ((aclMask & SftpConstants.ACE4_READ_NAMED_ATTRS) != 0) { mask.add(AclEntryPermission.READ_NAMED_ATTRS); } if ((aclMask & SftpConstants.ACE4_WRITE_NAMED_ATTRS) != 0) { mask.add(AclEntryPermission.WRITE_NAMED_ATTRS); } if ((aclMask & SftpConstants.ACE4_EXECUTE) != 0) { mask.add(AclEntryPermission.EXECUTE); } if ((aclMask & SftpConstants.ACE4_DELETE_CHILD) != 0) { mask.add(AclEntryPermission.DELETE_CHILD); } if ((aclMask & SftpConstants.ACE4_READ_ATTRIBUTES) != 0) { mask.add(AclEntryPermission.READ_ATTRIBUTES); } if ((aclMask & SftpConstants.ACE4_WRITE_ATTRIBUTES) != 0) { mask.add(AclEntryPermission.WRITE_ATTRIBUTES); } if ((aclMask & SftpConstants.ACE4_DELETE) != 0) { mask.add(AclEntryPermission.DELETE); } if ((aclMask & SftpConstants.ACE4_READ_ACL) != 0) { mask.add(AclEntryPermission.READ_ACL); } if ((aclMask & SftpConstants.ACE4_WRITE_ACL) != 0) { mask.add(AclEntryPermission.WRITE_ACL); } if ((aclMask & SftpConstants.ACE4_WRITE_OWNER) != 0) { mask.add(AclEntryPermission.WRITE_OWNER); } if ((aclMask & SftpConstants.ACE4_SYNCHRONIZE) != 0) { mask.add(AclEntryPermission.SYNCHRONIZE); } return mask; } public static <B extends Buffer> B writeACLs(B buffer, int version, Collection<? extends AclEntry> acl) { int lenPos = buffer.wpos(); buffer.putInt(0); // length placeholder buffer = encodeACLs(buffer, version, acl); BufferUtils.updateLengthPlaceholder(buffer, lenPos); return buffer; } public static <B extends Buffer> B encodeACLs(B buffer, int version, Collection<? 
extends AclEntry> acl) { Objects.requireNonNull(acl, "No ACL"); if (version >= SftpConstants.SFTP_V6) { buffer.putInt(0); // TODO handle ACL flags } int numEntries = GenericUtils.size(acl); buffer.putInt(numEntries); if (numEntries > 0) { for (AclEntry e : acl) { buffer = writeAclEntry(buffer, e); } } return buffer; } public static <B extends Buffer> B writeAclEntry(B buffer, AclEntry acl) { Objects.requireNonNull(acl, "No ACL"); AclEntryType type = acl.type(); int aclType = encodeAclEntryType(type); ValidateUtils.checkTrue(aclType >= 0, "Unknown ACL type: %s", type); buffer.putInt(aclType); buffer.putInt(encodeAclFlags(acl.flags())); buffer.putInt(encodeAclMask(acl.permissions())); Principal user = acl.principal(); buffer.putString(user.getName()); return buffer; } /** * Returns the equivalent SFTP value for the ACL type * * @param type The {@link AclEntryType} * @return The equivalent {@code ACE_SYSTEM_xxx_TYPE} or negative if {@code null} or unknown type */ public static int encodeAclEntryType(AclEntryType type) { if (type == null) { return Integer.MIN_VALUE; } switch (type) { case ALARM: return SftpConstants.ACE4_SYSTEM_ALARM_ACE_TYPE; case ALLOW: return SftpConstants.ACE4_ACCESS_ALLOWED_ACE_TYPE; case AUDIT: return SftpConstants.ACE4_SYSTEM_AUDIT_ACE_TYPE; case DENY: return SftpConstants.ACE4_ACCESS_DENIED_ACE_TYPE; default: return -1; } } public static long encodeAclFlags(Collection<AclEntryFlag> flags) { if (GenericUtils.isEmpty(flags)) { return 0L; } long aclFlag = 0L; if (flags.contains(AclEntryFlag.FILE_INHERIT)) { aclFlag |= SftpConstants.ACE4_FILE_INHERIT_ACE; } if (flags.contains(AclEntryFlag.DIRECTORY_INHERIT)) { aclFlag |= SftpConstants.ACE4_DIRECTORY_INHERIT_ACE; } if (flags.contains(AclEntryFlag.NO_PROPAGATE_INHERIT)) { aclFlag |= SftpConstants.ACE4_NO_PROPAGATE_INHERIT_ACE; } if (flags.contains(AclEntryFlag.INHERIT_ONLY)) { aclFlag |= SftpConstants.ACE4_INHERIT_ONLY_ACE; } return aclFlag; } public static long encodeAclMask(Collection<AclEntryPermission> mask) { if (GenericUtils.isEmpty(mask)) { return 0L; } long aclMask = 0L; if (mask.contains(AclEntryPermission.READ_DATA)) { aclMask |= SftpConstants.ACE4_READ_DATA; } if (mask.contains(AclEntryPermission.LIST_DIRECTORY)) { aclMask |= SftpConstants.ACE4_LIST_DIRECTORY; } if (mask.contains(AclEntryPermission.WRITE_DATA)) { aclMask |= SftpConstants.ACE4_WRITE_DATA; } if (mask.contains(AclEntryPermission.ADD_FILE)) { aclMask |= SftpConstants.ACE4_ADD_FILE; } if (mask.contains(AclEntryPermission.APPEND_DATA)) { aclMask |= SftpConstants.ACE4_APPEND_DATA; } if (mask.contains(AclEntryPermission.ADD_SUBDIRECTORY)) { aclMask |= SftpConstants.ACE4_ADD_SUBDIRECTORY; } if (mask.contains(AclEntryPermission.READ_NAMED_ATTRS)) { aclMask |= SftpConstants.ACE4_READ_NAMED_ATTRS; } if (mask.contains(AclEntryPermission.WRITE_NAMED_ATTRS)) { aclMask |= SftpConstants.ACE4_WRITE_NAMED_ATTRS; } if (mask.contains(AclEntryPermission.EXECUTE)) { aclMask |= SftpConstants.ACE4_EXECUTE; } if (mask.contains(AclEntryPermission.DELETE_CHILD)) { aclMask |= SftpConstants.ACE4_DELETE_CHILD; } if (mask.contains(AclEntryPermission.READ_ATTRIBUTES)) { aclMask |= SftpConstants.ACE4_READ_ATTRIBUTES; } if (mask.contains(AclEntryPermission.WRITE_ATTRIBUTES)) { aclMask |= SftpConstants.ACE4_WRITE_ATTRIBUTES; } if (mask.contains(AclEntryPermission.DELETE)) { aclMask |= SftpConstants.ACE4_DELETE; } if (mask.contains(AclEntryPermission.READ_ACL)) { aclMask |= SftpConstants.ACE4_READ_ACL; } if (mask.contains(AclEntryPermission.WRITE_ACL)) { aclMask |= 
SftpConstants.ACE4_WRITE_ACL; } if (mask.contains(AclEntryPermission.WRITE_OWNER)) { aclMask |= SftpConstants.ACE4_WRITE_OWNER; } if (mask.contains(AclEntryPermission.SYNCHRONIZE)) { aclMask |= SftpConstants.ACE4_SYNCHRONIZE; } return aclMask; } /** * Encodes a {@link FileTime} value into a buffer * * @param <B> Type of {@link Buffer} being updated * @param buffer The target buffer instance * @param version The encoding version * @param flags The encoding flags * @param time The value to encode * @return The updated buffer */ public static <B extends Buffer> B writeTime(B buffer, int version, int flags, FileTime time) { // for v3 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-02#page-8 // for v6 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#page-16 if (version >= SftpConstants.SFTP_V4) { buffer.putLong(time.to(TimeUnit.SECONDS)); if ((flags & SftpConstants.SSH_FILEXFER_ATTR_SUBSECOND_TIMES) != 0) { long nanos = time.to(TimeUnit.NANOSECONDS); nanos = nanos % TimeUnit.SECONDS.toNanos(1); buffer.putInt((int) nanos); } } else { buffer.putInt(time.to(TimeUnit.SECONDS)); } return buffer; } /** * Decodes a {@link FileTime} value from a buffer * * @param buffer The source {@link Buffer} * @param version The encoding version * @param flags The encoding flags * @return The decoded value */ public static FileTime readTime(Buffer buffer, int version, int flags) { // for v3 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-02#page-8 // for v6 see https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#page-16 long secs = (version >= SftpConstants.SFTP_V4) ? buffer.getLong() : buffer.getUInt(); long millis = TimeUnit.SECONDS.toMillis(secs); if ((version >= SftpConstants.SFTP_V4) && ((flags & SftpConstants.SSH_FILEXFER_ATTR_SUBSECOND_TIMES) != 0)) { long nanoseconds = buffer.getUInt(); millis += TimeUnit.NANOSECONDS.toMillis(nanoseconds); } return FileTime.from(millis, TimeUnit.MILLISECONDS); } /** * Creates an &quot;ls -l&quot; compatible long name string * * @param shortName The short file name - can also be &quot;.&quot; or &quot;..&quot; * @param attributes The file's attributes - e.g., size, owner, permissions, etc. 
* @return A {@link String} representing the &quot;long&quot; file name as per * <A HREF="https://tools.ietf.org/html/draft-ietf-secsh-filexfer-02">SFTP version 3 - section * 7</A> */ public static String getLongName(String shortName, Map<String, ?> attributes) { String owner = Objects.toString(attributes.get(IoUtils.OWNER_VIEW_ATTR), null); String username = OsUtils.getCanonicalUser(owner); if (GenericUtils.isEmpty(username)) { username = SftpUniversalOwnerAndGroup.Owner.getName(); } String group = Objects.toString(attributes.get(IoUtils.GROUP_VIEW_ATTR), null); group = OsUtils.resolveCanonicalGroup(group, owner); if (GenericUtils.isEmpty(group)) { group = SftpUniversalOwnerAndGroup.Group.getName(); } Number length = (Number) attributes.get(IoUtils.SIZE_VIEW_ATTR); if (length == null) { length = 0L; } String lengthString = String.format("%1$8s", length); String linkCount = Objects.toString(attributes.get(IoUtils.NUMLINKS_VIEW_ATTR), null); if (GenericUtils.isEmpty(linkCount)) { linkCount = "1"; } Boolean isDirectory = (Boolean) attributes.get(IoUtils.DIRECTORY_VIEW_ATTR); Boolean isLink = (Boolean) attributes.get(IoUtils.SYMLINK_VIEW_ATTR); @SuppressWarnings("unchecked") Set<PosixFilePermission> perms = (Set<PosixFilePermission>) attributes.get(IoUtils.PERMISSIONS_VIEW_ATTR); if (perms == null) { perms = EnumSet.noneOf(PosixFilePermission.class); } String permsString = PosixFilePermissions.toString(perms); String timeStamp = UnixDateFormat.getUnixDate((FileTime) attributes.get(IoUtils.LASTMOD_TIME_VIEW_ATTR)); StringBuilder sb = new StringBuilder( GenericUtils.length(linkCount) + GenericUtils.length(username) + GenericUtils.length(group) + GenericUtils.length(timeStamp) + GenericUtils.length(lengthString) + GenericUtils.length(permsString) + GenericUtils.length(shortName) + Integer.SIZE); sb.append(SftpHelper.getBool(isDirectory) ? 'd' : (SftpHelper.getBool(isLink) ? 'l' : '-')).append(permsString); sb.append(' '); for (int index = linkCount.length(); index < 3; index++) { sb.append(' '); } sb.append(linkCount); sb.append(' ').append(username); for (int index = username.length(); index < 8; index++) { sb.append(' '); } sb.append(' ').append(group); for (int index = group.length(); index < 8; index++) { sb.append(' '); } sb.append(' ').append(lengthString).append(' ').append(timeStamp).append(' ').append(shortName); return sb.toString(); } }
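The encodeAclMask() helper in the file above folds java.nio AclEntryPermission values into a single SFTP ACE4 bitmask by OR-ing one constant per granted permission. A minimal, self-contained sketch of that pattern follows; AclMaskSketch and its BITS table are illustrative only, and the bit values are quoted from the NFSv4/SFTP v6 drafts for a few permissions rather than taken from SftpConstants, so treat them as assumptions.

import java.nio.file.attribute.AclEntryPermission;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;

// Sketch of the flag-accumulation pattern used by encodeAclMask() above.
// Bit values are as published in the NFSv4/SFTP v6 drafts (assumption for
// illustration); the authoritative constants live in SftpConstants.
public class AclMaskSketch {

    private static final Map<AclEntryPermission, Long> BITS = Map.of(
            AclEntryPermission.READ_DATA, 0x00000001L,
            AclEntryPermission.WRITE_DATA, 0x00000002L,
            AclEntryPermission.EXECUTE, 0x00000020L);

    static long encodeMask(Set<AclEntryPermission> mask) {
        long aclMask = 0L;
        for (Map.Entry<AclEntryPermission, Long> e : BITS.entrySet()) {
            // OR in one bit per permission present in the set, as encodeAclMask() does.
            if (mask.contains(e.getKey())) {
                aclMask |= e.getValue();
            }
        }
        return aclMask;
    }

    public static void main(String[] args) {
        Set<AclEntryPermission> perms = EnumSet.of(AclEntryPermission.READ_DATA, AclEntryPermission.EXECUTE);
        System.out.printf("mask = 0x%08x%n", encodeMask(perms)); // expected: 0x00000021
    }
}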
Fix creationTime in SftpHelper#writeAttrsV4 SSHD-1228
sshd-sftp/src/main/java/org/apache/sshd/sftp/common/SftpHelper.java
Fix creationTime in SftpHelper#writeAttrsV4
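The writeTime()/readTime() pair in the SftpHelper record above serializes a FileTime as whole seconds plus an optional sub-second nanosecond field for SFTP v4 and later, and recombines the two at millisecond precision when reading. The sketch below reproduces that split and recombination using only the JDK; SftpTimeCodecSketch is a hypothetical class, not part of Apache MINA SSHD, and no Buffer or SftpConstants APIs are used.

import java.nio.file.attribute.FileTime;
import java.util.concurrent.TimeUnit;

// Standalone sketch of the seconds + sub-second-nanoseconds encoding that
// writeTime() emits for SFTP v4+, and the readTime()-style recombination.
public class SftpTimeCodecSketch {
    public static void main(String[] args) {
        FileTime original = FileTime.fromMillis(System.currentTimeMillis());

        // Encode: whole seconds, plus the nanosecond remainder below one second.
        long seconds = original.to(TimeUnit.SECONDS);
        long nanos = original.to(TimeUnit.NANOSECONDS) % TimeUnit.SECONDS.toNanos(1);

        // Decode: seconds back to millis, then add the sub-second part (millisecond precision).
        long millis = TimeUnit.SECONDS.toMillis(seconds) + TimeUnit.NANOSECONDS.toMillis(nanos);
        FileTime decoded = FileTime.from(millis, TimeUnit.MILLISECONDS);

        System.out.println(original + " -> " + seconds + "s + " + nanos + "ns -> " + decoded);
    }
}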
Java
apache-2.0
1c7bd2d1fddc6f5a0e34d125007e629c2c304857
0
takawitter/trie4j,takawitter/trie4j
/* * Copyright 2012 Takao Nakaguchi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.trie4j; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.trie4j.bv.BitVectorUtilTest; import org.trie4j.doublearray.DoubleArrayTest; import org.trie4j.doublearray.OptimizedTailDoubleArrayWithConcatTailBuilderTest; import org.trie4j.doublearray.OptimizedTailDoubleArrayWithSuffixTrieTailBuilderTest; import org.trie4j.doublearray.TailDoubleArrayWithConcatTailBuilderTest; import org.trie4j.doublearray.TailDoubleArrayWithSuffixTrieTailBuilderTest; import org.trie4j.louds.TailLOUDSPPTrieWithConcatTailBuilderTest; import org.trie4j.louds.TailLOUDSPPTrieWithSuffixTrieTailBuilderTest; import org.trie4j.louds.TailLOUDSTrieWithConcatTailBuilderTest; import org.trie4j.louds.TailLOUDSTrieWithSuffixTrieTailBuilderTest; import org.trie4j.louds.LOUDSTrieTest; import org.trie4j.patricia.simple.MapPatriciaTrieTest; import org.trie4j.patricia.simple.PatriciaTrieTest; import org.trie4j.patricia.tail.TailPatriciaTrieWithConcatTailBuilderTest; import org.trie4j.patricia.tail.TailPatriciaTrieWithSuffixTrieTailBuilderTest; import org.trie4j.tail.ConcatTailArrayTest; import org.trie4j.tail.builder.SuffixTrieTailBuilderTest; import org.trie4j.tail.index.ArrayTailIndexTest; import org.trie4j.tail.index.SBVTailIndexTest; import org.trie4j.util.FastBitSetTest; import org.trie4j.util.SuccinctBitVectorTest; @RunWith(Suite.class) @Suite.SuiteClasses({ FastBitSetTest.class, SuffixTrieTailBuilderTest.class, ArrayTailIndexTest.class, SBVTailIndexTest.class, ConcatTailArrayTest.class, SuccinctBitVectorTest.class, BitVectorUtilTest.class, AlgorithmsTest.class, PatriciaTrieTest.class, MapPatriciaTrieTest.class, TailPatriciaTrieWithConcatTailBuilderTest.class, TailPatriciaTrieWithSuffixTrieTailBuilderTest.class, DoubleArrayTest.class, TailDoubleArrayWithConcatTailBuilderTest.class, TailDoubleArrayWithSuffixTrieTailBuilderTest.class, OptimizedTailDoubleArrayWithConcatTailBuilderTest.class, OptimizedTailDoubleArrayWithSuffixTrieTailBuilderTest.class, LOUDSTrieTest.class, TailLOUDSTrieWithConcatTailBuilderTest.class, TailLOUDSTrieWithSuffixTrieTailBuilderTest.class, TailLOUDSPPTrieWithConcatTailBuilderTest.class, TailLOUDSPPTrieWithSuffixTrieTailBuilderTest.class, }) public class AllTests { }
trie4j/test/org/trie4j/AllTests.java
/* * Copyright 2012 Takao Nakaguchi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.trie4j; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.trie4j.bv.BitVectorUtilTest; import org.trie4j.doublearray.DoubleArrayTest; import org.trie4j.doublearray.OptimizedTailDoubleArrayWithConcatTailBuilderTest; import org.trie4j.doublearray.OptimizedTailDoubleArrayWithSuffixTrieTailBuilderTest; import org.trie4j.doublearray.TailDoubleArrayWithConcatTailBuilderTest; import org.trie4j.doublearray.TailDoubleArrayWithSuffixTrieTailBuilderTest; import org.trie4j.louds.TailLOUDSPPTrieWithConcatTailBuilderTest; import org.trie4j.louds.TailLOUDSPPTrieWithSuffixTrieTailBuilderTest; import org.trie4j.louds.TailLOUDSTrieWithConcatTailBuilderTest; import org.trie4j.louds.TailLOUDSTrieWithSuffixTrieTailBuilderTest; import org.trie4j.louds.LOUDSTrieTest; import org.trie4j.patricia.simple.MapPatriciaTrieTest; import org.trie4j.patricia.simple.PatriciaTrieTest; import org.trie4j.patricia.tail.TailPatriciaTrieWithConcatTailBuilderTest; import org.trie4j.patricia.tail.TailPatriciaTrieWithSuffixTrieTailBuilderTest; import org.trie4j.tail.ConcatTailArrayTest; import org.trie4j.tail.builder.SuffixTrieTailBuilderTest; import org.trie4j.tail.index.ArrayTailIndexTest; import org.trie4j.tail.index.SBVTailIndexTest; import org.trie4j.util.SuccinctBitVectorTest; @RunWith(Suite.class) @Suite.SuiteClasses({ SuffixTrieTailBuilderTest.class, ArrayTailIndexTest.class, SBVTailIndexTest.class, ConcatTailArrayTest.class, SuccinctBitVectorTest.class, BitVectorUtilTest.class, AlgorithmsTest.class, PatriciaTrieTest.class, MapPatriciaTrieTest.class, TailPatriciaTrieWithConcatTailBuilderTest.class, TailPatriciaTrieWithSuffixTrieTailBuilderTest.class, DoubleArrayTest.class, TailDoubleArrayWithConcatTailBuilderTest.class, TailDoubleArrayWithSuffixTrieTailBuilderTest.class, OptimizedTailDoubleArrayWithConcatTailBuilderTest.class, OptimizedTailDoubleArrayWithSuffixTrieTailBuilderTest.class, LOUDSTrieTest.class, TailLOUDSTrieWithConcatTailBuilderTest.class, TailLOUDSTrieWithSuffixTrieTailBuilderTest.class, TailLOUDSPPTrieWithConcatTailBuilderTest.class, TailLOUDSPPTrieWithSuffixTrieTailBuilderTest.class, }) public class AllTests { }
update test cases.
trie4j/test/org/trie4j/AllTests.java
update test cases.
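The trie4j commit above only registers FastBitSetTest with the existing JUnit 4 suite (one new import plus one entry in @Suite.SuiteClasses). As a reminder of the mechanism, here is a minimal self-contained suite; ExampleSuite and its nested test classes are placeholders, not trie4j code.

import static org.junit.Assert.assertTrue;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;

// Minimal JUnit 4 suite sketch: registering another test class is a one-line
// addition to @Suite.SuiteClasses, which is all the commit above does.
@RunWith(Suite.class)
@Suite.SuiteClasses({
    ExampleSuite.FirstTest.class,
    ExampleSuite.SecondTest.class  // the "newly added" entry
})
public class ExampleSuite {
    public static class FirstTest {
        @Test public void passes() { assertTrue(true); }
    }
    public static class SecondTest {
        @Test public void alsoPasses() { assertTrue(true); }
    }
}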
Java
apache-2.0
0aa1b37e307d15aee0a5814861f5256669826c3f
0
thomasdarimont/keycloak,thomasdarimont/keycloak,mhajas/keycloak,mposolda/keycloak,vmuzikar/keycloak,didiez/keycloak,ppolavar/keycloak,ewjmulder/keycloak,agolPL/keycloak,ppolavar/keycloak,keycloak/keycloak,srose/keycloak,dbarentine/keycloak,reneploetz/keycloak,raehalme/keycloak,mhajas/keycloak,stianst/keycloak,didiez/keycloak,keycloak/keycloak,dbarentine/keycloak,vmuzikar/keycloak,chameleon82/keycloak,jpkrohling/keycloak,reneploetz/keycloak,srose/keycloak,agolPL/keycloak,AOEpeople/keycloak,abstractj/keycloak,brat000012001/keycloak,jpkrohling/keycloak,iperdomo/keycloak,vmuzikar/keycloak,cfsnyder/keycloak,manuel-palacio/keycloak,mposolda/keycloak,wildfly-security-incubator/keycloak,keycloak/keycloak,jpkrohling/keycloak,stianst/keycloak,darranl/keycloak,mhajas/keycloak,abstractj/keycloak,ssilvert/keycloak,mposolda/keycloak,ahus1/keycloak,girirajsharma/keycloak,girirajsharma/keycloak,raehalme/keycloak,wildfly-security-incubator/keycloak,raehalme/keycloak,stianst/keycloak,vmuzikar/keycloak,vmuzikar/keycloak,brat000012001/keycloak,pedroigor/keycloak,ssilvert/keycloak,hmlnarik/keycloak,hmlnarik/keycloak,iperdomo/keycloak,iperdomo/keycloak,cfsnyder/keycloak,stianst/keycloak,ahus1/keycloak,girirajsharma/keycloak,mhajas/keycloak,darranl/keycloak,pedroigor/keycloak,iperdomo/keycloak,agolPL/keycloak,almighty/keycloak,darranl/keycloak,jpkrohling/keycloak,cfsnyder/keycloak,mbaluch/keycloak,thomasdarimont/keycloak,ewjmulder/keycloak,keycloak/keycloak,manuel-palacio/keycloak,keycloak/keycloak,jpkrohling/keycloak,ahus1/keycloak,reneploetz/keycloak,raehalme/keycloak,brat000012001/keycloak,ahus1/keycloak,chameleon82/keycloak,srose/keycloak,stianst/keycloak,hmlnarik/keycloak,abstractj/keycloak,ahus1/keycloak,abstractj/keycloak,mhajas/keycloak,hmlnarik/keycloak,manuel-palacio/keycloak,almighty/keycloak,srose/keycloak,girirajsharma/keycloak,mbaluch/keycloak,mbaluch/keycloak,cfsnyder/keycloak,brat000012001/keycloak,wildfly-security-incubator/keycloak,AOEpeople/keycloak,pedroigor/keycloak,dbarentine/keycloak,mposolda/keycloak,hmlnarik/keycloak,raehalme/keycloak,reneploetz/keycloak,thomasdarimont/keycloak,thomasdarimont/keycloak,almighty/keycloak,didiez/keycloak,manuel-palacio/keycloak,ahus1/keycloak,vmuzikar/keycloak,ssilvert/keycloak,darranl/keycloak,hmlnarik/keycloak,agolPL/keycloak,chameleon82/keycloak,brat000012001/keycloak,ppolavar/keycloak,ssilvert/keycloak,AOEpeople/keycloak,mbaluch/keycloak,didiez/keycloak,mposolda/keycloak,ewjmulder/keycloak,thomasdarimont/keycloak,chameleon82/keycloak,abstractj/keycloak,pedroigor/keycloak,mposolda/keycloak,reneploetz/keycloak,AOEpeople/keycloak,dbarentine/keycloak,almighty/keycloak,srose/keycloak,pedroigor/keycloak,wildfly-security-incubator/keycloak,ssilvert/keycloak,pedroigor/keycloak,ewjmulder/keycloak,ppolavar/keycloak,raehalme/keycloak
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.testsuite.adapter.example; import org.apache.commons.io.FileUtils; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.graphene.page.Page; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.keycloak.admin.client.resource.ClientResource; import org.keycloak.representations.idm.ClientRepresentation; import org.keycloak.representations.idm.RealmRepresentation; import org.keycloak.representations.idm.UserRepresentation; import org.keycloak.testsuite.adapter.AbstractExampleAdapterTest; import org.keycloak.testsuite.adapter.page.CustomerPortalExample; import org.keycloak.testsuite.adapter.page.DatabaseServiceExample; import org.keycloak.testsuite.adapter.page.ProductPortalExample; import org.keycloak.testsuite.admin.ApiUtil; import org.keycloak.testsuite.auth.page.account.Account; import org.keycloak.testsuite.auth.page.account.Applications; import org.keycloak.testsuite.auth.page.login.OAuthGrant; import org.keycloak.testsuite.console.page.events.Config; import org.keycloak.testsuite.console.page.events.LoginEvents; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.keycloak.testsuite.auth.page.AuthRealm.DEMO; import static org.keycloak.testsuite.util.IOUtil.loadRealm; import static org.keycloak.testsuite.util.URLAssert.assertCurrentUrlStartsWith; public abstract class AbstractDemoExampleAdapterTest extends AbstractExampleAdapterTest { @Page private CustomerPortalExample customerPortalExamplePage; @Page private ProductPortalExample productPortalExamplePage; @Page private DatabaseServiceExample databaseServiceExamplePage; @Page private Account testRealmAccountPage; @Page private Config configPage; @Page private LoginEvents loginEventsPage; @Page private OAuthGrant oAuthGrantPage; @Page private Applications applicationsPage; @Deployment(name = CustomerPortalExample.DEPLOYMENT_NAME) private static WebArchive customerPortalExample() throws IOException { return exampleDeployment(CustomerPortalExample.DEPLOYMENT_NAME); } @Deployment(name = ProductPortalExample.DEPLOYMENT_NAME) private static WebArchive productPortalExample() throws IOException { return exampleDeployment(ProductPortalExample.DEPLOYMENT_NAME); } @Deployment(name = DatabaseServiceExample.DEPLOYMENT_NAME) private static WebArchive databaseServiceExample() throws IOException { return exampleDeployment("database-service"); } @Override public void addAdapterTestRealms(List<RealmRepresentation> testRealms) { testRealms.add( loadRealm(new File(EXAMPLES_HOME_DIR + 
"/preconfigured-demo/testrealm.json"))); } @Override public void setDefaultPageUriParameters() { super.setDefaultPageUriParameters(); testRealmPage.setAuthRealm(DEMO); testRealmLoginPage.setAuthRealm(DEMO); testRealmAccountPage.setAuthRealm(DEMO); configPage.setConsoleRealm(DEMO); loginEventsPage.setConsoleRealm(DEMO); applicationsPage.setAuthRealm(DEMO); } @Before public void beforeDemoExampleTest() { customerPortalExamplePage.navigateTo(); driver.manage().deleteAllCookies(); productPortalExamplePage.navigateTo(); driver.manage().deleteAllCookies(); } @Test public void customerPortalListingTest() { customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(customerPortalExamplePage); customerPortalExamplePage.waitForCustomerListingHeader(); Assert.assertTrue(driver.getPageSource().contains("Username: bburke@redhat.com")); Assert.assertTrue(driver.getPageSource().contains("Bill Burke")); Assert.assertTrue(driver.getPageSource().contains("Stian Thorgersen")); } @Test public void customerPortalSessionTest() { customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerSession(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(customerPortalExamplePage); customerPortalExamplePage.waitForCustomerSessionHeader(); Assert.assertTrue(driver.getPageSource().contains("You visited this page")); } @Test public void productPortalListingTest() { productPortalExamplePage.navigateTo(); productPortalExamplePage.productListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(productPortalExamplePage); productPortalExamplePage.waitForProductListingHeader(); Assert.assertTrue(driver.getPageSource().contains("iphone")); Assert.assertTrue(driver.getPageSource().contains("ipad")); Assert.assertTrue(driver.getPageSource().contains("ipod")); productPortalExamplePage.goToCustomers(); } @Test public void goToProductPortalWithOneLoginTest() { productPortalExamplePage.navigateTo(); productPortalExamplePage.productListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(productPortalExamplePage); productPortalExamplePage.waitForProductListingHeader(); productPortalExamplePage.goToCustomers(); assertCurrentUrlStartsWith(customerPortalExamplePage); customerPortalExamplePage.customerListing(); customerPortalExamplePage.goToProducts(); assertCurrentUrlStartsWith(productPortalExamplePage); } @Test public void logoutFromAllAppsTest() { productPortalExamplePage.navigateTo(); productPortalExamplePage.productListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(productPortalExamplePage); productPortalExamplePage.waitForProductListingHeader(); if (isRelative()) { //KEYCLOAK-1546 productPortalExamplePage.logOut(); } else { driver.navigate().to(testRealmPage.getOIDCLogoutUrl() + "?redirect_uri=" + productPortalExamplePage); } assertCurrentUrlStartsWith(productPortalExamplePage); productPortalExamplePage.productListing(); customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); customerPortalExamplePage.logOut(); } @Test public void grantServerBasedApp() { ClientResource clientResource = ApiUtil.findClientResourceByClientId(testRealmResource(), "customer-portal"); ClientRepresentation client = 
clientResource.toRepresentation(); client.setConsentRequired(true); clientResource.update(client); RealmRepresentation realm = testRealmResource().toRepresentation(); realm.setEventsEnabled(true); realm.setEnabledEventTypes(Arrays.asList("REVOKE_GRANT", "LOGIN")); testRealmResource().update(realm); customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerSession(); loginPage.form().login("bburke@redhat.com", "password"); assertTrue(oAuthGrantPage.isCurrent()); oAuthGrantPage.accept(); assertTrue(driver.getPageSource().contains("Your hostname:")); assertTrue(driver.getPageSource().contains("You visited this page")); applicationsPage.navigateTo(); applicationsPage.revokeGrantForApplication("customer-portal"); customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerSession(); assertTrue(oAuthGrantPage.isCurrent()); loginEventsPage.navigateTo(); if (!testContext.isAdminLoggedIn()) { loginPage.form().login(adminUser); testContext.setAdminLoggedIn(true); } loginEventsPage.table().filter(); loginEventsPage.table().filterForm().addEventType("REVOKE_GRANT"); loginEventsPage.table().update(); List<WebElement> resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='REVOKE_GRANT']")); resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='account']")); resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); resultList.get(0).findElement(By.xpath(".//td[text()='revoked_client']/../td[text()='customer-portal']")); loginEventsPage.table().reset(); loginEventsPage.table().filterForm().addEventType("LOGIN"); loginEventsPage.table().update(); resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='LOGIN']")); resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='customer-portal']")); resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); resultList.get(0).findElement(By.xpath(".//td[text()='username']/../td[text()='bburke@redhat.com']")); resultList.get(0).findElement(By.xpath(".//td[text()='consent']/../td[text()='consent_granted']")); } @Test public void historyOfAccessResourceTest() throws IOException { RealmRepresentation realm = testRealmResource().toRepresentation(); realm.setEventsEnabled(true); realm.setEnabledEventTypes(Arrays.asList("LOGIN", "LOGIN_ERROR", "LOGOUT", "CODE_TO_TOKEN")); testRealmResource().update(realm); customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); Assert.assertTrue(driver.getPageSource().contains("Username: bburke@redhat.com") && driver.getPageSource().contains("Bill Burke") && driver.getPageSource().contains("Stian Thorgersen") ); if (isRelative()) { //KEYCLOAK-1546 productPortalExamplePage.logOut(); } else { driver.navigate().to(testRealmPage.getOIDCLogoutUrl() + "?redirect_uri=" + productPortalExamplePage); } loginEventsPage.navigateTo(); if (!testContext.isAdminLoggedIn()) { loginPage.form().login(adminUser); testContext.setAdminLoggedIn(true); } loginEventsPage.table().filter(); loginEventsPage.table().filterForm().addEventType("LOGOUT"); loginEventsPage.table().update(); List<WebElement> resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='LOGOUT']")); 
resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='']")); resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); loginEventsPage.table().reset(); loginEventsPage.table().filterForm().addEventType("LOGIN"); loginEventsPage.table().update(); resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='LOGIN']")); resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='customer-portal']")); resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); resultList.get(0).findElement(By.xpath(".//td[text()='username']/../td[text()='bburke@redhat.com']")); loginEventsPage.table().reset(); loginEventsPage.table().filterForm().addEventType("CODE_TO_TOKEN"); loginEventsPage.table().update(); resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='CODE_TO_TOKEN']")); resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='customer-portal']")); resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); resultList.get(0).findElement(By.xpath(".//td[text()='refresh_token_type']/../td[text()='Refresh']")); String serverLogPath = null; if (System.getProperty("app.server.wildfly", "false").equals("true")) { serverLogPath = System.getProperty("app.server.wildfly.home") + "/standalone/log/server.log"; } if (System.getProperty("app.server.eap6", "false").equals("true")) { serverLogPath = System.getProperty("app.server.eap6.home") + "/standalone/log/server.log"; } if (System.getProperty("app.server.eap7", "false").equals("true")) { serverLogPath = System.getProperty("app.server.eap7.home") + "/standalone/log/server.log"; } String appServerUrl; if (Boolean.parseBoolean(System.getProperty("app.server.ssl.required"))) { appServerUrl = "https://localhost:" + System.getProperty("app.server.https.port", "8543") + "/"; } else { appServerUrl = "http://localhost:" + System.getProperty("app.server.http.port", "8280") + "/"; } if (serverLogPath != null) { File serverLog = new File(serverLogPath); String serverLogContent = FileUtils.readFileToString(serverLog); UserRepresentation bburke = ApiUtil.findUserByUsername(testRealmResource(), "bburke@redhat.com"); Pattern pattern = Pattern.compile("User '" + bburke.getId() + "' invoking '" + appServerUrl + "customer-portal\\/customers\\/view\\.jsp[^\\s]+' on client 'customer-portal'"); Matcher matcher = pattern.matcher(serverLogContent); assertTrue(matcher.find()); assertTrue(serverLogContent.contains("User '" + bburke.getId() + "' invoking '" + appServerUrl + "database/customers' on client 'database-service'")); } } }
testsuite/integration-arquillian/tests/base/src/test/java/org/keycloak/testsuite/adapter/example/AbstractDemoExampleAdapterTest.java
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.testsuite.adapter.example; import org.apache.commons.io.FileUtils; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.graphene.page.Page; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.keycloak.admin.client.resource.ClientResource; import org.keycloak.representations.idm.ClientRepresentation; import org.keycloak.representations.idm.RealmRepresentation; import org.keycloak.representations.idm.UserRepresentation; import org.keycloak.testsuite.adapter.AbstractExampleAdapterTest; import org.keycloak.testsuite.adapter.page.CustomerPortalExample; import org.keycloak.testsuite.adapter.page.DatabaseServiceExample; import org.keycloak.testsuite.adapter.page.ProductPortalExample; import org.keycloak.testsuite.admin.ApiUtil; import org.keycloak.testsuite.auth.page.account.Account; import org.keycloak.testsuite.auth.page.account.Applications; import org.keycloak.testsuite.auth.page.login.OAuthGrant; import org.keycloak.testsuite.console.page.events.Config; import org.keycloak.testsuite.console.page.events.LoginEvents; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.keycloak.testsuite.auth.page.AuthRealm.DEMO; import static org.keycloak.testsuite.util.IOUtil.loadRealm; import static org.keycloak.testsuite.util.URLAssert.assertCurrentUrlStartsWith; public abstract class AbstractDemoExampleAdapterTest extends AbstractExampleAdapterTest { @Page private CustomerPortalExample customerPortalExamplePage; @Page private ProductPortalExample productPortalExamplePage; @Page private DatabaseServiceExample databaseServiceExamplePage; @Page private Account testRealmAccountPage; @Page private Config configPage; @Page private LoginEvents loginEventsPage; @Page private OAuthGrant oAuthGrantPage; @Page private Applications applicationsPage; @Deployment(name = CustomerPortalExample.DEPLOYMENT_NAME) private static WebArchive customerPortalExample() throws IOException { return exampleDeployment(CustomerPortalExample.DEPLOYMENT_NAME); } @Deployment(name = ProductPortalExample.DEPLOYMENT_NAME) private static WebArchive productPortalExample() throws IOException { return exampleDeployment(ProductPortalExample.DEPLOYMENT_NAME); } @Deployment(name = DatabaseServiceExample.DEPLOYMENT_NAME) private static WebArchive databaseServiceExample() throws IOException { return exampleDeployment("database-service"); } @Override public void addAdapterTestRealms(List<RealmRepresentation> testRealms) { testRealms.add( loadRealm(new File(EXAMPLES_HOME_DIR + 
"/preconfigured-demo/testrealm.json"))); } @Override public void setDefaultPageUriParameters() { super.setDefaultPageUriParameters(); testRealmPage.setAuthRealm(DEMO); testRealmLoginPage.setAuthRealm(DEMO); testRealmAccountPage.setAuthRealm(DEMO); configPage.setConsoleRealm(DEMO); loginEventsPage.setConsoleRealm(DEMO); applicationsPage.setAuthRealm(DEMO); } @Before public void beforeDemoExampleTest() { customerPortalExamplePage.navigateTo(); driver.manage().deleteAllCookies(); productPortalExamplePage.navigateTo(); driver.manage().deleteAllCookies(); } @Test public void customerPortalListingTest() { customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(customerPortalExamplePage); customerPortalExamplePage.waitForCustomerListingHeader(); Assert.assertTrue(driver.getPageSource().contains("Username: bburke@redhat.com")); Assert.assertTrue(driver.getPageSource().contains("Bill Burke")); Assert.assertTrue(driver.getPageSource().contains("Stian Thorgersen")); } @Test public void customerPortalSessionTest() { customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerSession(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(customerPortalExamplePage); customerPortalExamplePage.waitForCustomerSessionHeader(); Assert.assertTrue(driver.getPageSource().contains("You visited this page")); } @Test public void productPortalListingTest() { productPortalExamplePage.navigateTo(); productPortalExamplePage.productListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(productPortalExamplePage); productPortalExamplePage.waitForProductListingHeader(); Assert.assertTrue(driver.getPageSource().contains("iphone")); Assert.assertTrue(driver.getPageSource().contains("ipad")); Assert.assertTrue(driver.getPageSource().contains("ipod")); productPortalExamplePage.goToCustomers(); } @Test public void goToProductPortalWithOneLoginTest() { productPortalExamplePage.navigateTo(); productPortalExamplePage.productListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(productPortalExamplePage); productPortalExamplePage.waitForProductListingHeader(); productPortalExamplePage.goToCustomers(); assertCurrentUrlStartsWith(customerPortalExamplePage); customerPortalExamplePage.customerListing(); customerPortalExamplePage.goToProducts(); assertCurrentUrlStartsWith(productPortalExamplePage); } @Test public void logoutFromAllAppsTest() { productPortalExamplePage.navigateTo(); productPortalExamplePage.productListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); assertCurrentUrlStartsWith(productPortalExamplePage); productPortalExamplePage.waitForProductListingHeader(); if (isRelative()) { //KEYCLOAK-1546 productPortalExamplePage.logOut(); } else { driver.navigate().to(testRealmPage.getOIDCLogoutUrl() + "?redirect_uri=" + productPortalExamplePage); } assertCurrentUrlStartsWith(productPortalExamplePage); productPortalExamplePage.productListing(); customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); customerPortalExamplePage.logOut(); } @Test public void grantServerBasedApp() { ClientResource clientResource = ApiUtil.findClientResourceByClientId(testRealmResource(), "customer-portal"); ClientRepresentation client = 
clientResource.toRepresentation(); client.setConsentRequired(true); clientResource.update(client); RealmRepresentation realm = testRealmResource().toRepresentation(); realm.setEventsEnabled(true); realm.setEnabledEventTypes(Arrays.asList("REVOKE_GRANT", "LOGIN")); testRealmResource().update(realm); customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerSession(); loginPage.form().login("bburke@redhat.com", "password"); assertTrue(oAuthGrantPage.isCurrent()); oAuthGrantPage.accept(); assertTrue(driver.getPageSource().contains("Your hostname:")); assertTrue(driver.getPageSource().contains("You visited this page")); applicationsPage.navigateTo(); applicationsPage.revokeGrantForApplication("customer-portal"); customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerSession(); assertTrue(oAuthGrantPage.isCurrent()); loginEventsPage.navigateTo(); loginPage.form().login(adminUser); loginEventsPage.table().filter(); loginEventsPage.table().filterForm().addEventType("REVOKE_GRANT"); loginEventsPage.table().update(); List<WebElement> resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='REVOKE_GRANT']")); resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='account']")); resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); resultList.get(0).findElement(By.xpath(".//td[text()='revoked_client']/../td[text()='customer-portal']")); loginEventsPage.table().reset(); loginEventsPage.table().filterForm().addEventType("LOGIN"); loginEventsPage.table().update(); resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='LOGIN']")); resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='customer-portal']")); resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); resultList.get(0).findElement(By.xpath(".//td[text()='username']/../td[text()='bburke@redhat.com']")); resultList.get(0).findElement(By.xpath(".//td[text()='consent']/../td[text()='consent_granted']")); } @Test public void historyOfAccessResourceTest() throws IOException { RealmRepresentation realm = testRealmResource().toRepresentation(); realm.setEventsEnabled(true); realm.setEnabledEventTypes(Arrays.asList("LOGIN", "LOGIN_ERROR", "LOGOUT", "CODE_TO_TOKEN")); testRealmResource().update(realm); customerPortalExamplePage.navigateTo(); customerPortalExamplePage.customerListing(); testRealmLoginPage.form().login("bburke@redhat.com", "password"); Assert.assertTrue(driver.getPageSource().contains("Username: bburke@redhat.com") && driver.getPageSource().contains("Bill Burke") && driver.getPageSource().contains("Stian Thorgersen") ); if (isRelative()) { //KEYCLOAK-1546 productPortalExamplePage.logOut(); } else { driver.navigate().to(testRealmPage.getOIDCLogoutUrl() + "?redirect_uri=" + productPortalExamplePage); } loginEventsPage.navigateTo(); if (!testContext.isAdminLoggedIn()) { loginPage.form().login(adminUser); testContext.setAdminLoggedIn(true); } loginEventsPage.table().filter(); loginEventsPage.table().filterForm().addEventType("LOGOUT"); loginEventsPage.table().update(); List<WebElement> resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='LOGOUT']")); resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='']")); 
resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); loginEventsPage.table().reset(); loginEventsPage.table().filterForm().addEventType("LOGIN"); loginEventsPage.table().update(); resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='LOGIN']")); resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='customer-portal']")); resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); resultList.get(0).findElement(By.xpath(".//td[text()='username']/../td[text()='bburke@redhat.com']")); loginEventsPage.table().reset(); loginEventsPage.table().filterForm().addEventType("CODE_TO_TOKEN"); loginEventsPage.table().update(); resultList = loginEventsPage.table().rows(); assertEquals(1, resultList.size()); resultList.get(0).findElement(By.xpath(".//td[text()='CODE_TO_TOKEN']")); resultList.get(0).findElement(By.xpath(".//td[text()='Client']/../td[text()='customer-portal']")); resultList.get(0).findElement(By.xpath(".//td[text()='IP Address']/../td[text()='127.0.0.1']")); resultList.get(0).findElement(By.xpath(".//td[text()='refresh_token_type']/../td[text()='Refresh']")); String serverLogPath = null; if (System.getProperty("app.server.wildfly", "false").equals("true")) { serverLogPath = System.getProperty("app.server.wildfly.home") + "/standalone/log/server.log"; } if (System.getProperty("app.server.eap6", "false").equals("true")) { serverLogPath = System.getProperty("app.server.eap6.home") + "/standalone/log/server.log"; } if (System.getProperty("app.server.eap7", "false").equals("true")) { serverLogPath = System.getProperty("app.server.eap7.home") + "/standalone/log/server.log"; } String appServerUrl; if (Boolean.parseBoolean(System.getProperty("app.server.ssl.required"))) { appServerUrl = "https://localhost:" + System.getProperty("app.server.https.port", "8543") + "/"; } else { appServerUrl = "http://localhost:" + System.getProperty("app.server.http.port", "8280") + "/"; } if (serverLogPath != null) { File serverLog = new File(serverLogPath); String serverLogContent = FileUtils.readFileToString(serverLog); UserRepresentation bburke = ApiUtil.findUserByUsername(testRealmResource(), "bburke@redhat.com"); Pattern pattern = Pattern.compile("User '" + bburke.getId() + "' invoking '" + appServerUrl + "customer-portal\\/customers\\/view\\.jsp[^\\s]+' on client 'customer-portal'"); Matcher matcher = pattern.matcher(serverLogContent); assertTrue(matcher.find()); assertTrue(serverLogContent.contains("User '" + bburke.getId() + "' invoking '" + appServerUrl + "database/customers' on client 'database-service'")); } } }
Fix DemoExampleAdapterTest
testsuite/integration-arquillian/tests/base/src/test/java/org/keycloak/testsuite/adapter/example/AbstractDemoExampleAdapterTest.java
Fix DemoExampleAdapterTest
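The Keycloak fix above replaces an unconditional admin login in grantServerBasedApp() with a guard on testContext.isAdminLoggedIn(), matching what historyOfAccessResourceTest() already did, so the admin console is only logged into once per shared test context. A minimal sketch of that pattern follows; TestContext and AdminConsole here are illustrative stand-ins, not the Keycloak testsuite classes.

// Sketch of the "log in once per shared test context" guard used in the fix above.
public class AdminLoginGuardSketch {

    static class TestContext {
        private boolean adminLoggedIn;
        boolean isAdminLoggedIn() { return adminLoggedIn; }
        void setAdminLoggedIn(boolean value) { adminLoggedIn = value; }
    }

    static class AdminConsole {
        void login(String user) { System.out.println("logging in as " + user); }
    }

    static void ensureAdminLoggedIn(TestContext ctx, AdminConsole console) {
        // Only log in if no earlier test in this run has done so already.
        if (!ctx.isAdminLoggedIn()) {
            console.login("admin");
            ctx.setAdminLoggedIn(true);
        }
    }

    public static void main(String[] args) {
        TestContext ctx = new TestContext();
        AdminConsole console = new AdminConsole();
        ensureAdminLoggedIn(ctx, console); // performs the login
        ensureAdminLoggedIn(ctx, console); // no-op on the second call
    }
}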
Java
apache-2.0
b792cec71b246964e67fb93485475f61709e6215
0
jerome79/OG-Platform,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,DevStreet/FinanceAnalytics,DevStreet/FinanceAnalytics,nssales/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,jeorme/OG-Platform,McLeodMoores/starling,McLeodMoores/starling,jeorme/OG-Platform,nssales/OG-Platform,codeaudit/OG-Platform,nssales/OG-Platform,nssales/OG-Platform,jeorme/OG-Platform,McLeodMoores/starling,jerome79/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,ChinaQuants/OG-Platform
/** * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics.model.irfutureoption; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.threeten.bp.Clock; import org.threeten.bp.ZonedDateTime; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.analytics.financial.instrument.InstrumentDefinition; import com.opengamma.analytics.financial.interestrate.InstrumentDerivative; import com.opengamma.analytics.financial.interestrate.YieldCurveBundle; import com.opengamma.analytics.financial.model.option.definition.YieldCurveWithBlackCubeBundle; import com.opengamma.analytics.financial.model.volatility.surface.VolatilitySurface; import com.opengamma.analytics.math.surface.InterpolatedDoublesSurface; import com.opengamma.core.config.ConfigSource; import com.opengamma.core.holiday.HolidaySource; import com.opengamma.core.position.Trade; import com.opengamma.core.region.RegionSource; import com.opengamma.core.security.Security; import com.opengamma.core.security.SecuritySource; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.ComputationTargetSpecification; import com.opengamma.engine.function.AbstractFunction; import com.opengamma.engine.function.FunctionCompilationContext; import com.opengamma.engine.function.FunctionExecutionContext; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.target.ComputationTargetType; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueRequirementNames; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.financial.OpenGammaCompilationContext; import com.opengamma.financial.OpenGammaExecutionContext; import com.opengamma.financial.analytics.conversion.FixedIncomeConverterDataProvider; import com.opengamma.financial.analytics.conversion.InterestRateFutureOptionSecurityConverter; import com.opengamma.financial.analytics.conversion.InterestRateFutureOptionTradeConverter; import com.opengamma.financial.analytics.ircurve.calcconfig.ConfigDBCurveCalculationConfigSource; import com.opengamma.financial.analytics.ircurve.calcconfig.MultiCurveCalculationConfig; import com.opengamma.financial.analytics.model.CalculationPropertyNamesAndValues; import com.opengamma.financial.analytics.model.InstrumentTypeProperties; import com.opengamma.financial.analytics.model.YieldCurveFunctionUtils; import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesBundle; import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesFunctionUtils; import com.opengamma.financial.convention.ConventionBundleSource; import com.opengamma.financial.security.FinancialSecurityUtils; import com.opengamma.financial.security.option.IRFutureOptionSecurity; import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.money.Currency; /** * */ public abstract class InterestRateFutureOptionBlackCurveSpecificFunction extends AbstractFunction.NonCompiledInvoker { private static final Logger s_logger = LoggerFactory.getLogger(InterestRateFutureOptionBlackCurveSpecificFunction.class); private final 
String _valueRequirementName; private InterestRateFutureOptionTradeConverter _converter; private FixedIncomeConverterDataProvider _dataConverter; public InterestRateFutureOptionBlackCurveSpecificFunction(final String valueRequirementName) { ArgumentChecker.notNull(valueRequirementName, "value requirement name"); _valueRequirementName = valueRequirementName; } @Override public void init(final FunctionCompilationContext context) { final HolidaySource holidaySource = OpenGammaCompilationContext.getHolidaySource(context); final RegionSource regionSource = OpenGammaCompilationContext.getRegionSource(context); final ConventionBundleSource conventionSource = OpenGammaCompilationContext.getConventionBundleSource(context); final SecuritySource securitySource = OpenGammaCompilationContext.getSecuritySource(context); final HistoricalTimeSeriesResolver timeSeriesResolver = OpenGammaCompilationContext.getHistoricalTimeSeriesResolver(context); _converter = new InterestRateFutureOptionTradeConverter(new InterestRateFutureOptionSecurityConverter(holidaySource, conventionSource, regionSource, securitySource)); _dataConverter = new FixedIncomeConverterDataProvider(conventionSource, timeSeriesResolver); } @Override public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) { final Clock snapshotClock = executionContext.getValuationClock(); final ZonedDateTime now = ZonedDateTime.now(snapshotClock); final HistoricalTimeSeriesBundle timeSeries = HistoricalTimeSeriesFunctionUtils.getHistoricalTimeSeriesInputs(executionContext, inputs); final Trade trade = target.getTrade(); final IRFutureOptionSecurity security = (IRFutureOptionSecurity) trade.getSecurity(); final ValueRequirement desiredValue = desiredValues.iterator().next(); final Currency currency = FinancialSecurityUtils.getCurrency(security); final String curveName = desiredValue.getConstraint(ValuePropertyNames.CURVE); final String surfaceName = desiredValue.getConstraint(ValuePropertyNames.SURFACE); // To enable standard and midcurve options to share the same default name final String surfaceNameWithPrefix = surfaceName + "_" + IRFutureOptionFunctionHelper.getFutureOptionPrefix(target); final String curveCalculationConfigName = desiredValue.getConstraint(ValuePropertyNames.CURVE_CALCULATION_CONFIG); final ConfigSource configSource = OpenGammaExecutionContext.getConfigSource(executionContext); final ConfigDBCurveCalculationConfigSource curveCalculationConfigSource = new ConfigDBCurveCalculationConfigSource(configSource); final MultiCurveCalculationConfig curveCalculationConfig = curveCalculationConfigSource.getConfig(curveCalculationConfigName); if (curveCalculationConfig == null) { throw new OpenGammaRuntimeException("Could not find curve calculation configuration named " + curveCalculationConfigName); } final String[] curveNames = curveCalculationConfig.getYieldCurveNames(); final String[] fullCurveNames = new String[curveNames.length]; for (int i = 0; i < curveNames.length; i++) { fullCurveNames[i] = curveNames[i] + "_" + currency.getCode(); } final YieldCurveBundle curves = YieldCurveFunctionUtils.getAllYieldCurves(inputs, curveCalculationConfig, curveCalculationConfigSource); final Object volatilitySurfaceObject = inputs.getValue(getVolatilityRequirement(surfaceNameWithPrefix, currency)); if (volatilitySurfaceObject == null) { throw new OpenGammaRuntimeException("Could not get volatility surface"); } final VolatilitySurface 
volatilitySurface = (VolatilitySurface) volatilitySurfaceObject; if (!(volatilitySurface.getSurface() instanceof InterpolatedDoublesSurface)) { throw new OpenGammaRuntimeException("Expecting an InterpolatedDoublesSurface; got " + volatilitySurface.getSurface().getClass()); } final InstrumentDefinition<?> irFutureOptionDefinition = _converter.convert(trade); final InstrumentDerivative irFutureOption = _dataConverter.convert(security, irFutureOptionDefinition, now, fullCurveNames, timeSeries); final ValueProperties properties = getResultProperties(currency.getCode(), curveCalculationConfigName, surfaceName, curveName); final ValueSpecification spec = new ValueSpecification(_valueRequirementName, target.toSpecification(), properties); final YieldCurveWithBlackCubeBundle data = new YieldCurveWithBlackCubeBundle(volatilitySurface.getSurface(), curves); return getResult(irFutureOption, data, curveName, spec, security); } @Override public ComputationTargetType getTargetType() { return ComputationTargetType.TRADE; } @Override public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) { return target.getTrade().getSecurity() instanceof IRFutureOptionSecurity; } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) { final String currency = FinancialSecurityUtils.getCurrency(target.getTrade().getSecurity()).getCode(); return Collections.singleton(new ValueSpecification(_valueRequirementName, target.toSpecification(), getResultProperties(currency))); } @Override public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) { final ValueProperties constraints = desiredValue.getConstraints(); final Set<String> curves = constraints.getValues(ValuePropertyNames.CURVE); if (curves == null || curves.size() != 1) { s_logger.error("Must specify a curve against which to calculate the desired value " + _valueRequirementName); return null; } final Set<String> curveCalculationConfigNames = constraints.getValues(ValuePropertyNames.CURVE_CALCULATION_CONFIG); if (curveCalculationConfigNames == null || curveCalculationConfigNames.size() != 1) { return null; } final Set<String> surfaceNames = constraints.getValues(ValuePropertyNames.SURFACE); if (surfaceNames == null || surfaceNames.size() != 1) { return null; } final String curveCalculationConfigName = curveCalculationConfigNames.iterator().next(); final ConfigSource configSource = OpenGammaCompilationContext.getConfigSource(context); final ConfigDBCurveCalculationConfigSource curveCalculationConfigSource = new ConfigDBCurveCalculationConfigSource(configSource); final MultiCurveCalculationConfig curveCalculationConfig = curveCalculationConfigSource.getConfig(curveCalculationConfigName); if (curveCalculationConfig == null) { s_logger.error("Could not find curve calculation configuration named " + curveCalculationConfigName); return null; } final Currency currency = FinancialSecurityUtils.getCurrency(target.getTrade().getSecurity()); if (!ComputationTargetSpecification.of(currency).equals(curveCalculationConfig.getTarget())) { s_logger.error("Security currency and curve calculation config id were not equal; have {} and {}", currency, curveCalculationConfig.getTarget()); return null; } final String[] curveNames = curveCalculationConfig.getYieldCurveNames(); final String curve = curves.iterator().next(); if (Arrays.binarySearch(curveNames, curve) < 0) { 
s_logger.error("Curve named {} is not available in curve calculation configuration called {}", curve, curveCalculationConfigName); return null; } final String surfaceName = surfaceNames.iterator().next() + "_" + IRFutureOptionFunctionHelper.getFutureOptionPrefix(target); final Set<ValueRequirement> requirements = new HashSet<>(); requirements.addAll(YieldCurveFunctionUtils.getCurveRequirements(curveCalculationConfig, curveCalculationConfigSource)); requirements.add(getVolatilityRequirement(surfaceName, currency)); final Set<ValueRequirement> tsRequirements = _dataConverter.getConversionTimeSeriesRequirements(target.getTrade().getSecurity(), _converter.convert(target.getTrade())); if (tsRequirements == null) { return null; } requirements.addAll(tsRequirements); return requirements; } protected abstract Set<ComputedValue> getResult(final InstrumentDerivative irFutureOption, final YieldCurveWithBlackCubeBundle data, final String curveName, final ValueSpecification spec, final Security security); private ValueProperties getResultProperties(final String currency) { return createValueProperties() .with(ValuePropertyNames.CALCULATION_METHOD, CalculationPropertyNamesAndValues.BLACK_METHOD) .withAny(ValuePropertyNames.CURVE_CALCULATION_CONFIG) .withAny(ValuePropertyNames.SURFACE) .with(ValuePropertyNames.CURRENCY, currency) .with(ValuePropertyNames.CURVE_CURRENCY, currency) .withAny(ValuePropertyNames.CURVE).get(); } private ValueProperties getResultProperties(final String currency, final String curveCalculationConfig, final String surfaceName, final String curveName) { return createValueProperties() .with(ValuePropertyNames.CALCULATION_METHOD, CalculationPropertyNamesAndValues.BLACK_METHOD) .with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, curveCalculationConfig) .with(ValuePropertyNames.SURFACE, surfaceName) .with(ValuePropertyNames.CURRENCY, currency) .with(ValuePropertyNames.CURVE_CURRENCY, currency) .with(ValuePropertyNames.CURVE, curveName).get(); } private ValueRequirement getVolatilityRequirement(final String surface, final Currency currency) { final ValueProperties properties = ValueProperties.builder() .with(ValuePropertyNames.SURFACE, surface) .with(InstrumentTypeProperties.PROPERTY_SURFACE_INSTRUMENT_TYPE, InstrumentTypeProperties.IR_FUTURE_OPTION).get(); return new ValueRequirement(ValueRequirementNames.INTERPOLATED_VOLATILITY_SURFACE, ComputationTargetSpecification.of(currency), properties); } }
projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/irfutureoption/InterestRateFutureOptionBlackCurveSpecificFunction.java
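Comparing this new version of InterestRateFutureOptionBlackCurveSpecificFunction with the old contents that follow, the visible change is that the curve names from the calculation config are suffixed with the security's currency code (fullCurveNames[i] = curveNames[i] + "_" + currency.getCode()) before the derivative conversion, instead of being passed through unqualified. A minimal sketch of that qualification step is below; CurveNameSuffixSketch uses plain strings where OpenGamma uses its Currency type, and the curve names in main() are made up for illustration.

import java.util.Arrays;

// Sketch of the curve-name qualification introduced in the new version above:
// each configured curve name gets a "_<currency code>" suffix before conversion.
public class CurveNameSuffixSketch {

    static String[] qualifyCurveNames(String[] curveNames, String currencyCode) {
        String[] fullCurveNames = new String[curveNames.length];
        for (int i = 0; i < curveNames.length; i++) {
            fullCurveNames[i] = curveNames[i] + "_" + currencyCode;
        }
        return fullCurveNames;
    }

    public static void main(String[] args) {
        String[] names = {"Discounting", "Forward3M"};
        // prints [Discounting_USD, Forward3M_USD]
        System.out.println(Arrays.toString(qualifyCurveNames(names, "USD")));
    }
}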
/** * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics.model.irfutureoption; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.threeten.bp.Clock; import org.threeten.bp.ZonedDateTime; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.analytics.financial.instrument.InstrumentDefinition; import com.opengamma.analytics.financial.interestrate.InstrumentDerivative; import com.opengamma.analytics.financial.interestrate.YieldCurveBundle; import com.opengamma.analytics.financial.model.option.definition.YieldCurveWithBlackCubeBundle; import com.opengamma.analytics.financial.model.volatility.surface.VolatilitySurface; import com.opengamma.analytics.math.surface.InterpolatedDoublesSurface; import com.opengamma.core.config.ConfigSource; import com.opengamma.core.holiday.HolidaySource; import com.opengamma.core.position.Trade; import com.opengamma.core.region.RegionSource; import com.opengamma.core.security.Security; import com.opengamma.core.security.SecuritySource; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.ComputationTargetSpecification; import com.opengamma.engine.function.AbstractFunction; import com.opengamma.engine.function.FunctionCompilationContext; import com.opengamma.engine.function.FunctionExecutionContext; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.target.ComputationTargetType; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueRequirementNames; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.financial.OpenGammaCompilationContext; import com.opengamma.financial.OpenGammaExecutionContext; import com.opengamma.financial.analytics.conversion.FixedIncomeConverterDataProvider; import com.opengamma.financial.analytics.conversion.InterestRateFutureOptionSecurityConverter; import com.opengamma.financial.analytics.conversion.InterestRateFutureOptionTradeConverter; import com.opengamma.financial.analytics.ircurve.calcconfig.ConfigDBCurveCalculationConfigSource; import com.opengamma.financial.analytics.ircurve.calcconfig.MultiCurveCalculationConfig; import com.opengamma.financial.analytics.model.CalculationPropertyNamesAndValues; import com.opengamma.financial.analytics.model.InstrumentTypeProperties; import com.opengamma.financial.analytics.model.YieldCurveFunctionUtils; import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesBundle; import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesFunctionUtils; import com.opengamma.financial.convention.ConventionBundleSource; import com.opengamma.financial.security.FinancialSecurityUtils; import com.opengamma.financial.security.option.IRFutureOptionSecurity; import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.money.Currency; /** * */ public abstract class InterestRateFutureOptionBlackCurveSpecificFunction extends AbstractFunction.NonCompiledInvoker { private static final Logger s_logger = LoggerFactory.getLogger(InterestRateFutureOptionBlackCurveSpecificFunction.class); private final 
String _valueRequirementName; private InterestRateFutureOptionTradeConverter _converter; private FixedIncomeConverterDataProvider _dataConverter; public InterestRateFutureOptionBlackCurveSpecificFunction(final String valueRequirementName) { ArgumentChecker.notNull(valueRequirementName, "value requirement name"); _valueRequirementName = valueRequirementName; } @Override public void init(final FunctionCompilationContext context) { final HolidaySource holidaySource = OpenGammaCompilationContext.getHolidaySource(context); final RegionSource regionSource = OpenGammaCompilationContext.getRegionSource(context); final ConventionBundleSource conventionSource = OpenGammaCompilationContext.getConventionBundleSource(context); final SecuritySource securitySource = OpenGammaCompilationContext.getSecuritySource(context); final HistoricalTimeSeriesResolver timeSeriesResolver = OpenGammaCompilationContext.getHistoricalTimeSeriesResolver(context); _converter = new InterestRateFutureOptionTradeConverter(new InterestRateFutureOptionSecurityConverter(holidaySource, conventionSource, regionSource, securitySource)); _dataConverter = new FixedIncomeConverterDataProvider(conventionSource, timeSeriesResolver); } @Override public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) { final Clock snapshotClock = executionContext.getValuationClock(); final ZonedDateTime now = ZonedDateTime.now(snapshotClock); final HistoricalTimeSeriesBundle timeSeries = HistoricalTimeSeriesFunctionUtils.getHistoricalTimeSeriesInputs(executionContext, inputs); final Trade trade = target.getTrade(); final IRFutureOptionSecurity security = (IRFutureOptionSecurity) trade.getSecurity(); final ValueRequirement desiredValue = desiredValues.iterator().next(); final Currency currency = FinancialSecurityUtils.getCurrency(security); final String curveName = desiredValue.getConstraint(ValuePropertyNames.CURVE); final String surfaceName = desiredValue.getConstraint(ValuePropertyNames.SURFACE); // To enable standard and midcurve options to share the same default name final String surfaceNameWithPrefix = surfaceName + "_" + IRFutureOptionFunctionHelper.getFutureOptionPrefix(target); final String curveCalculationConfigName = desiredValue.getConstraint(ValuePropertyNames.CURVE_CALCULATION_CONFIG); final ConfigSource configSource = OpenGammaExecutionContext.getConfigSource(executionContext); final ConfigDBCurveCalculationConfigSource curveCalculationConfigSource = new ConfigDBCurveCalculationConfigSource(configSource); final MultiCurveCalculationConfig curveCalculationConfig = curveCalculationConfigSource.getConfig(curveCalculationConfigName); if (curveCalculationConfig == null) { throw new OpenGammaRuntimeException("Could not find curve calculation configuration named " + curveCalculationConfigName); } final String[] curveNames = curveCalculationConfig.getYieldCurveNames(); final YieldCurveBundle curves = YieldCurveFunctionUtils.getAllYieldCurves(inputs, curveCalculationConfig, curveCalculationConfigSource); final Object volatilitySurfaceObject = inputs.getValue(getVolatilityRequirement(surfaceNameWithPrefix, currency)); if (volatilitySurfaceObject == null) { throw new OpenGammaRuntimeException("Could not get volatility surface"); } final VolatilitySurface volatilitySurface = (VolatilitySurface) volatilitySurfaceObject; if (!(volatilitySurface.getSurface() instanceof InterpolatedDoublesSurface)) { throw new 
OpenGammaRuntimeException("Expecting an InterpolatedDoublesSurface; got " + volatilitySurface.getSurface().getClass()); } final InstrumentDefinition<?> irFutureOptionDefinition = _converter.convert(trade); final InstrumentDerivative irFutureOption = _dataConverter.convert(security, irFutureOptionDefinition, now, curveNames, timeSeries); final ValueProperties properties = getResultProperties(currency.getCode(), curveCalculationConfigName, surfaceName, curveName); final ValueSpecification spec = new ValueSpecification(_valueRequirementName, target.toSpecification(), properties); final YieldCurveWithBlackCubeBundle data = new YieldCurveWithBlackCubeBundle(volatilitySurface.getSurface(), curves); return getResult(irFutureOption, data, curveName, spec, security); } @Override public ComputationTargetType getTargetType() { return ComputationTargetType.TRADE; } @Override public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) { return target.getTrade().getSecurity() instanceof IRFutureOptionSecurity; } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) { final String currency = FinancialSecurityUtils.getCurrency(target.getTrade().getSecurity()).getCode(); return Collections.singleton(new ValueSpecification(_valueRequirementName, target.toSpecification(), getResultProperties(currency))); } @Override public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) { final ValueProperties constraints = desiredValue.getConstraints(); final Set<String> curves = constraints.getValues(ValuePropertyNames.CURVE); if (curves == null || curves.size() != 1) { s_logger.error("Must specify a curve against which to calculate the desired value " + _valueRequirementName); return null; } final Set<String> curveCalculationConfigNames = constraints.getValues(ValuePropertyNames.CURVE_CALCULATION_CONFIG); if (curveCalculationConfigNames == null || curveCalculationConfigNames.size() != 1) { return null; } final Set<String> surfaceNames = constraints.getValues(ValuePropertyNames.SURFACE); if (surfaceNames == null || surfaceNames.size() != 1) { return null; } final String curveCalculationConfigName = curveCalculationConfigNames.iterator().next(); final ConfigSource configSource = OpenGammaCompilationContext.getConfigSource(context); final ConfigDBCurveCalculationConfigSource curveCalculationConfigSource = new ConfigDBCurveCalculationConfigSource(configSource); final MultiCurveCalculationConfig curveCalculationConfig = curveCalculationConfigSource.getConfig(curveCalculationConfigName); if (curveCalculationConfig == null) { s_logger.error("Could not find curve calculation configuration named " + curveCalculationConfigName); return null; } final Currency currency = FinancialSecurityUtils.getCurrency(target.getTrade().getSecurity()); if (!ComputationTargetSpecification.of(currency).equals(curveCalculationConfig.getTarget())) { s_logger.error("Security currency and curve calculation config id were not equal; have {} and {}", currency, curveCalculationConfig.getTarget()); return null; } final String[] curveNames = curveCalculationConfig.getYieldCurveNames(); final String curve = curves.iterator().next(); if (Arrays.binarySearch(curveNames, curve) < 0) { s_logger.error("Curve named {} is not available in curve calculation configuration called {}", curve, curveCalculationConfigName); return null; } final String surfaceName 
= surfaceNames.iterator().next() + "_" + IRFutureOptionFunctionHelper.getFutureOptionPrefix(target); final Set<ValueRequirement> requirements = new HashSet<>(); requirements.addAll(YieldCurveFunctionUtils.getCurveRequirements(curveCalculationConfig, curveCalculationConfigSource)); requirements.add(getVolatilityRequirement(surfaceName, currency)); final Set<ValueRequirement> tsRequirements = _dataConverter.getConversionTimeSeriesRequirements(target.getTrade().getSecurity(), _converter.convert(target.getTrade())); if (tsRequirements == null) { return null; } requirements.addAll(tsRequirements); return requirements; } protected abstract Set<ComputedValue> getResult(final InstrumentDerivative irFutureOption, final YieldCurveWithBlackCubeBundle data, final String curveName, final ValueSpecification spec, final Security security); private ValueProperties getResultProperties(final String currency) { return createValueProperties() .with(ValuePropertyNames.CALCULATION_METHOD, CalculationPropertyNamesAndValues.BLACK_METHOD) .withAny(ValuePropertyNames.CURVE_CALCULATION_CONFIG) .withAny(ValuePropertyNames.SURFACE) .with(ValuePropertyNames.CURRENCY, currency) .with(ValuePropertyNames.CURVE_CURRENCY, currency) .withAny(ValuePropertyNames.CURVE).get(); } private ValueProperties getResultProperties(final String currency, final String curveCalculationConfig, final String surfaceName, final String curveName) { return createValueProperties() .with(ValuePropertyNames.CALCULATION_METHOD, CalculationPropertyNamesAndValues.BLACK_METHOD) .with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, curveCalculationConfig) .with(ValuePropertyNames.SURFACE, surfaceName) .with(ValuePropertyNames.CURRENCY, currency) .with(ValuePropertyNames.CURVE_CURRENCY, currency) .with(ValuePropertyNames.CURVE, curveName).get(); } private ValueRequirement getVolatilityRequirement(final String surface, final Currency currency) { final ValueProperties properties = ValueProperties.builder() .with(ValuePropertyNames.SURFACE, surface) .with(InstrumentTypeProperties.PROPERTY_SURFACE_INSTRUMENT_TYPE, InstrumentTypeProperties.IR_FUTURE_OPTION).get(); return new ValueRequirement(ValueRequirementNames.INTERPOLATED_VOLATILITY_SURFACE, ComputationTargetSpecification.of(currency), properties); } }
Using the currency explicitly in yield curve names now
projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/irfutureoption/InterestRateFutureOptionBlackCurveSpecificFunction.java
Using the currency explicitly in yield curve names now
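A minimal, self-contained sketch of the idea behind this commit - carrying the currency explicitly alongside the curve name so that identically named curves in different currencies cannot be confused. Illustrative only: it does not use the OpenGamma API, and CurveKey and its methods are hypothetical names introduced here.

/**
 * Illustrative only - not the OpenGamma API. A curve identifier that carries
 * its currency explicitly, so "Forward3M" in USD and "Forward3M" in EUR
 * resolve to different fully qualified names.
 */
final class CurveKey {
    private final String curveName;
    private final String currency;

    CurveKey(final String curveName, final String currency) {
        this.curveName = curveName;
        this.currency = currency;
    }

    /** Fully qualified name, e.g. "Forward3M_USD". */
    String qualifiedName() {
        return curveName + "_" + currency;
    }

    public static void main(final String[] args) {
        final CurveKey usd = new CurveKey("Forward3M", "USD");
        final CurveKey eur = new CurveKey("Forward3M", "EUR");
        System.out.println(usd.qualifiedName()); // Forward3M_USD
        System.out.println(eur.qualifiedName()); // Forward3M_EUR
    }
}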
Java
apache-2.0
5bcd42b7be3edf8a96a66f4481fac854d5710afa
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python; import com.intellij.codeInsight.folding.CodeFoldingManager; import com.intellij.codeInsight.folding.impl.EditorFoldingInfo; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.FoldRegion; import com.intellij.psi.PsiElement; import com.jetbrains.python.fixtures.PyTestCase; import com.jetbrains.python.psi.PyStringLiteralExpression; /** * @author yole */ public class PyFoldingTest extends PyTestCase { protected void doTest() { myFixture.testFolding(getTestDataPath() + "/folding/" + getTestName(true) + ".py"); } public void testClassTrailingSpace() { // PY-2544 doTest(); } public void testDocString() { doTest(); } public void testCustomFolding() { doTest(); } public void testImportBlock() { doTest(); } public void testBlocksFolding() { doTest(); } public void testLongStringsFolding() { doTest(); } public void testCollectionsFolding() { doTest(); } public void testMultilineComments() { doTest(); } public void testNestedFolding() { doTest(); } //PY-18928 public void testCustomFoldingWithComments() { doTest(); } // PY-17017 public void testCustomFoldingAtBlockEnd() { doTest(); } // PY-31154 public void testEmptyStatementListHasNoFolding() { doTest(); } public void testCollapseExpandDocCommentsTokenType() { myFixture.configureByFile(collapseExpandDocCommentsTokenTypeFile()); CodeFoldingManager.getInstance(myFixture.getProject()).buildInitialFoldings(myFixture.getEditor()); checkCollapseExpand(true); checkCollapseExpand(false); } protected String collapseExpandDocCommentsTokenTypeFile() { return getTestDataPath() + "/folding/" + getTestName(true) + ".py"; } private void checkCollapseExpand(boolean doExpand) { final String initial = doExpand ? "CollapseAllRegions" : "ExpandAllRegions"; final String action = doExpand ? "ExpandDocComments" : "CollapseDocComments"; final String logAction = doExpand ? "collapsed: " : "expanded: "; myFixture.performEditorAction(initial); myFixture.performEditorAction(action); final Editor editor = myFixture.getEditor(); for (FoldRegion region : editor.getFoldingModel().getAllFoldRegions()) { PsiElement element = EditorFoldingInfo.get(editor).getPsiElement(region); if (element instanceof PyStringLiteralExpression && ((PyStringLiteralExpression)element).isDocString()) { assertEquals(logAction + element.getText(), doExpand, region.isExpanded()); } else { assertEquals("not " + logAction + element.getText(), doExpand, !region.isExpanded()); } } } }
python/testSrc/com/jetbrains/python/PyFoldingTest.java
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python; import com.intellij.codeInsight.folding.impl.EditorFoldingInfo; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.FoldRegion; import com.intellij.psi.PsiElement; import com.jetbrains.python.fixtures.PyTestCase; import com.jetbrains.python.psi.PyStringLiteralExpression; /** * @author yole */ public class PyFoldingTest extends PyTestCase { protected void doTest() { myFixture.testFolding(getTestDataPath() + "/folding/" + getTestName(true) + ".py"); } public void testClassTrailingSpace() { // PY-2544 doTest(); } public void testDocString() { doTest(); } public void testCustomFolding() { doTest(); } public void testImportBlock() { doTest(); } public void testBlocksFolding() { doTest(); } public void testLongStringsFolding() { doTest(); } public void testCollectionsFolding() { doTest(); } public void testMultilineComments() { doTest(); } public void testNestedFolding() { doTest(); } //PY-18928 public void testCustomFoldingWithComments() { doTest(); } // PY-17017 public void testCustomFoldingAtBlockEnd() { doTest(); } // PY-31154 public void testEmptyStatementListHasNoFolding() { doTest(); } public void testCollapseExpandDocCommentsTokenType() { myFixture.configureByFile(collapseExpandDocCommentsTokenTypeFile()); checkCollapseExpand(true); checkCollapseExpand(false); } protected String collapseExpandDocCommentsTokenTypeFile() { return getTestDataPath() + "/folding/" + getTestName(true) + ".py"; } private void checkCollapseExpand(boolean doExpand) { final String initial = doExpand ? "CollapseAllRegions" : "ExpandAllRegions"; final String action = doExpand ? "ExpandDocComments" : "CollapseDocComments"; final String logAction = doExpand ? "collapsed: " : "expanded: "; myFixture.performEditorAction(initial); myFixture.performEditorAction(action); final Editor editor = myFixture.getEditor(); for (FoldRegion region : editor.getFoldingModel().getAllFoldRegions()) { PsiElement element = EditorFoldingInfo.get(editor).getPsiElement(region); if (element instanceof PyStringLiteralExpression && ((PyStringLiteralExpression)element).isDocString()) { assertEquals(logAction + element.getText(), doExpand, region.isExpanded()); } else { assertEquals("not " + logAction + element.getText(), doExpand, !region.isExpanded()); } } } }
don't commit the document and update fold regions synchronously in collapse/expand folding actions. This doesn't seem to be necessary - the actions can work just fine with the current state of fold regions in the editor. (inspired by KT-38329) Fix test. GitOrigin-RevId: 5053828a12d8299546a9836721308dc9d38305f3
python/testSrc/com/jetbrains/python/PyFoldingTest.java
don't commit the document and update fold regions synchronously in collapse/expand folding actions
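A hedged, standalone sketch of the pattern behind the test fix above: once the collapse/expand actions stop building fold regions as a side effect, the test must build them explicitly up front, mirroring the added CodeFoldingManager.getInstance(...).buildInitialFoldings(...) call in the new test. No IntelliJ APIs are used below; FoldingModelSketch and its methods are illustrative names only.

import java.util.ArrayList;
import java.util.List;

/**
 * Illustrative only (no IntelliJ APIs): a component whose regions are built
 * lazily. The "action" no longer (re)builds regions itself, so callers such
 * as tests have to build them explicitly before invoking it.
 */
final class FoldingModelSketch {
    private final List<String> regions = new ArrayList<>();
    private boolean built;

    /** Explicit initialisation step the fixed test now performs itself. */
    void buildInitialRegions() {
        if (!built) {
            regions.add("doc-comment");
            regions.add("import-block");
            built = true;
        }
    }

    /** The action only operates on whatever regions already exist. */
    int collapseAll() {
        return regions.size(); // number of regions affected
    }

    public static void main(final String[] args) {
        final FoldingModelSketch model = new FoldingModelSketch();
        System.out.println(model.collapseAll()); // 0 - nothing built yet

        model.buildInitialRegions();             // what the fixed test does up front
        System.out.println(model.collapseAll()); // 2 - regions now present
    }
}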
Java
apache-2.0
749df574f9e6fb1cfd98c1d02785848dd321e5d3
0
real-logic/Aeron,real-logic/Aeron,real-logic/Aeron,mikeb01/Aeron,EvilMcJerkface/Aeron,mikeb01/Aeron,real-logic/Aeron,EvilMcJerkface/Aeron,mikeb01/Aeron,mikeb01/Aeron,EvilMcJerkface/Aeron,EvilMcJerkface/Aeron
/* * Copyright 2014-2019 Real Logic Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron.archive; import io.aeron.*; import io.aeron.archive.client.AeronArchive; import io.aeron.archive.client.ArchiveException; import io.aeron.archive.client.ControlResponsePoller; import io.aeron.archive.client.RecordingDescriptorConsumer; import io.aeron.archive.codecs.ControlResponseCode; import io.aeron.archive.codecs.RecordingSignal; import io.aeron.archive.codecs.SourceLocation; import io.aeron.exceptions.TimeoutException; import io.aeron.logbuffer.LogBufferDescriptor; import org.agrona.CloseHelper; import org.agrona.concurrent.EpochClock; import java.util.concurrent.TimeUnit; import static io.aeron.Aeron.NULL_VALUE; import static io.aeron.archive.client.AeronArchive.NULL_POSITION; import static io.aeron.archive.codecs.RecordingSignal.*; class ReplicationSession implements Session, RecordingDescriptorConsumer { private static final int LIVE_ADD_THRESHOLD = LogBufferDescriptor.TERM_MIN_LENGTH >> 2; private static final int REPLAY_REMOVE_THRESHOLD = 0; private static final int RETRY_ATTEMPTS = 3; enum State { CONNECT, REPLICATE_DESCRIPTOR, SRC_RECORDING_POSITION, REPLAY, EXTEND, AWAIT_IMAGE, REPLICATE, CATCHUP, ATTEMPT_LIVE_JOIN, DONE } private long activeCorrelationId = NULL_VALUE; private long srcReplaySessionId = NULL_VALUE; private long replayPosition = NULL_POSITION; private long srcStopPosition = NULL_POSITION; private long srcRecordingPosition = NULL_POSITION; private long timeOfLastActionMs; private final long actionTimeoutMs; private final long correlationId; private final long replicationId; private final long srcRecordingId; private long dstRecordingId; private int replayStreamId; private int replaySessionId; private int retryAttempts = RETRY_ATTEMPTS; private boolean isLiveAdded; private final String replicationChannel; private final String liveDestination; private String replayDestination; private final EpochClock epochClock; private final ArchiveConductor conductor; private final ControlSession controlSession; private final ControlResponseProxy controlResponseProxy; private final Catalog catalog; private final Aeron aeron; private final AeronArchive.Context context; private AeronArchive.AsyncConnect asyncConnect; private AeronArchive srcArchive; private Subscription recordingSubscription; private Image image; private State state = State.CONNECT; ReplicationSession( final long correlationId, final long srcRecordingId, final long dstRecordingId, final long replicationId, final String liveDestination, final String replicationChannel, final RecordingSummary recordingSummary, final AeronArchive.Context context, final EpochClock epochClock, final Catalog catalog, final ControlResponseProxy controlResponseProxy, final ControlSession controlSession) { this.correlationId = correlationId; this.replicationId = replicationId; this.srcRecordingId = srcRecordingId; this.dstRecordingId = dstRecordingId; this.liveDestination = "".equals(liveDestination) ? 
null : liveDestination; this.replicationChannel = replicationChannel; this.aeron = context.aeron(); this.context = context; this.catalog = catalog; this.controlResponseProxy = controlResponseProxy; this.epochClock = epochClock; this.conductor = controlSession.archiveConductor(); this.controlSession = controlSession; this.actionTimeoutMs = TimeUnit.NANOSECONDS.toMillis(context.messageTimeoutNs()); if (null != recordingSummary) { replayPosition = recordingSummary.stopPosition; replayStreamId = recordingSummary.streamId; } } public long sessionId() { return replicationId; } public boolean isDone() { return state == State.DONE; } public void abort() { this.state(State.DONE); } public void close() { stopRecording(); stopReplaySession(); CloseHelper.close(asyncConnect); CloseHelper.close(srcArchive); controlSession.archiveConductor().removeReplicationSession(this); } public int doWork() { int workCount = 0; try { if (null != recordingSubscription && recordingSubscription.isClosed()) { state(State.DONE); return 1; } switch (state) { case CONNECT: workCount += connect(); break; case REPLICATE_DESCRIPTOR: workCount += replicateDescriptor(); break; case SRC_RECORDING_POSITION: workCount += srcRecordingPosition(); break; case REPLAY: workCount += replay(); break; case EXTEND: workCount += extend(); break; case AWAIT_IMAGE: workCount += awaitImage(); break; case REPLICATE: workCount += replicate(); break; case CATCHUP: workCount += catchup(); break; case ATTEMPT_LIVE_JOIN: workCount += attemptLiveJoin(); break; } } catch (final Throwable ex) { controlSession.sendErrorResponse(correlationId, ex.getMessage(), controlResponseProxy); state(State.DONE); error(ex); throw ex; } return workCount; } public void onRecordingDescriptor( final long controlSessionId, final long correlationId, final long recordingId, final long startTimestamp, final long stopTimestamp, final long startPosition, final long stopPosition, final int initialTermId, final int segmentFileLength, final int termBufferLength, final int mtuLength, final int sessionId, final int streamId, final String strippedChannel, final String originalChannel, final String sourceIdentity) { srcStopPosition = stopPosition; replayStreamId = streamId; replaySessionId = sessionId; if (NULL_VALUE == dstRecordingId) { replayPosition = startPosition; dstRecordingId = catalog.addNewRecording( startPosition, startPosition, startTimestamp, startTimestamp, initialTermId, segmentFileLength, termBufferLength, mtuLength, sessionId, streamId, strippedChannel, originalChannel, sourceIdentity); signal(startPosition, REPLICATE); } State nextState = State.REPLAY; if (null != liveDestination) { if (NULL_POSITION != stopPosition) { state(State.DONE); final ArchiveException ex = new ArchiveException("cannot live merge without active source recording"); error(ex); throw ex; } nextState = State.SRC_RECORDING_POSITION; } if (startPosition == stopPosition) { signal(stopPosition, SYNC); nextState = State.DONE; } state(nextState); } private int connect() { int workCount = 0; if (null == asyncConnect) { asyncConnect = AeronArchive.asyncConnect(context); workCount += 1; } else { final int step = asyncConnect.step(); final AeronArchive archive = asyncConnect.poll(); if (null == archive) { if (asyncConnect.step() != step) { workCount += 1; } } else { srcArchive = archive; asyncConnect = null; state(State.REPLICATE_DESCRIPTOR); workCount += 1; } } return workCount; } private int replicateDescriptor() { int workCount = 0; if (NULL_VALUE == activeCorrelationId) { final long correlationId = 
aeron.nextCorrelationId(); if (srcArchive.archiveProxy().listRecording(srcRecordingId, correlationId, srcArchive.controlSessionId())) { workCount += trackAction(correlationId); srcArchive.recordingDescriptorPoller().reset(correlationId, 1, this); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to list remote recording descriptor"); } } else { final int fragments = srcArchive.recordingDescriptorPoller().poll(); if (0 == fragments && epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to fetch remote recording descriptor"); } workCount += fragments; } return workCount; } private int srcRecordingPosition() { int workCount = 0; if (NULL_VALUE == activeCorrelationId) { final long correlationId = aeron.nextCorrelationId(); if (srcArchive.archiveProxy().getRecordingPosition( srcRecordingId, correlationId, srcArchive.controlSessionId())) { workCount += trackAction(correlationId); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to send recording position request"); } } else { final ControlResponsePoller poller = srcArchive.controlResponsePoller(); workCount += poller.poll(); if (hasResponse(poller)) { srcRecordingPosition = poller.relevantId(); if (NULL_POSITION == srcRecordingPosition) { if (null != liveDestination) { throw new ArchiveException("cannot live merge without active source recording"); } } state(State.REPLAY); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to get recording position"); } } return workCount; } private int replay() { int workCount = 0; if (NULL_VALUE == activeCorrelationId) { final long correlationId = aeron.nextCorrelationId(); if (srcArchive.archiveProxy().replay( srcRecordingId, replayPosition, Long.MAX_VALUE, null == liveDestination ? 
replicationChannel : replicationChannel + "|session-id=" + replaySessionId, replayStreamId, correlationId, srcArchive.controlSessionId())) { workCount += trackAction(correlationId); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to send replay request"); } } else { final ControlResponsePoller poller = srcArchive.controlResponsePoller(); workCount += poller.poll(); if (hasResponse(poller)) { srcReplaySessionId = poller.relevantId(); state(State.EXTEND); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed get acknowledgement of replay request to: " + replicationChannel); } } return workCount; } private int extend() { final ChannelUri channelUri = ChannelUri.parse(replicationChannel); final ChannelUriStringBuilder builder = new ChannelUriStringBuilder(); final String channel = builder .media(channelUri) .alias(channelUri) .tags(replicationId + "," + replicationId) .controlMode(CommonContext.MDC_CONTROL_MODE_MANUAL) .rejoin(false) .sessionId((int)srcReplaySessionId) .build(); recordingSubscription = conductor.extendRecording( replicationId, dstRecordingId, replayStreamId, SourceLocation.REMOTE, channel, controlSession); if (null == recordingSubscription) { state(State.DONE); } else { replayDestination = builder.clear().media(channelUri).endpoint(channelUri).build(); recordingSubscription.asyncAddDestination(replayDestination); state(State.AWAIT_IMAGE); } return 1; } private int awaitImage() { int workCount = 0; image = recordingSubscription.imageBySessionId((int)srcReplaySessionId); if (null != image) { state(null == liveDestination ? State.REPLICATE : State.CATCHUP); workCount += 1; } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed get replay image"); } return workCount; } private int replicate() { int workCount = 0; final long position = image.position(); if (position == srcStopPosition || image.isClosed()) { if (position == srcStopPosition || (NULL_VALUE == srcStopPosition && image.isEndOfStream())) { srcReplaySessionId = NULL_VALUE; signal(position, SYNC); stopRecording(); } state(State.DONE); workCount += 1; } return workCount; } private int catchup() { int workCount = 0; if (image.isClosed()) { throw new ArchiveException("replication image closed unexpectedly"); } if (image.position() >= srcRecordingPosition) { state(State.ATTEMPT_LIVE_JOIN); workCount += 1; } return workCount; } private int attemptLiveJoin() { int workCount = 0; if (image.isClosed()) { throw new ArchiveException("replication image closed unexpectedly"); } if (NULL_VALUE == activeCorrelationId) { final long correlationId = aeron.nextCorrelationId(); if (srcArchive.archiveProxy().getRecordingPosition( srcRecordingId, correlationId, srcArchive.controlSessionId())) { workCount += trackAction(correlationId); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to send recording position request"); } } else { final ControlResponsePoller poller = srcArchive.controlResponsePoller(); workCount += poller.poll(); if (hasResponse(poller)) { trackAction(NULL_VALUE); retryAttempts = RETRY_ATTEMPTS; srcRecordingPosition = poller.relevantId(); if (NULL_POSITION == srcRecordingPosition) { if (null != liveDestination) { throw new ArchiveException("cannot live merge without active source recording"); } } final long position = image.position(); if (shouldAddLiveDestination(position)) { 
recordingSubscription.asyncAddDestination(liveDestination); isLiveAdded = true; } else if (shouldStopReplay(position)) { recordingSubscription.asyncRemoveDestination(replayDestination); recordingSubscription = null; signal(position, MERGE); state(State.DONE); } workCount += 1; } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { if (--retryAttempts == 0) { throw new TimeoutException("failed to get recording position"); } trackAction(NULL_VALUE); } } return workCount; } private boolean hasResponse(final ControlResponsePoller poller) { if (poller.isPollComplete() && poller.controlSessionId() == srcArchive.controlSessionId()) { final ControlResponseCode code = poller.code(); if (ControlResponseCode.ERROR == code) { throw new ArchiveException(poller.errorMessage(), code.value()); } return poller.correlationId() == activeCorrelationId && ControlResponseCode.OK == code; } return false; } private void error(final Throwable ex) { if (!controlSession.controlPublication().isConnected()) { controlSession.sendErrorResponse(correlationId, ex.getMessage(), controlResponseProxy); } } private void signal(final long position, final RecordingSignal recordingSignal) { final long subscriptionId = null != recordingSubscription ? recordingSubscription.registrationId() : NULL_VALUE; controlSession.attemptSendSignal(replicationId, dstRecordingId, subscriptionId, position, recordingSignal); } private void stopReplaySession() { if (NULL_VALUE != srcReplaySessionId) { final long correlationId = aeron.nextCorrelationId(); srcArchive.archiveProxy().stopReplay(srcReplaySessionId, correlationId, srcArchive.controlSessionId()); srcReplaySessionId = NULL_VALUE; } } private void stopRecording() { if (null != recordingSubscription) { conductor.removeRecordingSubscription(recordingSubscription.registrationId()); recordingSubscription.close(); recordingSubscription = null; } } private boolean shouldAddLiveDestination(final long position) { return !isLiveAdded && (srcRecordingPosition - position) <= LIVE_ADD_THRESHOLD; } private boolean shouldStopReplay(final long position) { return isLiveAdded && (srcRecordingPosition - position) <= REPLAY_REMOVE_THRESHOLD && image.activeTransportCount() >= 2; } private int trackAction(final long correlationId) { timeOfLastActionMs = epochClock.time(); activeCorrelationId = correlationId; return 1; } private void state(final State newState) { timeOfLastActionMs = epochClock.time(); //System.out.println(timeOfLastActionMs + ": " + state + " -> " + newState); state = newState; activeCorrelationId = NULL_VALUE; } }
aeron-archive/src/main/java/io/aeron/archive/ReplicationSession.java
/* * Copyright 2014-2019 Real Logic Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron.archive; import io.aeron.*; import io.aeron.archive.client.AeronArchive; import io.aeron.archive.client.ArchiveException; import io.aeron.archive.client.ControlResponsePoller; import io.aeron.archive.client.RecordingDescriptorConsumer; import io.aeron.archive.codecs.ControlResponseCode; import io.aeron.archive.codecs.RecordingSignal; import io.aeron.archive.codecs.SourceLocation; import io.aeron.exceptions.TimeoutException; import io.aeron.logbuffer.LogBufferDescriptor; import org.agrona.CloseHelper; import org.agrona.concurrent.EpochClock; import java.util.concurrent.TimeUnit; import static io.aeron.Aeron.NULL_VALUE; import static io.aeron.archive.client.AeronArchive.NULL_POSITION; import static io.aeron.archive.codecs.RecordingSignal.*; class ReplicationSession implements Session, RecordingDescriptorConsumer { private static final int LIVE_ADD_THRESHOLD = LogBufferDescriptor.TERM_MIN_LENGTH >> 2; private static final int REPLAY_REMOVE_THRESHOLD = 0; private static final int RETRY_ATTEMPTS = 3; private static final String REPLICATION_ALIAS = "replication:"; enum State { CONNECT, REPLICATE_DESCRIPTOR, SRC_RECORDING_POSITION, REPLAY, EXTEND, AWAIT_IMAGE, REPLICATE, CATCHUP, ATTEMPT_LIVE_JOIN, DONE } private long activeCorrelationId = NULL_VALUE; private long srcReplaySessionId = NULL_VALUE; private long replayPosition = NULL_POSITION; private long srcStopPosition = NULL_POSITION; private long srcRecordingPosition = NULL_POSITION; private long timeOfLastActionMs; private final long actionTimeoutMs; private final long correlationId; private final long replicationId; private final long srcRecordingId; private long dstRecordingId; private int replayStreamId; private int replaySessionId; private int retryAttempts = RETRY_ATTEMPTS; private boolean isLiveAdded; private final String replicationChannel; private final String liveDestination; private String replayDestination; private final EpochClock epochClock; private final ArchiveConductor conductor; private final ControlSession controlSession; private final ControlResponseProxy controlResponseProxy; private final Catalog catalog; private final Aeron aeron; private final AeronArchive.Context context; private AeronArchive.AsyncConnect asyncConnect; private AeronArchive srcArchive; private Subscription recordingSubscription; private Image image; private State state = State.CONNECT; ReplicationSession( final long correlationId, final long srcRecordingId, final long dstRecordingId, final long replicationId, final String liveDestination, final String replicationChannel, final RecordingSummary recordingSummary, final AeronArchive.Context context, final EpochClock epochClock, final Catalog catalog, final ControlResponseProxy controlResponseProxy, final ControlSession controlSession) { this.correlationId = correlationId; this.replicationId = replicationId; this.srcRecordingId = srcRecordingId; this.dstRecordingId = dstRecordingId; this.liveDestination = 
"".equals(liveDestination) ? null : liveDestination; this.replicationChannel = replicationChannel; this.aeron = context.aeron(); this.context = context; this.catalog = catalog; this.controlResponseProxy = controlResponseProxy; this.epochClock = epochClock; this.conductor = controlSession.archiveConductor(); this.controlSession = controlSession; this.actionTimeoutMs = TimeUnit.NANOSECONDS.toMillis(context.messageTimeoutNs()); if (null != recordingSummary) { replayPosition = recordingSummary.stopPosition; replayStreamId = recordingSummary.streamId; } } public long sessionId() { return replicationId; } public boolean isDone() { return state == State.DONE; } public void abort() { this.state(State.DONE); } public void close() { stopRecording(); stopReplaySession(); CloseHelper.close(asyncConnect); CloseHelper.close(srcArchive); controlSession.archiveConductor().removeReplicationSession(this); } public int doWork() { int workCount = 0; try { if (null != recordingSubscription && recordingSubscription.isClosed()) { state(State.DONE); return 1; } switch (state) { case CONNECT: workCount += connect(); break; case REPLICATE_DESCRIPTOR: workCount += replicateDescriptor(); break; case SRC_RECORDING_POSITION: workCount += srcRecordingPosition(); break; case REPLAY: workCount += replay(); break; case EXTEND: workCount += extend(); break; case AWAIT_IMAGE: workCount += awaitImage(); break; case REPLICATE: workCount += replicate(); break; case CATCHUP: workCount += catchup(); break; case ATTEMPT_LIVE_JOIN: workCount += attemptLiveJoin(); break; } } catch (final Throwable ex) { controlSession.sendErrorResponse(correlationId, ex.getMessage(), controlResponseProxy); state(State.DONE); error(ex); throw ex; } return workCount; } public void onRecordingDescriptor( final long controlSessionId, final long correlationId, final long recordingId, final long startTimestamp, final long stopTimestamp, final long startPosition, final long stopPosition, final int initialTermId, final int segmentFileLength, final int termBufferLength, final int mtuLength, final int sessionId, final int streamId, final String strippedChannel, final String originalChannel, final String sourceIdentity) { srcStopPosition = stopPosition; replayStreamId = streamId; replaySessionId = sessionId; if (NULL_VALUE == dstRecordingId) { replayPosition = startPosition; dstRecordingId = catalog.addNewRecording( startPosition, startPosition, startTimestamp, startTimestamp, initialTermId, segmentFileLength, termBufferLength, mtuLength, sessionId, streamId, strippedChannel, originalChannel, sourceIdentity); signal(startPosition, REPLICATE); } State nextState = State.REPLAY; if (null != liveDestination) { if (NULL_POSITION != stopPosition) { state(State.DONE); final ArchiveException ex = new ArchiveException("cannot live merge without active source recording"); error(ex); throw ex; } nextState = State.SRC_RECORDING_POSITION; } if (startPosition == stopPosition) { signal(stopPosition, SYNC); nextState = State.DONE; } state(nextState); } private int connect() { int workCount = 0; if (null == asyncConnect) { asyncConnect = AeronArchive.asyncConnect(context); workCount += 1; } else { final int step = asyncConnect.step(); final AeronArchive archive = asyncConnect.poll(); if (null == archive) { if (asyncConnect.step() != step) { workCount += 1; } } else { srcArchive = archive; asyncConnect = null; state(State.REPLICATE_DESCRIPTOR); workCount += 1; } } return workCount; } private int replicateDescriptor() { int workCount = 0; if (NULL_VALUE == activeCorrelationId) { 
final long correlationId = aeron.nextCorrelationId(); if (srcArchive.archiveProxy().listRecording(srcRecordingId, correlationId, srcArchive.controlSessionId())) { workCount += trackAction(correlationId); srcArchive.recordingDescriptorPoller().reset(correlationId, 1, this); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to list remote recording descriptor"); } } else { final int fragments = srcArchive.recordingDescriptorPoller().poll(); if (0 == fragments && epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to fetch remote recording descriptor"); } workCount += fragments; } return workCount; } private int srcRecordingPosition() { int workCount = 0; if (NULL_VALUE == activeCorrelationId) { final long correlationId = aeron.nextCorrelationId(); if (srcArchive.archiveProxy().getRecordingPosition( srcRecordingId, correlationId, srcArchive.controlSessionId())) { workCount += trackAction(correlationId); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to send recording position request"); } } else { final ControlResponsePoller poller = srcArchive.controlResponsePoller(); workCount += poller.poll(); if (hasResponse(poller)) { srcRecordingPosition = poller.relevantId(); if (NULL_POSITION == srcRecordingPosition) { if (null != liveDestination) { throw new ArchiveException("cannot live merge without active source recording"); } } state(State.REPLAY); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to get recording position"); } } return workCount; } private int replay() { int workCount = 0; if (NULL_VALUE == activeCorrelationId) { final long correlationId = aeron.nextCorrelationId(); if (srcArchive.archiveProxy().replay( srcRecordingId, replayPosition, Long.MAX_VALUE, null == liveDestination ? 
replicationChannel : replicationChannel + "|session-id=" + replaySessionId, replayStreamId, correlationId, srcArchive.controlSessionId())) { workCount += trackAction(correlationId); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to send replay request"); } } else { final ControlResponsePoller poller = srcArchive.controlResponsePoller(); workCount += poller.poll(); if (hasResponse(poller)) { srcReplaySessionId = poller.relevantId(); state(State.EXTEND); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed get acknowledgement of replay request to: " + replicationChannel); } } return workCount; } private int extend() { final ChannelUri channelUri = ChannelUri.parse(replicationChannel); final ChannelUriStringBuilder builder = new ChannelUriStringBuilder(); final String tags = aeron.nextCorrelationId() + "," + replicationId; final String channel = builder .media(channelUri) .alias(REPLICATION_ALIAS + replicationId) .tags(tags) .controlMode(CommonContext.MDC_CONTROL_MODE_MANUAL) .rejoin(false) .sessionId((int)srcReplaySessionId) .build(); recordingSubscription = conductor.extendRecording( replicationId, dstRecordingId, replayStreamId, SourceLocation.REMOTE, channel, controlSession); if (null == recordingSubscription) { state(State.DONE); } else { replayDestination = builder.clear().media(channelUri).endpoint(channelUri).build(); recordingSubscription.asyncAddDestination(replayDestination); state(State.AWAIT_IMAGE); } return 1; } private int awaitImage() { int workCount = 0; image = recordingSubscription.imageBySessionId((int)srcReplaySessionId); if (null != image) { state(null == liveDestination ? State.REPLICATE : State.CATCHUP); workCount += 1; } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed get replay image"); } return workCount; } private int replicate() { int workCount = 0; final long position = image.position(); if (position == srcStopPosition || image.isClosed()) { if (position == srcStopPosition || (NULL_VALUE == srcStopPosition && image.isEndOfStream())) { srcReplaySessionId = NULL_VALUE; signal(position, SYNC); stopRecording(); } state(State.DONE); workCount += 1; } return workCount; } private int catchup() { int workCount = 0; if (image.isClosed()) { throw new ArchiveException("replication image closed unexpectedly"); } if (image.position() >= srcRecordingPosition) { state(State.ATTEMPT_LIVE_JOIN); workCount += 1; } return workCount; } private int attemptLiveJoin() { int workCount = 0; if (image.isClosed()) { throw new ArchiveException("replication image closed unexpectedly"); } if (NULL_VALUE == activeCorrelationId) { final long correlationId = aeron.nextCorrelationId(); if (srcArchive.archiveProxy().getRecordingPosition( srcRecordingId, correlationId, srcArchive.controlSessionId())) { workCount += trackAction(correlationId); } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { throw new TimeoutException("failed to send recording position request"); } } else { final ControlResponsePoller poller = srcArchive.controlResponsePoller(); workCount += poller.poll(); if (hasResponse(poller)) { trackAction(NULL_VALUE); retryAttempts = RETRY_ATTEMPTS; srcRecordingPosition = poller.relevantId(); if (NULL_POSITION == srcRecordingPosition) { if (null != liveDestination) { throw new ArchiveException("cannot live merge without active source recording"); } } final long position = image.position(); if 
(shouldAddLiveDestination(position)) { recordingSubscription.asyncAddDestination(liveDestination); isLiveAdded = true; } else if (shouldStopReplay(position)) { recordingSubscription.asyncRemoveDestination(replayDestination); recordingSubscription = null; signal(position, MERGE); state(State.DONE); } workCount += 1; } else if (epochClock.time() >= (timeOfLastActionMs + actionTimeoutMs)) { if (--retryAttempts == 0) { throw new TimeoutException("failed to get recording position"); } trackAction(NULL_VALUE); } } return workCount; } private boolean hasResponse(final ControlResponsePoller poller) { if (poller.isPollComplete() && poller.controlSessionId() == srcArchive.controlSessionId()) { final ControlResponseCode code = poller.code(); if (ControlResponseCode.ERROR == code) { throw new ArchiveException(poller.errorMessage(), code.value()); } return poller.correlationId() == activeCorrelationId && ControlResponseCode.OK == code; } return false; } private void error(final Throwable ex) { if (!controlSession.controlPublication().isConnected()) { controlSession.sendErrorResponse(correlationId, ex.getMessage(), controlResponseProxy); } } private void signal(final long position, final RecordingSignal recordingSignal) { final long subscriptionId = null != recordingSubscription ? recordingSubscription.registrationId() : NULL_VALUE; controlSession.attemptSendSignal(replicationId, dstRecordingId, subscriptionId, position, recordingSignal); } private void stopReplaySession() { if (NULL_VALUE != srcReplaySessionId) { final long correlationId = aeron.nextCorrelationId(); srcArchive.archiveProxy().stopReplay(srcReplaySessionId, correlationId, srcArchive.controlSessionId()); srcReplaySessionId = NULL_VALUE; } } private void stopRecording() { if (null != recordingSubscription) { conductor.removeRecordingSubscription(recordingSubscription.registrationId()); recordingSubscription.close(); recordingSubscription = null; } } private boolean shouldAddLiveDestination(final long position) { return !isLiveAdded && (srcRecordingPosition - position) <= LIVE_ADD_THRESHOLD; } private boolean shouldStopReplay(final long position) { return isLiveAdded && (srcRecordingPosition - position) <= REPLAY_REMOVE_THRESHOLD && image.activeTransportCount() >= 2; } private int trackAction(final long correlationId) { timeOfLastActionMs = epochClock.time(); activeCorrelationId = correlationId; return 1; } private void state(final State newState) { timeOfLastActionMs = epochClock.time(); //System.out.println(timeOfLastActionMs + ": " + state + " -> " + newState); state = newState; activeCorrelationId = NULL_VALUE; } }
[Java] Use replicationId to tag replication channel and allow users to set their own alias.
aeron-archive/src/main/java/io/aeron/archive/ReplicationSession.java
[Java] Use replicationId to tag replication channel and allow users to set their own alias.
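A rough, self-contained sketch of the channel-building change described above: the extend-recording channel is tagged with the replicationId, and an alias the user supplied on the replication channel is carried through instead of a generated "replication:" prefix. This does not use Aeron's ChannelUri/ChannelUriStringBuilder; the class, method, and parameter names below are assumptions for illustration only.

import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Illustrative only (does not use Aeron's ChannelUri/ChannelUriStringBuilder):
 * derive the extend-recording channel from the replication channel, tag it
 * with the replication id, and keep any alias the user supplied.
 */
final class ReplicationChannelSketch {
    static String buildExtendChannel(final Map<String, String> replicationParams,
                                     final long replicationId,
                                     final int replaySessionId) {
        final StringBuilder sb = new StringBuilder("aeron:udp?");
        // Tag the channel with the replication id, as in the new code above.
        sb.append("tags=").append(replicationId).append(',').append(replicationId);
        final String alias = replicationParams.get("alias");
        if (alias != null) {
            // A user-supplied alias on the replication channel is carried over.
            sb.append("|alias=").append(alias);
        }
        sb.append("|control-mode=manual|rejoin=false|session-id=").append(replaySessionId);
        return sb.toString();
    }

    public static void main(final String[] args) {
        final Map<String, String> params = new LinkedHashMap<>();
        params.put("alias", "my-replication");
        System.out.println(buildExtendChannel(params, 42L, 7));
        // aeron:udp?tags=42,42|alias=my-replication|control-mode=manual|rejoin=false|session-id=7
    }
}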
Java
apache-2.0
7842fb47f1ded645b9b5ddf8e7a84cb136261a9e
0
tsurdilo/jbpm-designer,jomarko/jbpm-designer,jomarko/jbpm-designer,tsurdilo/jbpm-designer,manstis/jbpm-designer,porcelli-forks/jbpm-designer,manstis/jbpm-designer,jomarko/jbpm-designer,jomarko/jbpm-designer,jhrcek/jbpm-designer,droolsjbpm/jbpm-designer,jhrcek/jbpm-designer,porcelli-forks/jbpm-designer,droolsjbpm/jbpm-designer,porcelli-forks/jbpm-designer,droolsjbpm/jbpm-designer,tsurdilo/jbpm-designer,manstis/jbpm-designer,manstis/jbpm-designer,porcelli-forks/jbpm-designer,droolsjbpm/jbpm-designer,jhrcek/jbpm-designer,jhrcek/jbpm-designer,tsurdilo/jbpm-designer
/* * Copyright 2010 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.designer.bpmn2.impl; import static junit.framework.Assert.*; import static org.junit.Assert.assertFalse; import java.io.File; import java.net.URL; import java.util.Collections; import java.util.List; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonParser; import org.eclipse.bpmn2.*; import org.eclipse.bpmn2.Process; import org.eclipse.emf.ecore.util.FeatureMap; import org.jboss.drools.DroolsPackage; import org.jboss.drools.MetaDataType; import org.junit.Test; /** * @author Antoine Toulme * * A series of tests to check the unmarshalling of json to bpmn2. */ public class Bpmn2UnmarshallingTest { private static File getTestJsonFile(String filename) { URL fileURL = Bpmn2UnmarshallingTest.class.getResource(filename); return new File(fileURL.getFile()); } @Test public void testSimpleDefinitionsUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("empty.json"), "").getContents().get(0)); assertEquals("<![CDATA[my doc]]>", definitions.getRootElements().get(0).getDocumentation().iterator().next().getText()); assertEquals("http://www.w3.org/1999/XPath", definitions.getExpressionLanguage()); assertEquals("http://www.omg.org/bpmn20", definitions.getTargetNamespace()); assertEquals("http://www.w3.org/2001/XMLSchema", definitions.getTypeLanguage()); assertTrue(definitions.getRootElements().size() == 1); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testSimpleGlobalTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("oneTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); assertTrue(definitions.getRootElements().iterator().next() instanceof GlobalTask); GlobalTask task = (GlobalTask) definitions.getRootElements().iterator().next(); assertEquals("oneTask", task.getName()); assertEquals("my task doc", task.getDocumentation().iterator().next().getText()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testTwoGlobalTasksUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("twoTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); assertTrue(definitions.getRootElements().get(0) instanceof GlobalTask); GlobalTask task = (GlobalTask) definitions.getRootElements().get(0); assertEquals("firstTask", task.getName()); assertEquals("my task doc", task.getDocumentation().iterator().next().getText()); GlobalTask task2 = (GlobalTask) 
definitions.getRootElements().get(1); assertEquals("secondTask", task2.getName()); assertEquals("my task doc too", task2.getDocumentation().iterator().next().getText()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testPoolUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("pool.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); assertTrue(definitions.getRootElements().get(0) instanceof Process); Process process = getRootProcess(definitions); assertEquals("pool", process.getName()); assertEquals(ProcessType.PRIVATE, process.getProcessType()); assertTrue(process.isIsClosed()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testLaneUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("pool.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); assertTrue(definitions.getRootElements().get(0) instanceof Process); Process process = getRootProcess(definitions); assertTrue(process.getLaneSets().size() == 1); assertTrue(process.getLaneSets().get(0).getLanes().size() == 1); Lane l = process.getLaneSets().get(0).getLanes().get(0); assertEquals("my first lane", l.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testSequenceFlowUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("sequenceFlow.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().get(0) instanceof Task); Task task = (Task) process.getFlowElements().get(0); assertEquals("task1", task.getName()); Task task2 = (Task) process.getFlowElements().get(1); assertEquals("task2", task2.getName()); SequenceFlow flow = (SequenceFlow) process.getFlowElements().get(2); assertEquals("seqFlow", flow.getName()); assertEquals(task, flow.getSourceRef()); assertEquals(task2, flow.getTargetRef()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testScriptTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("scriptTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); GlobalScriptTask task = (GlobalScriptTask) definitions.getRootElements().get(0); assertEquals("my script", task.getName()); assertEquals("git status | grep modified | awk '{print $3}' | xargs echo | xargs git add", task.getScript()); assertEquals("bash", task.getScriptLanguage()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testUserTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("userTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); GlobalUserTask task = 
(GlobalUserTask) definitions.getRootElements().get(0); assertEquals("ask user", task.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testBusinessRuleTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("businessRuleTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); GlobalBusinessRuleTask task = (GlobalBusinessRuleTask) definitions.getRootElements().get(0); assertEquals("call business rule", task.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testManualTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("manualTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); GlobalManualTask task = (GlobalManualTask) definitions.getRootElements().get(0); assertEquals("pull a lever", task.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testGatewayUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("gateway.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ExclusiveGateway g = (ExclusiveGateway) process.getFlowElements().get(0); assertEquals("xor gateway", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testParallelGatewayUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("parallelGateway.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ParallelGateway g = (ParallelGateway) process.getFlowElements().get(0); assertEquals("parallel gateway", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEventBasedGatewayUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("eventBasedGateway.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EventBasedGateway g = (EventBasedGateway) process.getFlowElements().get(0); assertEquals("event-based gateway", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testInclusiveGatewayUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("inclusiveGateway.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); InclusiveGateway g = (InclusiveGateway) process.getFlowElements().get(0); assertEquals("inclusive gateway", g.getName()); definitions.eResource().save(System.out, 
Collections.emptyMap()); } @Test public void testStartEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartMessageEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startMessageEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 3); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start message event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof MessageEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartEscalationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startEscalationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start escalation event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof EscalationEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartCompensationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startCompensationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start compensation event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof CompensateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start multiple event", g.getName()); //TODO multiple event definitions ??? 
definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartParallelMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startParallelMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start parallel multiple event", g.getName()); //TODO multiple event definitions ??? definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartSignalEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startSignalEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start signal event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof SignalEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartTimerEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startTimerEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start timer event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof TimerEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testGroupUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("group.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); Group group = (Group) process.getArtifacts().iterator().next(); assertEquals("Group name is wrong.", group.getCategoryValueRef().getValue(), "group"); assertEquals(group.getDocumentation().get(0).getText(), "<![CDATA[group documentation]]>"); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testTextAnnotationUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("textAnnotation.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().iterator().next() instanceof TextAnnotation); TextAnnotation ta = (TextAnnotation) process.getFlowElements().iterator().next(); assertEquals("text annotation", ta.getText()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testDataObjectUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = 
((Definitions) unmarshaller.unmarshall(getTestJsonFile("dataObject.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().iterator().next() instanceof DataObject); DataObject da = (DataObject) process.getFlowElements().iterator().next(); assertEquals("data object", da.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndMessageEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endMessageEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 3); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end message event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof MessageEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndEscalationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endEscalationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end escalation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof EscalationEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndErrorEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endErrorEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end error event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof ErrorEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndSignalEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endSignalEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end signal event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof SignalEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void 
testEndTerminateEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endTerminateEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("terminate end event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof TerminateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end multiple event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndCompensationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endCompensationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end compensation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof CompensateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testSimpleChainUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startEvent-task-endEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().size() == 5); assertTrue(process.getLaneSets().size() == 1); assertTrue(process.getLaneSets().get(0).getLanes().size() == 1); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchMessageEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchMessageEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 3); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch message event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof MessageEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchTimerEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchTimerEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) 
process.getFlowElements().get(0); assertEquals("catch timer event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof TimerEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchEscalationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchEscalationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch escalation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof EscalationEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchConditionalEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchConditionalEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch conditional event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof ConditionalEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchLinkEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchLinkEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch link event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof LinkEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchErrorEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchErrorEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch error event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof ErrorEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchCancelEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchCancelEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch cancel event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof CancelEventDefinition); 
definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchCompensationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchCompensationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch compensation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof CompensateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch multiple event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchParallelMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchParallelMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch parallel multiple event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowMessageEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowMessageEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 3); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw message event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof MessageEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowEscalationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowEscalationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process 
= getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw escalation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof EscalationEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowLinkEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowLinkEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw link event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof LinkEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowCompensationUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowCompensationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw compensation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof CompensateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowSignalUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowSignalEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw signal event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof SignalEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowMultipleUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw multiple event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testAssociationUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("association.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); TextAnnotation textA = (TextAnnotation) process.getFlowElements().get(1); Association association = (Association) process.getArtifacts().get(0); assertEquals(g, association.getSourceRef()); assertEquals(textA, 
association.getTargetRef()); assertEquals(AssociationDirection.NONE, association.getAssociationDirection()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testAssociationUnidirectionalUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("associationOne.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); TextAnnotation textA = (TextAnnotation) process.getFlowElements().get(1); Association association = (Association) process.getArtifacts().get(0); assertEquals(g, association.getSourceRef()); assertEquals(textA, association.getTargetRef()); assertEquals(AssociationDirection.ONE, association.getAssociationDirection()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testAssociationBidirectionalUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("associationBoth.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); TextAnnotation textA = (TextAnnotation) process.getFlowElements().get(1); Association association = (Association) process.getArtifacts().get(0); assertEquals(g, association.getSourceRef()); assertEquals(textA, association.getTargetRef()); assertEquals(AssociationDirection.BOTH, association.getAssociationDirection()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testBoundaryEventMultiLineName() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("boundaryEventMultiLineName.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Boolean foundElementNameExtensionValue = false; BoundaryEvent event = (BoundaryEvent) process.getFlowElements().get(1); if(event.getExtensionValues() != null && event.getExtensionValues().size() > 0) { for(ExtensionAttributeValue extattrval : event.getExtensionValues()) { FeatureMap extensionElements = extattrval.getValue(); List<MetaDataType> metadataExtensions = (List<MetaDataType>) extensionElements .get(DroolsPackage.Literals.DOCUMENT_ROOT__META_DATA, true); assertNotNull(metadataExtensions); assertTrue(metadataExtensions.size() == 1); for(MetaDataType metaType : metadataExtensions) { if(metaType.getName()!= null && metaType.getName().equals("elementname") && metaType.getMetaValue() != null && metaType.getMetaValue().length() > 0) { assertNotNull(metaType.getMetaValue()); foundElementNameExtensionValue = true; } } } assertTrue(foundElementNameExtensionValue); } else { fail("Boundary event has no extension element"); } } @Test public void testFindContainerForBoundaryEvent() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); JsonParser parser = new JsonFactory().createJsonParser(getTestJsonFile("boundaryEventsContainers.json")); parser.nextToken(); Definitions definitions = ((Definitions) unmarshaller.unmarshallItem(parser, "")); unmarshaller.revisitCatchEvents(definitions); unmarshaller.revisitCatchEventsConvertToBoundary(definitions); Process process = getRootProcess(definitions); 
for(FlowElement element : process.getFlowElements()) { if (element instanceof BoundaryEvent) { BoundaryEvent be = (BoundaryEvent) element; if ("Timer1".equals(element.getName())) { SubProcess sp = (SubProcess) unmarshaller.findContainerForBoundaryEvent(process, be); assertEquals("Subprocess1", sp.getName()); } if ("Timer2".equals(element.getName())) { SubProcess sp = (SubProcess) unmarshaller.findContainerForBoundaryEvent(process, be); assertEquals("Subprocess2", sp.getName()); } if ("Timer3".equals(element.getName())) { Process sp = (Process) unmarshaller.findContainerForBoundaryEvent(process, be); assertEquals("DemoProcess", sp.getName()); } } } } @Test public void testCompensationThrowingEvent() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCompensationEventThrowing.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); ThrowEvent compensationThrowEvent = (ThrowEvent) process.getFlowElements().get(2); assertEquals("Compensate", compensationThrowEvent.getName()); assertNotNull(compensationThrowEvent.getEventDefinitions()); assertEquals(1, compensationThrowEvent.getEventDefinitions().size()); EventDefinition ed = compensationThrowEvent.getEventDefinitions().get(0); assertTrue(ed instanceof CompensateEventDefinition); CompensateEventDefinition ced = (CompensateEventDefinition) ed; assertNotNull(ced.getActivityRef()); assertEquals("User Task", ced.getActivityRef().getName()); } @Test public void testRevisitBoundaryEventsPositions() throws Exception { final String SUBTIMER_NAME = "SubTimer"; final String SUBPROCESSMESSAGE_NAME = "SubProcessMessage"; final String OUTTIMER_NAME = "OutTimer"; final String DURING_INITIALIZATION = "during initialization"; final String AFTER_REVISION = "after revision"; List<String> initialBoundaryEventOutgointIds = null; List<String> finalBoundaryEventOutgointIds = null; Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); JsonParser parser = new JsonFactory().createJsonParser(getTestJsonFile("boundaryEvents.json")); parser.nextToken(); Definitions definitions = ((Definitions) unmarshaller.unmarshallItem(parser, "")); unmarshaller.revisitCatchEvents(definitions); unmarshaller.revisitCatchEventsConvertToBoundary(definitions); // Validate initial state for (RootElement root : definitions.getRootElements()) { if(!(root instanceof Process)) { continue; } Process process = (Process) root; assertThatElementPresent(true, DURING_INITIALIZATION, process, SUBTIMER_NAME); assertThatElementPresent(true, DURING_INITIALIZATION, process, SUBPROCESSMESSAGE_NAME); assertThatElementPresent(true, DURING_INITIALIZATION, process, OUTTIMER_NAME); for(FlowElement flow : ((Process) root).getFlowElements()) { if (SUBTIMER_NAME.equals(flow.getName())) { initialBoundaryEventOutgointIds = unmarshaller.getOutgoingFlowsMap().get(flow); } if ("Subprocess".equals(flow.getName())) { SubProcess subProcess = (SubProcess) flow; assertThatElementPresent(false, DURING_INITIALIZATION, subProcess, SUBTIMER_NAME); assertThatElementPresent(false, DURING_INITIALIZATION, subProcess, SUBPROCESSMESSAGE_NAME); assertThatElementPresent(false, DURING_INITIALIZATION, subProcess, OUTTIMER_NAME); } } } unmarshaller.revisitBoundaryEventsPositions(definitions); // Validate final state for (RootElement root : definitions.getRootElements()) { if(!(root instanceof Process)) { continue; } Process process = (Process) root; 
assertThatElementPresent(false, AFTER_REVISION, process, SUBTIMER_NAME); assertThatElementPresent(true, AFTER_REVISION, process, SUBPROCESSMESSAGE_NAME); assertThatElementPresent(true, AFTER_REVISION, process, OUTTIMER_NAME); for(FlowElement flow : ((Process) root).getFlowElements()) { if (!"Subprocess".equals(flow.getName())) { continue; } SubProcess subProcess = (SubProcess) flow; assertThatElementPresent(true, AFTER_REVISION, subProcess, SUBTIMER_NAME); assertThatElementPresent(false, AFTER_REVISION, subProcess, SUBPROCESSMESSAGE_NAME); assertThatElementPresent(false, AFTER_REVISION, subProcess, OUTTIMER_NAME); for (FlowElement subFlow : subProcess.getFlowElements()) { if (SUBTIMER_NAME.equals(subFlow.getName())) { finalBoundaryEventOutgointIds = unmarshaller.getOutgoingFlowsMap().get(subFlow); } } } } assertEquals("Outgoing flows of the boundary event should be unchanged after revision.", initialBoundaryEventOutgointIds, finalBoundaryEventOutgointIds); // Test2 unmarshaller = new Bpmn2JsonUnmarshaller(); parser = new JsonFactory().createJsonParser(getTestJsonFile("boundaryEventsContainers.json")); parser.nextToken(); definitions = ((Definitions) unmarshaller.unmarshallItem(parser, "")); unmarshaller.revisitCatchEvents(definitions); unmarshaller.revisitCatchEventsConvertToBoundary(definitions); Process process = getRootProcess(definitions); assertThatElementPresent(true, "", process, "Timer3"); assertThatElementPresent(true, "", process, "Timer1"); assertThatElementPresent(true, "", process, "Timer2"); unmarshaller.revisitBoundaryEventsPositions(definitions); assertThatElementPresent(true, "", process, "Timer3"); assertThatElementPresent(false, "", process, "Timer1"); assertThatElementPresent(false, "", process, "Timer2"); for(FlowElement flow : process.getFlowElements()) { if ("Subprocess1".equals(flow.getName())) { assertThatElementPresent(true, "", (SubProcess) flow, "Timer1"); } if ("Subprocess2".equals(flow.getName())) { assertThatElementPresent(true, "", (SubProcess) flow, "Timer2"); } } } private void assertThatElementPresent(boolean expected, String when, FlowElementsContainer where, String which) { if (expected) { assertTrue(which + " NOT found in " + where.toString() + " " + when + " but EXPECTED", isContainerContainFlowElementByName(where, which) ); } else { assertFalse(which + " FOUND in " + where.toString() + " " + when + " but NOT expected", isContainerContainFlowElementByName(where, which) ); } } private boolean isContainerContainFlowElementByName(FlowElementsContainer container, String elementName) { for (FlowElement findingSubTimer : container.getFlowElements()) { if (elementName.equals(findingSubTimer.getName())) { return true; } } return false; } @Test public void testWorkItemHandlerNoParams() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("workItemHandlerNoParams.json"), "Email,HelloWorkItemHandler,Log,Rest,WebService").getContents().get(0)); assertTrue(definitions.getRootElements().size() >= 1); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().get(0) instanceof StartEvent); StartEvent startEvent = (StartEvent) process.getFlowElements().get(0); assertEquals("TheStart", startEvent.getName()); Task task = (Task) process.getFlowElements().get(1); assertEquals("HelloWorldService", task.getName()); SequenceFlow flow = (SequenceFlow) process.getFlowElements().get(2); assertEquals("flow1", flow.getName()); assertEquals(startEvent, flow.getSourceRef()); assertEquals(task, flow.getTargetRef()); } /* 
Disabling test as no support for child lanes yet @Test public void testDoubleLaneUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("doubleLane.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); assertTrue(process.getLaneSets().size() == 1); assertTrue(process.getLaneSets().get(0).getLanes().size() == 1); Lane firstLane = process.getLaneSets().get(0).getLanes().get(0); assertEquals("First lane", firstLane.getName()); Lane secondLane = firstLane.getChildLaneSet().getLanes().get(0); assertEquals("Second lane", secondLane.getName()); assertEquals(g, secondLane.getFlowNodeRefs().get(0)); definitions.eResource().save(System.out, Collections.emptyMap()); }*/ /* Disabling test that doesn't pass. @Test public void testUserTaskDataPassing() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = unmarshaller.unmarshall(getTestJsonFile("userTaskDataPassing.json")); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); assertTrue(process.getLaneSets().size() == 1); assertTrue(process.getLaneSets().get(0).getLanes().size() == 1); Lane firstLane = process.getLaneSets().get(0).getLanes().get(0); assertEquals("First lane", firstLane.getName()); Lane secondLane = firstLane.getChildLaneSet().getLanes().get(0); assertEquals("Second lane", secondLane.getName()); assertEquals(g, secondLane.getFlowNodeRefs().get(0)); definitions.eResource().save(System.out, Collections.emptyMap()); }*/ private Process getRootProcess(Definitions def) { for(RootElement nextRootElement : def.getRootElements()) { if(nextRootElement instanceof Process) { return (Process) nextRootElement; } } return null; } @Test public void testDocumentationPropertyForBoundaryEvents() throws Exception { final String DOCUMENTATION_VALUE = "<![CDATA[Cancel task on timeout.]]>"; final String BOUNDARY_EVENT_NAME = "CancelOnTimer"; Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); JsonParser parser = new JsonFactory().createJsonParser(getTestJsonFile("boundaryEventsDocumentation.json")); parser.nextToken(); Definitions definitions = ((Definitions) unmarshaller.unmarshallItem(parser, "")); unmarshaller.revisitCatchEvents(definitions); unmarshaller.revisitCatchEventsConvertToBoundary(definitions); unmarshaller.revisitBoundaryEventsPositions(definitions); boolean documentationChecked = false; for (RootElement root : definitions.getRootElements()) { if (!(root instanceof Process)) { continue; } for (FlowElement flow : ((Process) root).getFlowElements()) { if (BOUNDARY_EVENT_NAME.equals(flow.getName())) { assertTrue(BOUNDARY_EVENT_NAME + " have no documentation.", flow.getDocumentation().size() > 0); assertEquals(DOCUMENTATION_VALUE, flow.getDocumentation().get(0).getText()); documentationChecked = true; break; } } } assertTrue("Boundary Event '" + BOUNDARY_EVENT_NAME + "' is not found in the process.", documentationChecked); } @Test public void testDocumentationForSwimlane() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("swimlane.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Lane lane = 
process.getLaneSets().get(0).getLanes().get(0); assertEquals("Swimlane name is wrong.", "Documented Swimlane", lane.getName()); assertEquals("<![CDATA[Some documentation for swimlane.]]>", lane.getDocumentation().get(0).getText()); } }
jbpm-designer-backend/src/test/java/org/jbpm/designer/bpmn2/impl/Bpmn2UnmarshallingTest.java
/* * Copyright 2010 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.designer.bpmn2.impl; import static junit.framework.Assert.*; import static org.junit.Assert.assertFalse; import java.io.File; import java.net.URL; import java.util.Collections; import java.util.List; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonParser; import org.eclipse.bpmn2.*; import org.eclipse.bpmn2.Process; import org.eclipse.emf.ecore.util.FeatureMap; import org.jboss.drools.DroolsPackage; import org.jboss.drools.MetaDataType; import org.junit.Test; /** * @author Antoine Toulme * * A series of tests to check the unmarshalling of json to bpmn2. */ public class Bpmn2UnmarshallingTest { private static File getTestJsonFile(String filename) { URL fileURL = Bpmn2UnmarshallingTest.class.getResource(filename); return new File(fileURL.getFile()); } @Test public void testSimpleDefinitionsUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("empty.json"), "").getContents().get(0)); assertEquals("<![CDATA[my doc]]>", definitions.getRootElements().get(0).getDocumentation().iterator().next().getText()); assertEquals("http://www.w3.org/1999/XPath", definitions.getExpressionLanguage()); assertEquals("http://www.omg.org/bpmn20", definitions.getTargetNamespace()); assertEquals("http://www.w3.org/2001/XMLSchema", definitions.getTypeLanguage()); assertTrue(definitions.getRootElements().size() == 1); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testSimpleGlobalTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("oneTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); assertTrue(definitions.getRootElements().iterator().next() instanceof GlobalTask); GlobalTask task = (GlobalTask) definitions.getRootElements().iterator().next(); assertEquals("oneTask", task.getName()); assertEquals("my task doc", task.getDocumentation().iterator().next().getText()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testTwoGlobalTasksUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("twoTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); assertTrue(definitions.getRootElements().get(0) instanceof GlobalTask); GlobalTask task = (GlobalTask) definitions.getRootElements().get(0); assertEquals("firstTask", task.getName()); assertEquals("my task doc", task.getDocumentation().iterator().next().getText()); GlobalTask task2 = (GlobalTask) 
definitions.getRootElements().get(1); assertEquals("secondTask", task2.getName()); assertEquals("my task doc too", task2.getDocumentation().iterator().next().getText()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testPoolUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("pool.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); assertTrue(definitions.getRootElements().get(0) instanceof Process); Process process = getRootProcess(definitions); assertEquals("pool", process.getName()); assertEquals(ProcessType.PRIVATE, process.getProcessType()); assertTrue(process.isIsClosed()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testLaneUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("pool.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); assertTrue(definitions.getRootElements().get(0) instanceof Process); Process process = getRootProcess(definitions); assertTrue(process.getLaneSets().size() == 1); assertTrue(process.getLaneSets().get(0).getLanes().size() == 1); Lane l = process.getLaneSets().get(0).getLanes().get(0); assertEquals("my first lane", l.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testSequenceFlowUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("sequenceFlow.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().get(0) instanceof Task); Task task = (Task) process.getFlowElements().get(0); assertEquals("task1", task.getName()); Task task2 = (Task) process.getFlowElements().get(1); assertEquals("task2", task2.getName()); SequenceFlow flow = (SequenceFlow) process.getFlowElements().get(2); assertEquals("seqFlow", flow.getName()); assertEquals(task, flow.getSourceRef()); assertEquals(task2, flow.getTargetRef()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testScriptTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("scriptTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); GlobalScriptTask task = (GlobalScriptTask) definitions.getRootElements().get(0); assertEquals("my script", task.getName()); assertEquals("git status | grep modified | awk '{print $3}' | xargs echo | xargs git add", task.getScript()); assertEquals("bash", task.getScriptLanguage()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testUserTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("userTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); GlobalUserTask task = 
(GlobalUserTask) definitions.getRootElements().get(0); assertEquals("ask user", task.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testBusinessRuleTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("businessRuleTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); GlobalBusinessRuleTask task = (GlobalBusinessRuleTask) definitions.getRootElements().get(0); assertEquals("call business rule", task.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } //@Test // removing until we start supporting global tasks public void testManualTaskUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("manualTask.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); GlobalManualTask task = (GlobalManualTask) definitions.getRootElements().get(0); assertEquals("pull a lever", task.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testGatewayUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("gateway.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ExclusiveGateway g = (ExclusiveGateway) process.getFlowElements().get(0); assertEquals("xor gateway", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testParallelGatewayUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("parallelGateway.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ParallelGateway g = (ParallelGateway) process.getFlowElements().get(0); assertEquals("parallel gateway", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEventBasedGatewayUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("eventBasedGateway.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EventBasedGateway g = (EventBasedGateway) process.getFlowElements().get(0); assertEquals("event-based gateway", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testInclusiveGatewayUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("inclusiveGateway.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); InclusiveGateway g = (InclusiveGateway) process.getFlowElements().get(0); assertEquals("inclusive gateway", g.getName()); definitions.eResource().save(System.out, 
Collections.emptyMap()); } @Test public void testStartEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartMessageEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startMessageEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 3); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start message event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof MessageEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartEscalationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startEscalationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start escalation event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof EscalationEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartCompensationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startCompensationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start compensation event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof CompensateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start multiple event", g.getName()); //TODO multiple event definitions ??? 
definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartParallelMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startParallelMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start parallel multiple event", g.getName()); //TODO multiple event definitions ??? definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartSignalEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startSignalEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start signal event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof SignalEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testStartTimerEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startTimerEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); StartEvent g = (StartEvent) process.getFlowElements().get(0); assertEquals("start timer event", g.getName()); assertTrue(g.getEventDefinitions().size() == 1); assertTrue(g.getEventDefinitions().iterator().next() instanceof TimerEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testGroupUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("group.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); Group group = (Group) process.getArtifacts().iterator().next(); assertEquals("Group name is wrong.", group.getCategoryValueRef().getValue(), "group"); assertEquals(group.getDocumentation().get(0).getText(), "<![CDATA[group documentation]]>"); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testTextAnnotationUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("textAnnotation.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().iterator().next() instanceof TextAnnotation); TextAnnotation ta = (TextAnnotation) process.getFlowElements().iterator().next(); assertEquals("text annotation", ta.getText()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testDataObjectUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = 
((Definitions) unmarshaller.unmarshall(getTestJsonFile("dataObject.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().iterator().next() instanceof DataObject); DataObject da = (DataObject) process.getFlowElements().iterator().next(); assertEquals("data object", da.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndMessageEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endMessageEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 3); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end message event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof MessageEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndEscalationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endEscalationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end escalation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof EscalationEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndErrorEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endErrorEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end error event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof ErrorEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndSignalEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endSignalEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end signal event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof SignalEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void 
testEndTerminateEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endTerminateEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("terminate end event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof TerminateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end multiple event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testEndCompensationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("endCompensationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); EndEvent g = (EndEvent) process.getFlowElements().get(0); assertEquals("end compensation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof CompensateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testSimpleChainUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("startEvent-task-endEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().size() == 5); assertTrue(process.getLaneSets().size() == 1); assertTrue(process.getLaneSets().get(0).getLanes().size() == 1); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchMessageEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchMessageEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 3); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch message event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof MessageEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchTimerEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchTimerEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) 
process.getFlowElements().get(0); assertEquals("catch timer event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof TimerEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchEscalationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchEscalationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch escalation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof EscalationEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchConditionalEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchConditionalEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch conditional event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof ConditionalEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchLinkEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchLinkEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch link event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof LinkEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchErrorEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchErrorEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch error event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof ErrorEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchCancelEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchCancelEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch cancel event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof CancelEventDefinition); 
definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchCompensationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchCompensationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch compensation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof CompensateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch multiple event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateCatchParallelMultipleEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCatchParallelMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); CatchEvent g = (CatchEvent) process.getFlowElements().get(0); assertEquals("catch parallel multiple event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowMessageEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowMessageEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 3); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw message event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof MessageEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowEscalationEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowEscalationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 2); Process process 
= getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw escalation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof EscalationEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowLinkEventUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowLinkEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw link event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof LinkEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowCompensationUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowCompensationEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw compensation event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof CompensateEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowSignalUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowSignalEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw signal event", g.getName()); assertTrue(g.getEventDefinitions().iterator().next() instanceof SignalEventDefinition); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testIntermediateThrowMultipleUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateThrowMultipleEvent.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); ThrowEvent g = (ThrowEvent) process.getFlowElements().get(0); assertEquals("throw multiple event", g.getName()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testAssociationUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("association.json"), "").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); TextAnnotation textA = (TextAnnotation) process.getFlowElements().get(1); Association association = (Association) process.getArtifacts().get(0); assertEquals(g, association.getSourceRef()); assertEquals(textA, 
association.getTargetRef()); assertEquals(AssociationDirection.NONE, association.getAssociationDirection()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testAssociationUnidirectionalUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("associationOne.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); TextAnnotation textA = (TextAnnotation) process.getFlowElements().get(1); Association association = (Association) process.getArtifacts().get(0); assertEquals(g, association.getSourceRef()); assertEquals(textA, association.getTargetRef()); assertEquals(AssociationDirection.ONE, association.getAssociationDirection()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testAssociationBidirectionalUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("associationBoth.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); TextAnnotation textA = (TextAnnotation) process.getFlowElements().get(1); Association association = (Association) process.getArtifacts().get(0); assertEquals(g, association.getSourceRef()); assertEquals(textA, association.getTargetRef()); assertEquals(AssociationDirection.BOTH, association.getAssociationDirection()); definitions.eResource().save(System.out, Collections.emptyMap()); } @Test public void testBoundaryEventMultiLineName() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("boundaryEventMultiLineName.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Boolean foundElementNameExtensionValue = false; BoundaryEvent event = (BoundaryEvent) process.getFlowElements().get(1); if(event.getExtensionValues() != null && event.getExtensionValues().size() > 0) { for(ExtensionAttributeValue extattrval : event.getExtensionValues()) { FeatureMap extensionElements = extattrval.getValue(); List<MetaDataType> metadataExtensions = (List<MetaDataType>) extensionElements .get(DroolsPackage.Literals.DOCUMENT_ROOT__META_DATA, true); assertNotNull(metadataExtensions); assertTrue(metadataExtensions.size() == 1); for(MetaDataType metaType : metadataExtensions) { if(metaType.getName()!= null && metaType.getName().equals("elementname") && metaType.getMetaValue() != null && metaType.getMetaValue().length() > 0) { assertNotNull(metaType.getMetaValue()); foundElementNameExtensionValue = true; } } } assertTrue(foundElementNameExtensionValue); } else { fail("Boundary event has no extension element"); } } @Test public void testFindContainerForBoundaryEvent() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); JsonParser parser = new JsonFactory().createJsonParser(getTestJsonFile("boundaryEventsContainers.json")); parser.nextToken(); Definitions definitions = ((Definitions) unmarshaller.unmarshallItem(parser, "")); unmarshaller.revisitCatchEvents(definitions); unmarshaller.revisitCatchEventsConvertToBoundary(definitions); Process process = getRootProcess(definitions); 
for(FlowElement element : process.getFlowElements()) { if (element instanceof BoundaryEvent) { BoundaryEvent be = (BoundaryEvent) element; if ("Timer1".equals(element.getName())) { SubProcess sp = (SubProcess) unmarshaller.findContainerForBoundaryEvent(process, be); assertEquals("Subprocess1", sp.getName()); } if ("Timer2".equals(element.getName())) { SubProcess sp = (SubProcess) unmarshaller.findContainerForBoundaryEvent(process, be); assertEquals("Subprocess2", sp.getName()); } if ("Timer3".equals(element.getName())) { Process sp = (Process) unmarshaller.findContainerForBoundaryEvent(process, be); assertEquals("DemoProcess", sp.getName()); } } } } @Test public void testCompensationThrowingEvent() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("intermediateCompensationEventThrowing.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); ThrowEvent compensationThrowEvent = (ThrowEvent) process.getFlowElements().get(2); assertEquals("Compensate", compensationThrowEvent.getName()); assertNotNull(compensationThrowEvent.getEventDefinitions()); assertEquals(1, compensationThrowEvent.getEventDefinitions().size()); EventDefinition ed = compensationThrowEvent.getEventDefinitions().get(0); assertTrue(ed instanceof CompensateEventDefinition); CompensateEventDefinition ced = (CompensateEventDefinition) ed; assertNotNull(ced.getActivityRef()); assertEquals("User Task", ced.getActivityRef().getName()); } @Test public void testRevisitBoundaryEventsPositions() throws Exception { final String SUBTIMER_NAME = "SubTimer"; final String SUBPROCESSMESSAGE_NAME = "SubProcessMessage"; final String OUTTIMER_NAME = "OutTimer"; final String DURING_INITIALIZATION = "during initialization"; final String AFTER_REVISION = "after revision"; List<String> initialBoundaryEventOutgointIds = null; List<String> finalBoundaryEventOutgointIds = null; Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); JsonParser parser = new JsonFactory().createJsonParser(getTestJsonFile("boundaryEvents.json")); parser.nextToken(); Definitions definitions = ((Definitions) unmarshaller.unmarshallItem(parser, "")); unmarshaller.revisitCatchEvents(definitions); unmarshaller.revisitCatchEventsConvertToBoundary(definitions); // Validate initial state for (RootElement root : definitions.getRootElements()) { if(!(root instanceof Process)) { continue; } Process process = (Process) root; assertThatElementPresent(true, DURING_INITIALIZATION, process, SUBTIMER_NAME); assertThatElementPresent(true, DURING_INITIALIZATION, process, SUBPROCESSMESSAGE_NAME); assertThatElementPresent(true, DURING_INITIALIZATION, process, OUTTIMER_NAME); for(FlowElement flow : ((Process) root).getFlowElements()) { if (SUBTIMER_NAME.equals(flow.getName())) { initialBoundaryEventOutgointIds = unmarshaller.getOutgoingFlowsMap().get(flow); } if ("Subprocess".equals(flow.getName())) { SubProcess subProcess = (SubProcess) flow; assertThatElementPresent(false, DURING_INITIALIZATION, subProcess, SUBTIMER_NAME); assertThatElementPresent(false, DURING_INITIALIZATION, subProcess, SUBPROCESSMESSAGE_NAME); assertThatElementPresent(false, DURING_INITIALIZATION, subProcess, OUTTIMER_NAME); } } } unmarshaller.revisitBoundaryEventsPositions(definitions); // Validate final state for (RootElement root : definitions.getRootElements()) { if(!(root instanceof Process)) { continue; } Process process = (Process) root; 
assertThatElementPresent(false, AFTER_REVISION, process, SUBTIMER_NAME); assertThatElementPresent(true, AFTER_REVISION, process, SUBPROCESSMESSAGE_NAME); assertThatElementPresent(true, AFTER_REVISION, process, OUTTIMER_NAME); for(FlowElement flow : ((Process) root).getFlowElements()) { if (!"Subprocess".equals(flow.getName())) { continue; } SubProcess subProcess = (SubProcess) flow; assertThatElementPresent(true, AFTER_REVISION, subProcess, SUBTIMER_NAME); assertThatElementPresent(false, AFTER_REVISION, subProcess, SUBPROCESSMESSAGE_NAME); assertThatElementPresent(false, AFTER_REVISION, subProcess, OUTTIMER_NAME); for (FlowElement subFlow : subProcess.getFlowElements()) { if (SUBTIMER_NAME.equals(subFlow.getName())) { finalBoundaryEventOutgointIds = unmarshaller.getOutgoingFlowsMap().get(subFlow); } } } } assertEquals(initialBoundaryEventOutgointIds, finalBoundaryEventOutgointIds); // Test2 unmarshaller = new Bpmn2JsonUnmarshaller(); parser = new JsonFactory().createJsonParser(getTestJsonFile("boundaryEventsContainers.json")); parser.nextToken(); definitions = ((Definitions) unmarshaller.unmarshallItem(parser, "")); unmarshaller.revisitCatchEvents(definitions); unmarshaller.revisitCatchEventsConvertToBoundary(definitions); Process process = getRootProcess(definitions); assertThatElementPresent(true, "", process, "Timer3"); assertThatElementPresent(true, "", process, "Timer1"); assertThatElementPresent(true, "", process, "Timer2"); unmarshaller.revisitBoundaryEventsPositions(definitions); assertThatElementPresent(true, "", process, "Timer3"); assertThatElementPresent(false, "", process, "Timer1"); assertThatElementPresent(false, "", process, "Timer2"); for(FlowElement flow : process.getFlowElements()) { if ("Subprocess1".equals(flow.getName())) { assertThatElementPresent(true, "", (SubProcess) flow, "Timer1"); } if ("Subprocess2".equals(flow.getName())) { assertThatElementPresent(true, "", (SubProcess) flow, "Timer2"); } } } private void assertThatElementPresent(boolean expected, String when, FlowElementsContainer where, String which) { if (expected) { assertTrue(which + " NOT found in " + where.toString() + " " + when + " but EXPECTED", isContainerContainFlowElementByName(where, which) ); } else { assertFalse(which + " FOUND in " + where.toString() + " " + when + " but NOT expected", isContainerContainFlowElementByName(where, which) ); } } private boolean isContainerContainFlowElementByName(FlowElementsContainer container, String elementName) { for (FlowElement findingSubTimer : container.getFlowElements()) { if (elementName.equals(findingSubTimer.getName())) { return true; } } return false; } @Test public void testWorkItemHandlerNoParams() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("workItemHandlerNoParams.json"), "Email,HelloWorkItemHandler,Log,Rest,WebService").getContents().get(0)); assertTrue(definitions.getRootElements().size() == 1); Process process = getRootProcess(definitions); assertTrue(process.getFlowElements().get(0) instanceof StartEvent); StartEvent startEvent = (StartEvent) process.getFlowElements().get(0); assertEquals("TheStart", startEvent.getName()); Task task = (Task) process.getFlowElements().get(1); assertEquals("HelloWorldService", task.getName()); SequenceFlow flow = (SequenceFlow) process.getFlowElements().get(2); assertEquals("flow1", flow.getName()); assertEquals(startEvent, flow.getSourceRef()); assertEquals(task, flow.getTargetRef()); } /*
Disabling test as no support for child lanes yet @Test public void testDoubleLaneUnmarshalling() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("doubleLane.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); assertTrue(process.getLaneSets().size() == 1); assertTrue(process.getLaneSets().get(0).getLanes().size() == 1); Lane firstLane = process.getLaneSets().get(0).getLanes().get(0); assertEquals("First lane", firstLane.getName()); Lane secondLane = firstLane.getChildLaneSet().getLanes().get(0); assertEquals("Second lane", secondLane.getName()); assertEquals(g, secondLane.getFlowNodeRefs().get(0)); definitions.eResource().save(System.out, Collections.emptyMap()); }*/ /* Disabling test that doesn't pass. @Test public void testUserTaskDataPassing() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = unmarshaller.unmarshall(getTestJsonFile("userTaskDataPassing.json")); Process process = getRootProcess(definitions); Task g = (Task) process.getFlowElements().get(0); assertEquals("task", g.getName()); assertTrue(process.getLaneSets().size() == 1); assertTrue(process.getLaneSets().get(0).getLanes().size() == 1); Lane firstLane = process.getLaneSets().get(0).getLanes().get(0); assertEquals("First lane", firstLane.getName()); Lane secondLane = firstLane.getChildLaneSet().getLanes().get(0); assertEquals("Second lane", secondLane.getName()); assertEquals(g, secondLane.getFlowNodeRefs().get(0)); definitions.eResource().save(System.out, Collections.emptyMap()); }*/ private Process getRootProcess(Definitions def) { for(RootElement nextRootElement : def.getRootElements()) { if(nextRootElement instanceof Process) { return (Process) nextRootElement; } } return null; } @Test public void testDocumentationPropertyForBoundaryEvents() throws Exception { final String DOCUMENTATION_VALUE = "<![CDATA[Cancel task on timeout.]]>"; final String BOUNDARY_EVENT_NAME = "CancelOnTimer"; Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); JsonParser parser = new JsonFactory().createJsonParser(getTestJsonFile("boundaryEventsDocumentation.json")); parser.nextToken(); Definitions definitions = ((Definitions) unmarshaller.unmarshallItem(parser, "")); unmarshaller.revisitCatchEvents(definitions); unmarshaller.revisitCatchEventsConvertToBoundary(definitions); unmarshaller.revisitBoundaryEventsPositions(definitions); boolean documentationChecked = false; for (RootElement root : definitions.getRootElements()) { if (!(root instanceof Process)) { continue; } for (FlowElement flow : ((Process) root).getFlowElements()) { if (BOUNDARY_EVENT_NAME.equals(flow.getName())) { assertTrue(BOUNDARY_EVENT_NAME + " have no documentation.", flow.getDocumentation().size() > 0); assertEquals(DOCUMENTATION_VALUE, flow.getDocumentation().get(0).getText()); documentationChecked = true; break; } } } assertTrue("Boundary Event '" + BOUNDARY_EVENT_NAME + "' is not found in the process.", documentationChecked); } @Test public void testDocumentationForSwimlane() throws Exception { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); Definitions definitions = ((Definitions) unmarshaller.unmarshall(getTestJsonFile("swimlane.json"), "").getContents().get(0)); Process process = getRootProcess(definitions); Lane lane = 
process.getLaneSets().get(0).getLanes().get(0); assertEquals("Swimlane name is wrong.", "Documented Swimlane", lane.getName()); assertEquals("<![CDATA[Some documentation for swimlane.]]>", lane.getDocumentation().get(0).getText()); } }
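The event tests in this class all follow the same pattern: build a Bpmn2JsonUnmarshaller, load a JSON fixture with getTestJsonFile, check the root-element count, and then inspect the first flow element of the root process. A minimal sketch of a helper that could factor this pattern out is shown below; the method name unmarshalFirstFlowElement is hypothetical (not part of the original class), and the sketch assumes the JUnit 4 assertions and the getTestJsonFile/getRootProcess helpers that the class already defines.

    // Hypothetical helper, sketched for illustration only: unmarshals a fixture and returns the
    // first flow element of the root process after the structural assertions shared by the tests above.
    private <T extends FlowElement> T unmarshalFirstFlowElement(String jsonFile, int expectedRootElements,
            Class<T> expectedType) throws Exception {
        Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller();
        Definitions definitions = (Definitions) unmarshaller
                .unmarshall(getTestJsonFile(jsonFile), "").getContents().get(0);
        assertEquals(expectedRootElements, definitions.getRootElements().size());
        Process process = getRootProcess(definitions);
        FlowElement first = process.getFlowElements().get(0);
        assertTrue("unexpected element type: " + first.getClass(), expectedType.isInstance(first));
        return expectedType.cast(first);
    }

With such a helper, a test like testEndMessageEventUnmarshalling could reduce to: EndEvent e = unmarshalFirstFlowElement("endMessageEvent.json", 3, EndEvent.class); assertEquals("end message event", e.getName()); assertTrue(e.getEventDefinitions().iterator().next() instanceof MessageEventDefinition);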
Fix failing unit test Bpmn2UnmarshallingTest.testWorkItemHandlerNoParams
jbpm-designer-backend/src/test/java/org/jbpm/designer/bpmn2/impl/Bpmn2UnmarshallingTest.java
Fix failing unit test Bpmn2UnmarshallingTest.testWorkItemHandlerNoParams
Java
apache-2.0
1181e6329e3326a8dc4bb08d1527fd6e3ebcd4cd
0
gentics/mesh,gentics/mesh,gentics/mesh,gentics/mesh
package com.gentics.mesh.core.data.node.impl; import static com.gentics.mesh.core.data.GraphFieldContainerEdge.WEBROOT_INDEX_NAME; import static com.gentics.mesh.core.data.relationship.GraphPermission.CREATE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.READ_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.READ_PUBLISHED_PERM; import static com.gentics.mesh.core.data.relationship.GraphRelationships.ASSIGNED_TO_PROJECT; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_CREATOR; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_FIELD; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_FIELD_CONTAINER; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_ITEM; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_PARENT_NODE; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_ROLE; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_ROOT_NODE; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_SCHEMA_CONTAINER; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_TAG; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_USER; import static com.gentics.mesh.core.rest.MeshEvent.NODE_MOVED; import static com.gentics.mesh.core.rest.MeshEvent.NODE_REFERENCE_UPDATED; import static com.gentics.mesh.core.rest.MeshEvent.NODE_TAGGED; import static com.gentics.mesh.core.rest.MeshEvent.NODE_UNTAGGED; import static com.gentics.mesh.core.rest.common.ContainerType.DRAFT; import static com.gentics.mesh.core.rest.common.ContainerType.INITIAL; import static com.gentics.mesh.core.rest.common.ContainerType.PUBLISHED; import static com.gentics.mesh.core.rest.common.ContainerType.forVersion; import static com.gentics.mesh.core.rest.error.Errors.error; import static com.gentics.mesh.event.Assignment.ASSIGNED; import static com.gentics.mesh.event.Assignment.UNASSIGNED; import static com.gentics.mesh.madl.field.FieldType.LINK; import static com.gentics.mesh.madl.field.FieldType.STRING; import static com.gentics.mesh.madl.index.EdgeIndexDefinition.edgeIndex; import static com.gentics.mesh.madl.type.VertexTypeDefinition.vertexType; import static com.gentics.mesh.util.StreamUtil.toStream; import static com.gentics.mesh.util.URIUtils.encodeSegment; import static com.tinkerpop.blueprints.Direction.IN; import static com.tinkerpop.blueprints.Direction.OUT; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.METHOD_NOT_ALLOWED; import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; import static org.apache.commons.lang3.StringUtils.isEmpty; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Deque; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; import org.apache.commons.lang3.NotImplementedException; import com.gentics.madl.index.IndexHandler; import com.gentics.madl.tx.Tx; import com.gentics.madl.type.TypeHandler; import com.gentics.mesh.context.BulkActionContext; import com.gentics.mesh.context.InternalActionContext; 
import com.gentics.mesh.core.data.Branch; import com.gentics.mesh.core.data.GraphFieldContainer; import com.gentics.mesh.core.data.GraphFieldContainerEdge; import com.gentics.mesh.core.data.Language; import com.gentics.mesh.core.data.MeshAuthUser; import com.gentics.mesh.core.data.NodeGraphFieldContainer; import com.gentics.mesh.core.data.Project; import com.gentics.mesh.core.data.Role; import com.gentics.mesh.core.data.Tag; import com.gentics.mesh.core.data.TagEdge; import com.gentics.mesh.core.data.User; import com.gentics.mesh.core.data.container.impl.NodeGraphFieldContainerImpl; import com.gentics.mesh.core.data.diff.FieldContainerChange; import com.gentics.mesh.core.data.generic.AbstractGenericFieldContainerVertex; import com.gentics.mesh.core.data.generic.MeshVertexImpl; import com.gentics.mesh.core.data.impl.GraphFieldContainerEdgeImpl; import com.gentics.mesh.core.data.impl.ProjectImpl; import com.gentics.mesh.core.data.impl.TagEdgeImpl; import com.gentics.mesh.core.data.impl.TagImpl; import com.gentics.mesh.core.data.impl.UserImpl; import com.gentics.mesh.core.data.node.Node; import com.gentics.mesh.core.data.node.field.BinaryGraphField; import com.gentics.mesh.core.data.node.field.StringGraphField; import com.gentics.mesh.core.data.node.field.impl.NodeGraphFieldImpl; import com.gentics.mesh.core.data.node.field.nesting.NodeGraphField; import com.gentics.mesh.core.data.page.TransformablePage; import com.gentics.mesh.core.data.page.impl.DynamicTransformablePageImpl; import com.gentics.mesh.core.data.relationship.GraphPermission; import com.gentics.mesh.core.data.schema.SchemaContainer; import com.gentics.mesh.core.data.schema.SchemaContainerVersion; import com.gentics.mesh.core.data.schema.impl.SchemaContainerImpl; import com.gentics.mesh.core.link.WebRootLinkReplacer; import com.gentics.mesh.core.rest.MeshEvent; import com.gentics.mesh.core.rest.common.ContainerType; import com.gentics.mesh.core.rest.error.NodeVersionConflictException; import com.gentics.mesh.core.rest.error.NotModifiedException; import com.gentics.mesh.core.rest.event.MeshElementEventModel; import com.gentics.mesh.core.rest.event.MeshProjectElementEventModel; import com.gentics.mesh.core.rest.event.node.NodeMeshEventModel; import com.gentics.mesh.core.rest.event.node.NodeMovedEventModel; import com.gentics.mesh.core.rest.event.node.NodeTaggedEventModel; import com.gentics.mesh.core.rest.event.role.PermissionChangedProjectElementEventModel; import com.gentics.mesh.core.rest.navigation.NavigationElement; import com.gentics.mesh.core.rest.navigation.NavigationResponse; import com.gentics.mesh.core.rest.node.FieldMapImpl; import com.gentics.mesh.core.rest.node.NodeChildrenInfo; import com.gentics.mesh.core.rest.node.NodeCreateRequest; import com.gentics.mesh.core.rest.node.NodeResponse; import com.gentics.mesh.core.rest.node.NodeUpdateRequest; import com.gentics.mesh.core.rest.node.PublishStatusModel; import com.gentics.mesh.core.rest.node.PublishStatusResponse; import com.gentics.mesh.core.rest.node.field.Field; import com.gentics.mesh.core.rest.node.field.NodeFieldListItem; import com.gentics.mesh.core.rest.node.field.list.impl.NodeFieldListItemImpl; import com.gentics.mesh.core.rest.node.version.NodeVersionsResponse; import com.gentics.mesh.core.rest.node.version.VersionInfo; import com.gentics.mesh.core.rest.schema.FieldSchema; import com.gentics.mesh.core.rest.schema.Schema; import com.gentics.mesh.core.rest.tag.TagReference; import com.gentics.mesh.core.rest.user.NodeReference; import 
com.gentics.mesh.core.webroot.PathPrefixUtil; import com.gentics.mesh.dagger.DB; import com.gentics.mesh.dagger.MeshInternal; import com.gentics.mesh.event.Assignment; import com.gentics.mesh.event.EventQueueBatch; import com.gentics.mesh.graphdb.spi.Database; import com.gentics.mesh.handler.ActionContext; import com.gentics.mesh.handler.VersionHandler; import com.gentics.mesh.json.JsonUtil; import com.gentics.mesh.madl.traversal.TraversalResult; import com.gentics.mesh.parameter.DeleteParameters; import com.gentics.mesh.parameter.GenericParameters; import com.gentics.mesh.parameter.LinkType; import com.gentics.mesh.parameter.NavigationParameters; import com.gentics.mesh.parameter.NodeParameters; import com.gentics.mesh.parameter.PagingParameters; import com.gentics.mesh.parameter.PublishParameters; import com.gentics.mesh.parameter.VersioningParameters; import com.gentics.mesh.parameter.impl.NavigationParametersImpl; import com.gentics.mesh.parameter.impl.VersioningParametersImpl; import com.gentics.mesh.parameter.value.FieldsSet; import com.gentics.mesh.path.Path; import com.gentics.mesh.path.PathSegment; import com.gentics.mesh.util.DateUtils; import com.gentics.mesh.util.ETag; import com.gentics.mesh.util.StreamUtil; import com.gentics.mesh.util.URIUtils; import com.gentics.mesh.util.VersionNumber; import com.syncleus.ferma.EdgeFrame; import com.syncleus.ferma.FramedGraph; import com.syncleus.ferma.traversals.EdgeTraversal; import com.syncleus.ferma.traversals.VertexTraversal; import com.tinkerpop.blueprints.Direction; import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Vertex; import io.reactivex.Observable; import io.reactivex.Single; import io.vertx.core.logging.Logger; import io.vertx.core.logging.LoggerFactory; /** * @see Node */ public class NodeImpl extends AbstractGenericFieldContainerVertex<NodeResponse, Node> implements Node { private static final Logger log = LoggerFactory.getLogger(NodeImpl.class); public static void init(TypeHandler type, IndexHandler index) { type.createType(vertexType(NodeImpl.class, MeshVertexImpl.class)); index.createIndex(edgeIndex(HAS_PARENT_NODE)); index.createIndex(edgeIndex(HAS_PARENT_NODE) .withPostfix("branch_out") .withField("out", LINK) .withField(BRANCH_UUID_KEY, STRING)); index.createIndex(edgeIndex(HAS_PARENT_NODE) .withPostfix("branch") .withField("in", LINK) .withField(BRANCH_UUID_KEY, STRING)); index.createIndex(edgeIndex(HAS_FIELD_CONTAINER) .withPostfix("field") .withField("out", LINK) .withField(GraphFieldContainerEdge.BRANCH_UUID_KEY, STRING) .withField(GraphFieldContainerEdge.EDGE_TYPE_KEY, STRING)); } @Override public String getPathSegment(String branchUuid, ContainerType type, String... languageTag) { // Check whether this node is the base node. if (getParentNode(branchUuid) == null) { return ""; } // Find the first matching container and fallback to other listed languages NodeGraphFieldContainer container = null; for (String tag : languageTag) { if ((container = getGraphFieldContainer(tag, branchUuid, type)) != null) { break; } } if (container != null) { return container.getSegmentFieldValue(); } return null; } @Override public void postfixPathSegment(String branchUuid, ContainerType type, String languageTag) { // Check whether this node is the base node. 
if (getParentNode(branchUuid) == null) { return; } // Find the first matching container and fallback to other listed languages NodeGraphFieldContainer container = getGraphFieldContainer(languageTag, branchUuid, type); if (container != null) { container.postfixSegmentFieldValue(); } } @Override public String getPath(ActionContext ac, String branchUuid, ContainerType type, String... languageTag) { // We want to avoid rending the path again for nodes which we have already handled. // Thus utilise the action context data map to retrieve already handled paths. String cacheKey = getUuid() + branchUuid + type.getCode() + Arrays.toString(languageTag); return (String) ac.data().computeIfAbsent(cacheKey, key -> { List<String> segments = new ArrayList<>(); String segment = getPathSegment(branchUuid, type, languageTag); if (segment == null) { return null; } segments.add(segment); // For the path segments of the container, we add all (additional) // project languages to the list of languages for the fallback. List<String> langList = new ArrayList<>(); langList.addAll(Arrays.asList(languageTag)); // TODO maybe we only want to get the project languages? langList.addAll(MeshInternal.get().boot().getAllLanguageTags()); String[] projectLanguages = langList.toArray(new String[langList.size()]); Node current = this; while (current != null) { current = current.getParentNode(branchUuid); if (current == null || current.getParentNode(branchUuid) == null) { break; } // For the path segments of the container, we allow ANY language (of the project) segment = current.getPathSegment(branchUuid, type, projectLanguages); // Abort early if one of the path segments could not be resolved. We // need to return a 404 in those cases. if (segment == null) { return null; } segments.add(segment); } Collections.reverse(segments); // Finally construct the path from all segments StringBuilder builder = new StringBuilder(); // Append the prefix first Branch branch = getProject().getBranchRoot().findByUuid(branchUuid); if (branch != null) { String prefix = PathPrefixUtil.sanitize(branch.getPathPrefix()); if (!prefix.isEmpty()) { String[] prefixSegments = prefix.split("/"); for (String prefixSegment : prefixSegments) { if (prefixSegment.isEmpty()) { continue; } builder.append("/").append(URIUtils.encodeSegment(prefixSegment)); } } } Iterator<String> it = segments.iterator(); while (it.hasNext()) { String currentSegment = it.next(); builder.append("/").append(URIUtils.encodeSegment(currentSegment)); } return builder.toString(); }); } @Override public void assertPublishConsistency(InternalActionContext ac, Branch branch) { String branchUuid = branch.getUuid(); // Check whether the node got a published version and thus is published boolean isPublished = hasPublishedContent(branchUuid); // A published node must have also a published parent node. if (isPublished) { Node parentNode = getParentNode(branchUuid); // Only assert consistency of parent nodes which are not project // base nodes. if (parentNode != null && (!parentNode.getUuid().equals(getProject().getBaseNode().getUuid()))) { // Check whether the parent node has a published field container // for the given branch and language if (!parentNode.hasPublishedContent(branchUuid)) { log.error("Could not find published field container for node {" + parentNode.getUuid() + "} in branch {" + branchUuid + "}"); throw error(BAD_REQUEST, "node_error_parent_containers_not_published", parentNode.getUuid()); } } } // A draft node can't have any published child nodes. 
if (!isPublished) { for (Node node : getChildren(branchUuid)) { if (node.hasPublishedContent(branchUuid)) { log.error("Found published field container for node {" + node.getUuid() + "} in branch {" + branchUuid + "}. Node is child of {" + getUuid() + "}"); throw error(BAD_REQUEST, "node_error_children_containers_still_published", node.getUuid()); } } } } @Override public TraversalResult<? extends Tag> getTags(Branch branch) { return new TraversalResult<>(TagEdgeImpl.getTagTraversal(this, branch).frameExplicit(TagImpl.class)); } @Override public boolean hasTag(Tag tag, Branch branch) { return TagEdgeImpl.hasTag(this, tag, branch); } @Override public TraversalResult<? extends NodeGraphFieldContainer> getGraphFieldContainers(String branchUuid, ContainerType type) { return new TraversalResult<>(outE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()).inV().frameExplicit(NodeGraphFieldContainerImpl.class)); } @Override public TraversalResult<? extends NodeGraphFieldContainer> getGraphFieldContainersIt(ContainerType type) { return new TraversalResult<>( outE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()).inV() .frameExplicit(NodeGraphFieldContainerImpl.class)); } @Override public TraversalResult<? extends NodeGraphFieldContainer> getGraphFieldContainersIt(String branchUuid, ContainerType type) { return new TraversalResult<>( outE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()).inV().frameExplicit(NodeGraphFieldContainerImpl.class)); } @SuppressWarnings("unchecked") @Override public long getGraphFieldContainerCount() { return outE(HAS_FIELD_CONTAINER).or(e -> e.traversal().has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, DRAFT.getCode()), e -> e.traversal() .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, PUBLISHED.getCode())).inV().count(); } @Override public NodeGraphFieldContainer getLatestDraftFieldContainer(String languageTag) { return getGraphFieldContainer(languageTag, getProject().getLatestBranch(), DRAFT, NodeGraphFieldContainerImpl.class); } @Override public NodeGraphFieldContainer getGraphFieldContainer(String languageTag, Branch branch, ContainerType type) { return getGraphFieldContainer(languageTag, branch, type, NodeGraphFieldContainerImpl.class); } @Override public NodeGraphFieldContainer getGraphFieldContainer(String languageTag) { return getGraphFieldContainer(languageTag, getProject().getLatestBranch().getUuid(), DRAFT, NodeGraphFieldContainerImpl.class); } @Override public NodeGraphFieldContainer getGraphFieldContainer(String languageTag, String branchUuid, ContainerType type) { return getGraphFieldContainer(languageTag, branchUuid, type, NodeGraphFieldContainerImpl.class); } @Override public NodeGraphFieldContainer createGraphFieldContainer(String languageTag, Branch branch, User editor) { return createGraphFieldContainer(languageTag, branch, editor, null, true); } @Override public NodeGraphFieldContainer createGraphFieldContainer(String languageTag, Branch branch, User editor, NodeGraphFieldContainer original, boolean handleDraftEdge) { NodeGraphFieldContainerImpl previous = null; EdgeFrame draftEdge = null; String branchUuid = branch.getUuid(); // check whether there is a current draft version if (handleDraftEdge) { draftEdge = getGraphFieldContainerEdgeFrame(languageTag, branchUuid, DRAFT); if (draftEdge != null) { previous = 
draftEdge.inV().nextOrDefault(NodeGraphFieldContainerImpl.class, null); } } // Create the new container NodeGraphFieldContainerImpl newContainer = getGraph().addFramedVertex(NodeGraphFieldContainerImpl.class); if (original != null) { newContainer.setEditor(editor); newContainer.setLastEditedTimestamp(); newContainer.setLanguageTag(languageTag); newContainer.setSchemaContainerVersion(original.getSchemaContainerVersion()); } else { newContainer.setEditor(editor); newContainer.setLastEditedTimestamp(); newContainer.setLanguageTag(languageTag); // We need create a new container with no reference. So use the latest version available to use. newContainer.setSchemaContainerVersion(branch.findLatestSchemaVersion(getSchemaContainer())); } if (previous != null) { // set the next version number newContainer.setVersion(previous.getVersion().nextDraft()); previous.setNextVersion(newContainer); } else { // set the initial version number newContainer.setVersion(new VersionNumber()); } // clone the original or the previous container if (original != null) { newContainer.clone(original); } else if (previous != null) { newContainer.clone(previous); } // remove existing draft edge if (draftEdge != null) { draftEdge.remove(); newContainer.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_update"); } // We need to update the display field property since we created a new // node graph field container. newContainer.updateDisplayFieldValue(); if (handleDraftEdge) { // create a new draft edge GraphFieldContainerEdge edge = addFramedEdge(HAS_FIELD_CONTAINER, newContainer, GraphFieldContainerEdgeImpl.class); edge.setLanguageTag(languageTag); edge.setBranchUuid(branchUuid); edge.setType(DRAFT); } // if there is no initial edge, create one if (getGraphFieldContainerEdge(languageTag, branchUuid, INITIAL) == null) { GraphFieldContainerEdge initialEdge = addFramedEdge(HAS_FIELD_CONTAINER, newContainer, GraphFieldContainerEdgeImpl.class); initialEdge.setLanguageTag(languageTag); initialEdge.setBranchUuid(branchUuid); initialEdge.setType(INITIAL); } return newContainer; } @Override public EdgeFrame getGraphFieldContainerEdgeFrame(String languageTag, String branchUuid, ContainerType type) { EdgeTraversal<?, ?, ?> edgeTraversal = outE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.LANGUAGE_TAG_KEY, languageTag).has( GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid).has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()); if (edgeTraversal.hasNext()) { return edgeTraversal.next(); } else { return null; } } /** * Get all graph field. * * @param branchUuid * @param type * @return */ protected Iterable<? 
extends GraphFieldContainerEdgeImpl> getGraphFieldContainerEdges(String branchUuid, ContainerType type) { EdgeTraversal<?, ?, ?> edgeTraversal = outE(HAS_FIELD_CONTAINER) .has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()); return edgeTraversal.frameExplicit(GraphFieldContainerEdgeImpl.class); } @Override public void addTag(Tag tag, Branch branch) { removeTag(tag, branch); TagEdge edge = addFramedEdge(HAS_TAG, tag, TagEdgeImpl.class); edge.setBranchUuid(branch.getUuid()); } @Override public void removeTag(Tag tag, Branch branch) { outE(HAS_TAG).has(TagEdgeImpl.BRANCH_UUID_KEY, branch.getUuid()).mark().inV().retain(tag).back().removeAll(); } @Override public void removeAllTags(Branch branch) { outE(HAS_TAG).has(TagEdgeImpl.BRANCH_UUID_KEY, branch.getUuid()).removeAll(); } @Override public void setSchemaContainer(SchemaContainer schema) { setLinkOut(schema, HAS_SCHEMA_CONTAINER); } @Override public SchemaContainer getSchemaContainer() { return out(HAS_SCHEMA_CONTAINER).nextOrDefaultExplicit(SchemaContainerImpl.class, null); } @Override public TraversalResult<? extends Node> getChildren() { return new TraversalResult<>(in(HAS_PARENT_NODE).frameExplicit(NodeImpl.class)); } @Override public TraversalResult<Node> getChildren(String branchUuid) { Database db = MeshInternal.get().database(); FramedGraph graph = Tx.getActive().getGraph(); Iterable<Edge> edges = graph.getEdges("e." + HAS_PARENT_NODE.toLowerCase() + "_branch", db.createComposedIndexKey(id(), branchUuid)); Iterator<Edge> it = edges.iterator(); Iterable<Edge> iterable = () -> it; Stream<Edge> stream = StreamSupport.stream(iterable.spliterator(), false); Stream<Node> nstream = stream.map(edge -> { Vertex vertex = edge.getVertex(OUT); return graph.frameElementExplicit(vertex, NodeImpl.class); }); return new TraversalResult<>(() -> nstream.iterator()); } @Override public Stream<Node> getChildrenStream(InternalActionContext ac) { Database db = MeshInternal.get().database(); FramedGraph graph = Tx.get().getGraph(); MeshAuthUser user = ac.getUser(); Iterable<Edge> edges = graph.getEdges("e." + HAS_PARENT_NODE.toLowerCase() + "_branch", db.createComposedIndexKey(id(), ac.getBranch().getUuid())); Iterator<Edge> it = edges.iterator(); Iterable<Edge> iterable = () -> it; Stream<Edge> stream = StreamSupport.stream(iterable.spliterator(), false); return stream .map(edge -> edge.getVertex(OUT)) .filter(vertex -> { Object id = vertex.getId(); return user.hasPermissionForId(id, READ_PERM) || user.hasPermissionForId(id, READ_PUBLISHED_PERM); }) .map(vertex -> graph.frameElementExplicit(vertex, NodeImpl.class)); } @Override public Node getParentNode(String branchUuid) { Database db = MeshInternal.get().database(); FramedGraph graph = Tx.getActive().getGraph(); Iterable<Edge> edges = graph.getEdges("e." 
+ HAS_PARENT_NODE.toLowerCase() + "_branch_out", db.createComposedIndexKey(id(), branchUuid)); Iterator<Edge> it = edges.iterator(); if (it.hasNext()) { Vertex in = it.next().getVertex(IN); return graph.frameElementExplicit(in, NodeImpl.class); } else { return null; } } @Override public void setParentNode(String branchUuid, Node parent) { outE(HAS_PARENT_NODE).has(BRANCH_UUID_KEY, branchUuid).removeAll(); addFramedEdge(HAS_PARENT_NODE, parent).setProperty(BRANCH_UUID_KEY, branchUuid); } @Override public Project getProject() { return out(ASSIGNED_TO_PROJECT, ProjectImpl.class).nextOrNull(); } @Override public void setProject(Project project) { setLinkOut(project, ASSIGNED_TO_PROJECT); } @Override public Node create(User creator, SchemaContainerVersion schemaVersion, Project project) { return create(creator, schemaVersion, project, project.getLatestBranch()); } /** * Create a new node and make sure to delegate the creation request to the main node root aggregation node. */ @Override public Node create(User creator, SchemaContainerVersion schemaVersion, Project project, Branch branch, String uuid) { if (!isBaseNode() && !isVisibleInBranch(branch.getUuid())) { log.error(String.format("Error while creating node in branch {%s}: requested parent node {%s} exists, but is not visible in branch.", branch.getName(), getUuid())); throw error(NOT_FOUND, "object_not_found_for_uuid", getUuid()); } // We need to use the (meshRoot)--(nodeRoot) node instead of the // (project)--(nodeRoot) node. Node node = MeshInternal.get().boot().nodeRoot().create(creator, schemaVersion, project, uuid); node.setParentNode(branch.getUuid(), this); node.setSchemaContainer(schemaVersion.getSchemaContainer()); // setCreated(creator); return node; } private String getLanguageInfo(List<String> languageTags) { Iterator<String> it = languageTags.iterator(); String langInfo = "["; while (it.hasNext()) { langInfo += it.next(); if (it.hasNext()) { langInfo += ","; } } langInfo += "]"; return langInfo; } @Override public NodeResponse transformToRestSync(InternalActionContext ac, int level, String... languageTags) { GenericParameters generic = ac.getGenericParameters(); FieldsSet fields = generic.getFields(); // Increment level for each node transformation to avoid stackoverflow situations level = level + 1; NodeResponse restNode = new NodeResponse(); if (fields.has("uuid")) { restNode.setUuid(getUuid()); // Performance shortcut to return now and ignore the other checks if (fields.size() == 1) { return restNode; } } SchemaContainer container = getSchemaContainer(); if (container == null) { throw error(BAD_REQUEST, "The schema container for node {" + getUuid() + "} could not be found."); } Branch branch = ac.getBranch(getProject()); if (fields.has("languages")) { restNode.setAvailableLanguages(getLanguageInfo(ac)); } setFields(ac, branch, restNode, level, fields, languageTags); if (fields.has("parent")) { setParentNodeInfo(ac, branch, restNode); } if (fields.has("perms")) { setRolePermissions(ac, restNode); } if (fields.has("children")) { setChildrenInfo(ac, branch, restNode); } if (fields.has("tags")) { setTagsToRest(ac, restNode, branch); } fillCommonRestFields(ac, fields, restNode); if (fields.has("breadcrumb")) { setBreadcrumbToRest(ac, restNode); } if (fields.has("path")) { setPathsToRest(ac, restNode, branch); } if (fields.has("project")) { setProjectReference(ac, restNode); } return restNode; } /** * Set the project reference to the node response model. 
* * @param ac * @param restNode */ private void setProjectReference(InternalActionContext ac, NodeResponse restNode) { restNode.setProject(getProject().transformToReference()); } /** * Set the parent node reference to the rest model. * * @param ac * @param branch * Use the given branch to identify the branch specific parent node * @param restNode * Model to be updated * @return */ private void setParentNodeInfo(InternalActionContext ac, Branch branch, NodeResponse restNode) { Node parentNode = getParentNode(branch.getUuid()); if (parentNode != null) { restNode.setParentNode(parentNode.transformToReference(ac)); } else { // Only the base node of the project has no parent. Therefore this // node must be a container. restNode.setContainer(true); } } /** * Set the node fields to the given rest model. * * @param ac * @param branch * Branch which will be used to locate the correct field container * @param restNode * Rest model which will be updated * @param fields * Field whitelist for the response * @param level * Current level of transformation * @param languageTags * @return */ private void setFields(InternalActionContext ac, Branch branch, NodeResponse restNode, int level, FieldsSet fieldsSet, String... languageTags) { VersioningParameters versioiningParameters = ac.getVersioningParameters(); NodeParameters nodeParameters = ac.getNodeParameters(); List<String> requestedLanguageTags = null; if (languageTags != null && languageTags.length > 0) { requestedLanguageTags = Arrays.asList(languageTags); } else { requestedLanguageTags = nodeParameters.getLanguageList(); } // First check whether the NGFC for the requested language,branch and version could be found. NodeGraphFieldContainer fieldContainer = findVersion(requestedLanguageTags, branch.getUuid(), versioiningParameters.getVersion()); if (fieldContainer == null) { // If a published version was requested, we check whether any // published language variant exists for the node, if not, response // with NOT_FOUND if (forVersion(versioiningParameters.getVersion()) == PUBLISHED && !getGraphFieldContainers(branch, PUBLISHED).iterator().hasNext()) { log.error("Could not find field container for languages {" + requestedLanguageTags + "} and branch {" + branch.getUuid() + "} and version params version {" + versioiningParameters.getVersion() + "}, branch {" + branch.getUuid() + "}"); throw error(NOT_FOUND, "node_error_published_not_found_for_uuid_branch_version", getUuid(), branch.getUuid()); } // If a specific version was requested, that does not exist, we also // return NOT_FOUND if (forVersion(versioiningParameters.getVersion()) == INITIAL) { throw error(NOT_FOUND, "object_not_found_for_version", versioiningParameters.getVersion()); } String langInfo = getLanguageInfo(requestedLanguageTags); if (log.isDebugEnabled()) { log.debug("The fields for node {" + getUuid() + "} can't be populated since the node has no matching language for the languages {" + langInfo + "}. Fields will be empty."); } // No field container was found so we can only set the schema // reference that points to the container (no version information // will be included) if (fieldsSet.has("schema")) { restNode.setSchema(getSchemaContainer().transformToReference()); } // TODO BUG Issue #119 - Actually we would need to throw a 404 in these cases but many current implementations rely on the empty node response. // The response will also contain information about other languages and general structure information. // We should change this behaviour and update the client implementations. 
// throw error(NOT_FOUND, "object_not_found_for_uuid", getUuid()); } else { Schema schema = fieldContainer.getSchemaContainerVersion().getSchema(); if (fieldsSet.has("container")) { restNode.setContainer(schema.getContainer()); } if (fieldsSet.has("displayField")) { restNode.setDisplayField(schema.getDisplayField()); } if (fieldsSet.has("displayName")) { restNode.setDisplayName(getDisplayName(ac)); } if (fieldsSet.has("language")) { restNode.setLanguage(fieldContainer.getLanguageTag()); } // List<String> fieldsToExpand = ac.getExpandedFieldnames(); // modify the language fallback list by moving the container's // language to the front List<String> containerLanguageTags = new ArrayList<>(requestedLanguageTags); containerLanguageTags.remove(restNode.getLanguage()); containerLanguageTags.add(0, restNode.getLanguage()); // Schema reference if (fieldsSet.has("schema")) { restNode.setSchema(fieldContainer.getSchemaContainerVersion().transformToReference()); } // Version reference if (fieldsSet.has("version") && fieldContainer.getVersion() != null) { restNode.setVersion(fieldContainer.getVersion().toString()); } // editor and edited if (fieldsSet.has("editor")) { User editor = fieldContainer.getEditor(); if (editor != null) { restNode.setEditor(editor.transformToReference()); } } if (fieldsSet.has("edited")) { restNode.setEdited(fieldContainer.getLastEditedDate()); } if (fieldsSet.has("fields")) { // Iterate over all fields and transform them to rest com.gentics.mesh.core.rest.node.FieldMap fields = new FieldMapImpl(); for (FieldSchema fieldEntry : schema.getFields()) { // boolean expandField = // fieldsToExpand.contains(fieldEntry.getName()) || // ac.getExpandAllFlag(); Field restField = fieldContainer.getRestFieldFromGraph(ac, fieldEntry.getName(), fieldEntry, containerLanguageTags, level); if (fieldEntry.isRequired() && restField == null) { // TODO i18n // throw error(BAD_REQUEST, "The field {" + // fieldEntry.getName() // + "} is a required field but it could not be found in the // node. Please add the field using an update call or change // the field schema and // remove the required flag."); fields.put(fieldEntry.getName(), null); } if (restField == null) { if (log.isDebugEnabled()) { log.debug("Field for key {" + fieldEntry.getName() + "} could not be found. Ignoring the field."); } } else { fields.put(fieldEntry.getName(), restField); } } restNode.setFields(fields); } } } /** * Set the children info to the rest model. * * @param ac * @param branch * Branch which will be used to identify the branch specific child nodes * @param restNode * Rest model which will be updated */ private void setChildrenInfo(InternalActionContext ac, Branch branch, NodeResponse restNode) { Map<String, NodeChildrenInfo> childrenInfo = new HashMap<>(); for (Node child : getChildren(branch.getUuid())) { if (ac.getUser().hasPermission(child, READ_PERM)) { String schemaName = child.getSchemaContainer().getName(); NodeChildrenInfo info = childrenInfo.get(schemaName); if (info == null) { info = new NodeChildrenInfo(); String schemaUuid = child.getSchemaContainer().getUuid(); info.setSchemaUuid(schemaUuid); info.setCount(1); childrenInfo.put(schemaName, info); } else { info.setCount(info.getCount() + 1); } } } restNode.setChildrenInfo(childrenInfo); } /** * Set the tag information to the rest model. 
* * @param ac * @param restNode * Rest model which will be updated * @param branch * Branch which will be used to identify the branch specific tags * @return */ private void setTagsToRest(InternalActionContext ac, NodeResponse restNode, Branch branch) { List<TagReference> list = getTags(branch).stream() .map(Tag::transformToReference) .collect(Collectors.toList()); restNode.setTags(list); } /** * Add the branch specific webroot and language paths to the given rest node. * * @param ac * @param restNode * Rest model which will be updated * @param branch * Branch which will be used to identify the nodes relations and thus the correct path can be determined * @return */ private void setPathsToRest(InternalActionContext ac, NodeResponse restNode, Branch branch) { VersioningParameters versioiningParameters = ac.getVersioningParameters(); if (ac.getNodeParameters().getResolveLinks() != LinkType.OFF) { String branchUuid = ac.getBranch(getProject()).getUuid(); ContainerType type = forVersion(versioiningParameters.getVersion()); LinkType linkType = ac.getNodeParameters().getResolveLinks(); // Path WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); String path = linkReplacer.resolve(ac, branchUuid, type, getUuid(), linkType, getProject().getName(), restNode.getLanguage()); restNode.setPath(path); // languagePaths restNode.setLanguagePaths(getLanguagePaths(ac, linkType, branch)); } } @Override public Map<String, String> getLanguagePaths(InternalActionContext ac, LinkType linkType, Branch branch) { VersioningParameters versioiningParameters = ac.getVersioningParameters(); String branchUuid = ac.getBranch(getProject()).getUuid(); ContainerType type = forVersion(versioiningParameters.getVersion()); Map<String, String> languagePaths = new HashMap<>(); WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); for (GraphFieldContainer currentFieldContainer : getGraphFieldContainers(branch, forVersion(versioiningParameters.getVersion()))) { String currLanguage = currentFieldContainer.getLanguageTag(); String languagePath = linkReplacer.resolve(ac, branchUuid, type, this, linkType, currLanguage); languagePaths.put(currLanguage, languagePath); } return languagePaths; } /** * Set the breadcrumb information to the given rest node. 
* * @param ac * @param restNode */ private void setBreadcrumbToRest(InternalActionContext ac, NodeResponse restNode) { List<NodeReference> breadcrumbs = getBreadcrumbNodeStream(ac) .map(node -> node.transformToReference(ac)) .collect(Collectors.toList()); restNode.setBreadcrumb(breadcrumbs); } @Override public TraversalResult<Node> getBreadcrumbNodes(InternalActionContext ac) { return new TraversalResult<>(() -> getBreadcrumbNodeStream(ac).iterator()); } private Stream<Node> getBreadcrumbNodeStream(InternalActionContext ac) { String branchUuid = ac.getBranch(getProject()).getUuid(); Node current = this; Deque<Node> breadcrumb = new ArrayDeque<>(); while (current != null) { breadcrumb.addFirst(current); current = current.getParentNode(branchUuid); } return breadcrumb.stream(); } @Override public Single<NavigationResponse> transformToNavigation(InternalActionContext ac) { NavigationParametersImpl parameters = new NavigationParametersImpl(ac); if (parameters.getMaxDepth() < 0) { throw error(BAD_REQUEST, "navigation_error_invalid_max_depth"); } return MeshInternal.get().database().asyncTx(() -> { // TODO assure that the schema version is correct if (!getSchemaContainer().getLatestVersion().getSchema().getContainer()) { throw error(BAD_REQUEST, "navigation_error_no_container"); } String etagKey = buildNavigationEtagKey(ac, this, parameters.getMaxDepth(), 0, ac.getBranch(getProject()).getUuid(), forVersion(ac .getVersioningParameters().getVersion())); String etag = ETag.hash(etagKey); ac.setEtag(etag, true); if (ac.matches(etag, true)) { return Single.error(new NotModifiedException()); } else { NavigationResponse response = new NavigationResponse(); return buildNavigationResponse(ac, this, parameters.getMaxDepth(), 0, response, response, ac.getBranch(getProject()).getUuid(), forVersion(ac.getVersioningParameters().getVersion())); } }); } @Override public NodeVersionsResponse transformToVersionList(InternalActionContext ac) { NodeVersionsResponse response = new NodeVersionsResponse(); Map<String, List<VersionInfo>> versions = new HashMap<>(); getGraphFieldContainersIt(ac.getBranch(), DRAFT).forEach(c -> { versions.put(c.getLanguageTag(), c.versions().stream() .map(v -> v.transformToVersionInfo(ac)) .collect(Collectors.toList())); }); response.setVersions(versions); return response; } /** * Generate the etag key for the requested navigation. * * @param ac * @param node * Current node to start building the navigation * @param maxDepth * Maximum depth of navigation * @param level * Current level of recursion * @param branchUuid * Branch uuid used to extract selected tree structure * @param type * @return */ private String buildNavigationEtagKey(InternalActionContext ac, Node node, int maxDepth, int level, String branchUuid, ContainerType type) { NavigationParametersImpl parameters = new NavigationParametersImpl(ac); StringBuilder builder = new StringBuilder(); builder.append(node.getETag(ac)); TraversalResult<? extends Node> nodes = node.getChildren(ac.getUser(), branchUuid, null, type); // Abort recursion when we reach the max level or when no more children // can be found. 
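		// At this point the key is simply the concatenation of the node etags collected so far,
		// e.g. (illustrative) etag(this) + etag(child) + etag(grandchild) + ...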
if (level == maxDepth || !nodes.iterator().hasNext()) { return builder.toString(); } for (Node child : nodes) { if (child.getSchemaContainer().getLatestVersion().getSchema().getContainer()) { builder.append(buildNavigationEtagKey(ac, child, maxDepth, level + 1, branchUuid, type)); } else if (parameters.isIncludeAll()) { builder.append(buildNavigationEtagKey(ac, child, maxDepth, level, branchUuid, type)); } } return builder.toString(); } /** * Recursively build the navigation response. * * @param ac * Action context * @param node * Current node that should be handled in combination with the given navigation element * @param maxDepth * Maximum depth for the navigation * @param level * Zero based level of the current navigation element * @param navigation * Current navigation response * @param currentElement * Current navigation element for the given level * @param branchUuid * Branch uuid to be used for loading children of nodes * @param type * container type to be used for transformation * @return */ private Single<NavigationResponse> buildNavigationResponse(InternalActionContext ac, Node node, int maxDepth, int level, NavigationResponse navigation, NavigationElement currentElement, String branchUuid, ContainerType type) { TraversalResult<? extends Node> nodes = node.getChildren(ac.getUser(), branchUuid, null, type); List<Single<NavigationResponse>> obsResponses = new ArrayList<>(); obsResponses.add(node.transformToRest(ac, 0).map(response -> { // Set current element data currentElement.setUuid(response.getUuid()); currentElement.setNode(response); return navigation; })); // Abort recursion when we reach the max level or when no more children // can be found. if (level == maxDepth || !nodes.iterator().hasNext()) { List<Observable<NavigationResponse>> obsList = obsResponses.stream().map(ele -> ele.toObservable()).collect(Collectors.toList()); return Observable.merge(obsList).lastOrError(); } NavigationParameters parameters = new NavigationParametersImpl(ac); // Add children for (Node child : nodes) { // TODO assure that the schema version is correct? 
// TODO also allow navigations over containers if (child.getSchemaContainer().getLatestVersion().getSchema().getContainer()) { NavigationElement childElement = new NavigationElement(); // We found at least one child so lets create the array if (currentElement.getChildren() == null) { currentElement.setChildren(new ArrayList<>()); } currentElement.getChildren().add(childElement); obsResponses.add(buildNavigationResponse(ac, child, maxDepth, level + 1, navigation, childElement, branchUuid, type)); } else if (parameters.isIncludeAll()) { // We found at least one child so lets create the array if (currentElement.getChildren() == null) { currentElement.setChildren(new ArrayList<>()); } NavigationElement childElement = new NavigationElement(); currentElement.getChildren().add(childElement); obsResponses.add(buildNavigationResponse(ac, child, maxDepth, level, navigation, childElement, branchUuid, type)); } } List<Observable<NavigationResponse>> obsList = obsResponses.stream().map(ele -> ele.toObservable()).collect(Collectors.toList()); return Observable.merge(obsList).lastOrError(); } @Override public NodeReference transformToReference(InternalActionContext ac) { Branch branch = ac.getBranch(getProject()); NodeReference nodeReference = new NodeReference(); nodeReference.setUuid(getUuid()); nodeReference.setDisplayName(getDisplayName(ac)); nodeReference.setSchema(getSchemaContainer().transformToReference()); nodeReference.setProjectName(getProject().getName()); if (LinkType.OFF != ac.getNodeParameters().getResolveLinks()) { WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); ContainerType type = forVersion(ac.getVersioningParameters().getVersion()); String url = linkReplacer.resolve(ac, branch.getUuid(), type, this, ac.getNodeParameters().getResolveLinks(), ac.getNodeParameters() .getLanguages()); nodeReference.setPath(url); } return nodeReference; } @Override public NodeReference transformToMinimalReference() { NodeReference ref = new NodeReference(); ref.setUuid(getUuid()); ref.setSchema(getSchemaContainer().transformToReference()); return ref; } @Override public NodeFieldListItem toListItem(InternalActionContext ac, String[] languageTags) { // Create the rest field and populate the fields NodeFieldListItemImpl listItem = new NodeFieldListItemImpl(getUuid()); String branchUuid = ac.getBranch(getProject()).getUuid(); ContainerType type = forVersion(new VersioningParametersImpl(ac).getVersion()); if (ac.getNodeParameters().getResolveLinks() != LinkType.OFF) { listItem.setUrl(MeshInternal.get().webRootLinkReplacer().resolve(ac, branchUuid, type, this, ac.getNodeParameters().getResolveLinks(), languageTags)); } return listItem; } @Override public PublishStatusResponse transformToPublishStatus(InternalActionContext ac) { PublishStatusResponse publishStatus = new PublishStatusResponse(); Map<String, PublishStatusModel> languages = getLanguageInfo(ac); publishStatus.setAvailableLanguages(languages); return publishStatus; } private Map<String, PublishStatusModel> getLanguageInfo(InternalActionContext ac) { Map<String, PublishStatusModel> languages = new HashMap<>(); Branch branch = ac.getBranch(getProject()); getGraphFieldContainers(branch, PUBLISHED).stream().forEach(c -> { String date = DateUtils.toISO8601(c.getLastEditedTimestamp(), 0); PublishStatusModel status = new PublishStatusModel(); status.setPublished(true); status.setVersion(c.getVersion().toString()); User editor = c.getEditor(); if (editor != null) { status.setPublisher(editor.transformToReference()); } 
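			// The reported publish date is the container's last edited timestamp rendered as ISO-8601 (see DateUtils.toISO8601 above).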
status.setPublishDate(date); languages.put(c.getLanguageTag(), status); }); getGraphFieldContainers(branch, DRAFT).stream().filter(c -> !languages.containsKey(c.getLanguageTag())).forEach(c -> { PublishStatusModel status = new PublishStatusModel().setPublished(false).setVersion(c.getVersion().toString()); languages.put(c.getLanguageTag(), status); }); return languages; } @Override public void publish(InternalActionContext ac, Branch branch, BulkActionContext bac) { PublishParameters parameters = ac.getPublishParameters(); // .store(this, branchUuid, ContainerType.PUBLISHED, false); bac.batch().add(onUpdated()); // Handle recursion if (parameters.isRecursive()) { // TODO handle specific branch for (Node child : getChildren()) { child.publish(ac, branch, bac); } } assertPublishConsistency(ac, branch); } @Override public void publish(InternalActionContext ac, BulkActionContext bac) { Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); List<? extends NodeGraphFieldContainer> unpublishedContainers = getGraphFieldContainers(branch, ContainerType.DRAFT).stream().filter(c -> !c .isPublished(branchUuid)).collect(Collectors.toList()); // publish all unpublished containers and handle recursion unpublishedContainers.stream().forEach(c -> { NodeGraphFieldContainer newVersion = publish(ac, c.getLanguageTag(), branch, ac.getUser()); bac.add(newVersion.onPublish(branchUuid)); }); assertPublishConsistency(ac, branch); // Handle recursion after publishing the current node. // This is done to ensure the publish consistency. // Even if the publishing process stops at the initial // level the consistency is correct. PublishParameters parameters = ac.getPublishParameters(); if (parameters.isRecursive()) { // TODO handle specific branch for (Node node : getChildren()) { node.publish(ac, bac); } } bac.process(); } @Override public void takeOffline(InternalActionContext ac, BulkActionContext bac, Branch branch, PublishParameters parameters) { // Handle recursion first to start at the leafs if (parameters.isRecursive()) { for (Node node : getChildren()) { node.takeOffline(ac, bac, branch, parameters); } } String branchUuid = branch.getUuid(); TraversalResult<? 
extends GraphFieldContainerEdgeImpl> publishEdges = new TraversalResult<>( getGraphFieldContainerEdges(branchUuid, PUBLISHED)); // Remove the published edge for each found container publishEdges.forEach(edge -> { NodeGraphFieldContainer content = edge.getNodeContainer(); bac.add(content.onTakenOffline(branchUuid)); edge.remove(); if (content.isAutoPurgeEnabled() && content.isPurgeable()) { content.purge(bac); } }); assertPublishConsistency(ac, branch); bac.process(); } @Override public void takeOffline(InternalActionContext ac, BulkActionContext bac) { Branch branch = ac.getBranch(getProject()); PublishParameters parameters = ac.getPublishParameters(); takeOffline(ac, bac, branch, parameters); } @Override public PublishStatusModel transformToPublishStatus(InternalActionContext ac, String languageTag) { Branch branch = ac.getBranch(getProject()); NodeGraphFieldContainer container = getGraphFieldContainer(languageTag, branch.getUuid(), PUBLISHED); if (container != null) { String date = container.getLastEditedDate(); PublishStatusModel status = new PublishStatusModel(); status.setPublished(true); status.setVersion(container.getVersion().toString()); User editor = container.getEditor(); if (editor != null) { status.setPublisher(editor.transformToReference()); } status.setPublishDate(date); return status; } else { container = getGraphFieldContainer(languageTag, branch.getUuid(), DRAFT); if (container == null) { throw error(NOT_FOUND, "error_language_not_found", languageTag); } return new PublishStatusModel().setPublished(false).setVersion(container.getVersion().toString()); } } @Override public void publish(InternalActionContext ac, BulkActionContext bac, String languageTag) { Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); // get the draft version of the given language NodeGraphFieldContainer draftVersion = getGraphFieldContainer(languageTag, branchUuid, DRAFT); // if not existent -> NOT_FOUND if (draftVersion == null) { throw error(NOT_FOUND, "error_language_not_found", languageTag); } // If the located draft version was already published we are done if (draftVersion.isPublished(branchUuid)) { return; } // TODO check whether all required fields are filled, if not -> unable to publish NodeGraphFieldContainer publishedContainer = publish(ac, draftVersion.getLanguageTag(), branch, ac.getUser()); // Invoke a store of the document since it must now also be added to the published index bac.add(publishedContainer.onPublish(branchUuid)); } @Override public void takeOffline(InternalActionContext ac, BulkActionContext bac, Branch branch, String languageTag) { String branchUuid = branch.getUuid(); // Locate the published container NodeGraphFieldContainer published = getGraphFieldContainer(languageTag, branchUuid, PUBLISHED); if (published == null) { throw error(NOT_FOUND, "error_language_not_found", languageTag); } bac.add(published.onTakenOffline(branchUuid)); // Remove the "published" edge getGraphFieldContainerEdge(languageTag, branchUuid, PUBLISHED).remove(); assertPublishConsistency(ac, branch); bac.process(); } @Override public void setPublished(InternalActionContext ac, NodeGraphFieldContainer container, String branchUuid) { String languageTag = container.getLanguageTag(); boolean isAutoPurgeEnabled = container.isAutoPurgeEnabled(); // Remove an existing published edge EdgeFrame currentPublished = getGraphFieldContainerEdgeFrame(languageTag, branchUuid, PUBLISHED); if (currentPublished != null) { // We need to remove the edge first since updateWebrootPathInfo 
will // check the published edge again NodeGraphFieldContainerImpl oldPublishedContainer = currentPublished.inV().nextOrDefaultExplicit(NodeGraphFieldContainerImpl.class, null); currentPublished.remove(); oldPublishedContainer.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_publish"); if (ac.isPurgeAllowed() && isAutoPurgeEnabled && oldPublishedContainer.isPurgeable()) { oldPublishedContainer.purge(); } } if (ac.isPurgeAllowed()) { // Check whether a previous draft can be purged. NodeGraphFieldContainer prev = container.getPreviousVersion(); if (isAutoPurgeEnabled && prev != null && prev.isPurgeable()) { prev.purge(); } } // create new published edge GraphFieldContainerEdge edge = addFramedEdge(HAS_FIELD_CONTAINER, container, GraphFieldContainerEdgeImpl.class); edge.setLanguageTag(languageTag); edge.setBranchUuid(branchUuid); edge.setType(PUBLISHED); container.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_publish"); } @Override public NodeGraphFieldContainer publish(InternalActionContext ac, String languageTag, Branch branch, User user) { String branchUuid = branch.getUuid(); // create published version NodeGraphFieldContainer newVersion = createGraphFieldContainer(languageTag, branch, user); newVersion.setVersion(newVersion.getVersion().nextPublished()); setPublished(ac, newVersion, branchUuid); return newVersion; } @Override public NodeGraphFieldContainer findVersion(List<String> languageTags, String branchUuid, String version) { NodeGraphFieldContainer fieldContainer = null; // TODO refactor the type handling and don't return INITIAL. ContainerType type = forVersion(version); for (String languageTag : languageTags) { // Don't start the version lookup using the initial version. Instead start at the end of the chain and use the DRAFT version instead. fieldContainer = getGraphFieldContainer(languageTag, branchUuid, type == INITIAL ? DRAFT : type); // Traverse the chain downwards and stop once we found our target version or we reached the end. if (fieldContainer != null && type == INITIAL) { while (fieldContainer != null && !version.equals(fieldContainer.getVersion().toString())) { fieldContainer = fieldContainer.getPreviousVersion(); } } // We found a container for one of the languages if (fieldContainer != null) { break; } } return fieldContainer; } @Override public List<String> getAvailableLanguageNames() { List<String> languageTags = new ArrayList<>(); // TODO it would be better to store the languagetag along with the edge for (GraphFieldContainer container : getDraftGraphFieldContainers()) { languageTags.add(container.getLanguageTag()); } return languageTags; } @Override public List<String> getAvailableLanguageNames(Branch branch, ContainerType type) { List<String> languageTags = new ArrayList<>(); for (GraphFieldContainer container : getGraphFieldContainers(branch, type)) { languageTags.add(container.getLanguageTag()); } return languageTags; } @Override public void delete(BulkActionContext bac, boolean ignoreChecks, boolean recursive) { if (!ignoreChecks) { // Prevent deletion of basenode if (getProject().getBaseNode().getUuid().equals(getUuid())) { throw error(METHOD_NOT_ALLOWED, "node_basenode_not_deletable"); } } // Delete subfolders if (log.isDebugEnabled()) { log.debug("Deleting node {" + getUuid() + "}"); } // TODO Only affect a specific branch? 
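		// Children are deleted depth-first before this node's own containers and vertex are removed.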
if (recursive) { for (Node child : getChildren()) { child.delete(bac); bac.process(); } } // Delete all initial containers (which will delete all containers) for (NodeGraphFieldContainer container : getGraphFieldContainersIt(INITIAL)) { container.delete(bac); } if (log.isDebugEnabled()) { log.debug("Deleting node {" + getUuid() + "} vertex."); } addReferenceUpdates(bac); bac.add(onDeleted(getUuid(), getSchemaContainer(), null, null, null)); getElement().remove(); bac.process(); } @Override public Stream<? extends NodeGraphField> getInboundReferences() { return toStream(inE(HAS_FIELD, HAS_ITEM) .has(NodeGraphFieldImpl.class) .frameExplicit(NodeGraphFieldImpl.class)); } /** * Adds reference update events to the context for all draft and published contents that reference this node. * @param bac */ private void addReferenceUpdates(BulkActionContext bac) { Set<String> handledNodeUuids = new HashSet<>(); getInboundReferences() .flatMap(NodeGraphField::getReferencingContents) .forEach(nodeContainer -> { for (GraphFieldContainerEdgeImpl edge : nodeContainer.inE(HAS_FIELD_CONTAINER).frameExplicit(GraphFieldContainerEdgeImpl.class)) { ContainerType type = edge.getType(); // Only handle published or draft contents if (type.equals(DRAFT) || type.equals(PUBLISHED)) { Node node = nodeContainer.getParentNode(); String uuid = node.getUuid(); String languageTag = nodeContainer.getLanguageTag(); String branchUuid = edge.getBranchUuid(); String key = uuid + languageTag + branchUuid + type.getCode(); if (!handledNodeUuids.contains(key)) { bac.add(onReferenceUpdated(node.getUuid(), node.getSchemaContainer(), branchUuid, type, languageTag)); handledNodeUuids.add(key); } } } }); } @Override public void delete(BulkActionContext bac) { delete(bac, false, true); } @Override public void deleteFromBranch(InternalActionContext ac, Branch branch, BulkActionContext bac, boolean ignoreChecks) { DeleteParameters parameters = ac.getDeleteParameters(); // 1. Remove subfolders from branch String branchUuid = branch.getUuid(); for (Node child : getChildren(branchUuid)) { if (!parameters.isRecursive()) { throw error(BAD_REQUEST, "node_error_delete_failed_node_has_children"); } child.deleteFromBranch(ac, branch, bac, ignoreChecks); } // 2. Delete all language containers for (NodeGraphFieldContainer container : getGraphFieldContainers(branch, DRAFT)) { deleteLanguageContainer(ac, branch, container.getLanguageTag(), bac, false); } // 3. Now check if the node has no more field containers in any branch. We can delete it in those cases if (getGraphFieldContainerCount() == 0) { delete(bac); } else { // Otherwise we need to remove the "parent" edge for the branch // first remove the "parent" edge (because the node itself will // probably not be deleted, but just removed from the branch) outE(HAS_PARENT_NODE).has(BRANCH_UUID_KEY, branchUuid).removeAll(); } } /** * Get a vertex traversal to find the children of this node, this user has read permission for. * * @param requestUser * user * @param branchUuid * branch uuid * @param languageTags * Only list nodes which match the given language tags. Don't filter if the language tags list is null * @param type * edge type * @return vertex traversal */ private VertexTraversal<?, ?, ?> getChildrenTraversal(MeshAuthUser requestUser, String branchUuid, List<String> languageTags, ContainerType type) { String permLabel = type == PUBLISHED ? 
READ_PUBLISHED_PERM.label() : READ_PERM.label(); VertexTraversal<?, ?, ?> traversal = null; if (branchUuid != null) { traversal = inE(HAS_PARENT_NODE).has(BRANCH_UUID_KEY, branchUuid).outV(); } else { traversal = in(HAS_PARENT_NODE); } traversal = traversal.mark().in(permLabel).out(HAS_ROLE).in(HAS_USER).retain(requestUser).back(); if (branchUuid != null || type != null) { EdgeTraversal<?, ?, ?> edgeTraversal = traversal.mark().outE(HAS_FIELD_CONTAINER); if (branchUuid != null) { edgeTraversal = edgeTraversal.has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid); } if (type != null) { edgeTraversal = edgeTraversal.has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()); } // Filter out nodes which are not listed in the given language tags if (languageTags != null) { edgeTraversal = edgeTraversal.filter(edge -> { String languageTag = edge.getProperty(GraphFieldContainerEdgeImpl.LANGUAGE_TAG_KEY); return languageTags.contains(languageTag); }); } traversal = (VertexTraversal<?, ?, ?>) edgeTraversal.outV().back(); } return traversal; } @Override public TraversalResult<? extends Node> getChildren(MeshAuthUser requestUser, String branchUuid, List<String> languageTags, ContainerType type) { return new TraversalResult<>(getChildrenTraversal(requestUser, branchUuid, languageTags, type).frameExplicit(NodeImpl.class)); } @Override public TransformablePage<? extends Node> getChildren(InternalActionContext ac, List<String> languageTags, String branchUuid, ContainerType type, PagingParameters pagingInfo) { String indexName = "e." + HAS_PARENT_NODE.toLowerCase() + "_branch"; Object indexKey = DB.get().createComposedIndexKey(id(), branchUuid); GraphPermission perm = type == PUBLISHED ? READ_PUBLISHED_PERM : READ_PERM; if (languageTags == null) { return new DynamicTransformablePageImpl<>(ac.getUser(), indexName, indexKey, Direction.OUT, NodeImpl.class, pagingInfo, perm, null, true); } else { return new DynamicTransformablePageImpl<>(ac.getUser(), indexName, indexKey, Direction.OUT, NodeImpl.class, pagingInfo, perm, (item) -> { // Filter out nodes which do not provide one of the specified language tags and type for (String languageTag : languageTags) { if (item.getGraphFieldContainerEdge(languageTag, branchUuid, type) != null) { return true; } } return false; }, true); } } @Override public TransformablePage<? extends Tag> getTags(User user, PagingParameters params, Branch branch) { VertexTraversal<?, ?, ?> traversal = TagEdgeImpl.getTagTraversal(this, branch); return new DynamicTransformablePageImpl<Tag>(user, traversal, params, READ_PERM, TagImpl.class); } @Override public void applyPermissions(EventQueueBatch batch, Role role, boolean recursive, Set<GraphPermission> permissionsToGrant, Set<GraphPermission> permissionsToRevoke) { if (recursive) { // TODO for branch? 
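		// Note that getChildren() is not branch aware here (see the TODO above), so permissions are applied to children across all branches.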
for (Node child : getChildren()) { child.applyPermissions(batch, role, recursive, permissionsToGrant, permissionsToRevoke); } } super.applyPermissions(batch, role, recursive, permissionsToGrant, permissionsToRevoke); } @Override public String getDisplayName(InternalActionContext ac) { NodeParameters nodeParameters = ac.getNodeParameters(); VersioningParameters versioningParameters = ac.getVersioningParameters(); NodeGraphFieldContainer container = findVersion(nodeParameters.getLanguageList(), ac.getBranch(getProject()).getUuid(), versioningParameters .getVersion()); if (container == null) { if (log.isDebugEnabled()) { log.debug("Could not find any matching i18n field container for node {" + getUuid() + "}."); } return null; } else { // Determine the display field name and load the string value // from that field. return container.getDisplayFieldValue(); } } /** * Update the node language or create a new draft for the specific language. This method will also apply conflict detection and take care of deduplication. * * * <p> * Conflict detection: Conflict detection only occurs during update requests. Two diffs are created. The update request will be compared against base * version graph field container (version which is referenced by the request). The second diff is being created in-between the base version graph field * container and the latest version of the graph field container. This diff identifies previous changes in between those version. These both diffs are * compared in order to determine their intersection. The intersection identifies those fields which have been altered in between both versions and which * would now also be touched by the current request. This situation causes a conflict and the update would abort. * * <p> * Conflict cases * <ul> * <li>Initial creates - No conflict handling needs to be performed</li> * <li>Migration check - Nodes which have not yet migrated can't be updated</li> * </ul> * * * <p> * Deduplication: Field values that have not been changed in between the request data and the last version will not cause new fields to be created in new * version graph field containers. The new version graph field container will instead reference those fields from the previous graph field container * version. Please note that this deduplication only applies to complex fields (e.g.: Lists, Micronode) * * @param ac * @param batch * Batch which will be used to update the search index * @return */ @Override public boolean update(InternalActionContext ac, EventQueueBatch batch) { NodeUpdateRequest requestModel = ac.fromJson(NodeUpdateRequest.class); if (isEmpty(requestModel.getLanguage())) { throw error(BAD_REQUEST, "error_language_not_set"); } // Check whether the tags need to be updated List<TagReference> tags = requestModel.getTags(); if (tags != null) { updateTags(ac, batch, requestModel.getTags()); } // Set the language tag parameter here in order to return the updated language in the response String languageTag = requestModel.getLanguage(); NodeParameters nodeParameters = ac.getNodeParameters(); nodeParameters.setLanguages(languageTag); Language language = MeshInternal.get().boot().languageRoot().findByLanguageTag(languageTag); if (language == null) { throw error(BAD_REQUEST, "error_language_not_found", requestModel.getLanguage()); } Branch branch = ac.getBranch(getProject()); NodeGraphFieldContainer latestDraftVersion = getGraphFieldContainer(languageTag, branch, DRAFT); // Check whether this is the first time that an update for the given language and branch occurs. 
In this case a new container must be created. // This means that no conflict check can be performed. Conflict checks only occur for updates on existing contents. if (latestDraftVersion == null) { // Create a new field container latestDraftVersion = createGraphFieldContainer(languageTag, branch, ac.getUser()); // Check whether the node has a parent node in this branch, if not, the request is supposed to be a create request // and we get the parent node from this create request if (getParentNode(branch.getUuid()) == null) { NodeCreateRequest createRequest = JsonUtil.readValue(ac.getBodyAsString(), NodeCreateRequest.class); if (createRequest.getParentNode() == null || isEmpty(createRequest.getParentNode().getUuid())) { throw error(BAD_REQUEST, "node_missing_parentnode_field"); } Node parentNode = getProject().getNodeRoot().loadObjectByUuid(ac, createRequest.getParentNode().getUuid(), CREATE_PERM); // check whether the parent node is visible in the branch if (!parentNode.isBaseNode() && !parentNode.isVisibleInBranch(branch.getUuid())) { log.error( String.format("Error while creating node in branch {%s}: requested parent node {%s} exists, but is not visible in branch.", branch.getName(), parentNode.getUuid())); throw error(NOT_FOUND, "object_not_found_for_uuid", createRequest.getParentNode().getUuid()); } setParentNode(branch.getUuid(), parentNode); } latestDraftVersion.updateFieldsFromRest(ac, requestModel.getFields()); batch.add(latestDraftVersion.onCreated(branch.getUuid(), DRAFT)); return true; } else { String version = requestModel.getVersion(); if (version == null) { log.debug("No version was specified. Assuming 'draft' for latest version"); version = "draft"; } // Make sure the container was already migrated. Otherwise the update can't proceed. SchemaContainerVersion schemaContainerVersion = latestDraftVersion.getSchemaContainerVersion(); if (!latestDraftVersion.getSchemaContainerVersion().equals(branch.findLatestSchemaVersion(schemaContainerVersion .getSchemaContainer()))) { throw error(BAD_REQUEST, "node_error_migration_incomplete"); } // Load the base version field container in order to create the diff NodeGraphFieldContainer baseVersionContainer = findVersion(requestModel.getLanguage(), branch.getUuid(), version); if (baseVersionContainer == null) { throw error(BAD_REQUEST, "node_error_draft_not_found", version, requestModel.getLanguage()); } latestDraftVersion.getSchemaContainerVersion().getSchema().assertForUnhandledFields(requestModel.getFields()); // TODO handle simplified case in which baseContainerVersion and // latestDraftVersion are equal List<FieldContainerChange> baseVersionDiff = baseVersionContainer.compareTo(latestDraftVersion); List<FieldContainerChange> requestVersionDiff = latestDraftVersion.compareTo(requestModel.getFields()); // Compare both sets of change sets List<FieldContainerChange> intersect = baseVersionDiff.stream().filter(requestVersionDiff::contains).collect(Collectors.toList()); // Check whether the update was not based on the latest draft version. In that case a conflict check needs to occur. 
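			// Illustrative summary of the rule implemented below:
			//   conflict <=> (requested base version != latest draft version)
			//                AND (baseVersionDiff ∩ requestVersionDiff) is not empty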
if (!latestDraftVersion.getVersion().getFullVersion().equals(version)) { // Check whether a conflict has been detected if (intersect.size() > 0) { NodeVersionConflictException conflictException = new NodeVersionConflictException("node_error_conflict_detected"); conflictException.setOldVersion(baseVersionContainer.getVersion().toString()); conflictException.setNewVersion(latestDraftVersion.getVersion().toString()); for (FieldContainerChange fcc : intersect) { conflictException.addConflict(fcc.getFieldCoordinates()); } throw conflictException; } } // Make sure to only update those fields which have been altered in between the latest version and the current request. Remove // unaffected fields from the rest request in order to prevent duplicate references. We don't want to touch field that have not been changed. // Otherwise the graph field references would no longer point to older revisions of the same field. Set<String> fieldsToKeepForUpdate = requestVersionDiff.stream().map(e -> e.getFieldKey()).collect(Collectors.toSet()); for (String fieldKey : requestModel.getFields().keySet()) { if (fieldsToKeepForUpdate.contains(fieldKey)) { continue; } if (log.isDebugEnabled()) { log.debug("Removing field from request {" + fieldKey + "} in order to handle deduplication."); } requestModel.getFields().remove(fieldKey); } // Check whether the request still contains data which needs to be updated. if (!requestModel.getFields().isEmpty()) { // Create new field container as clone of the existing NodeGraphFieldContainer newDraftVersion = createGraphFieldContainer(language.getLanguageTag(), branch, ac.getUser(), latestDraftVersion, true); // Update the existing fields newDraftVersion.updateFieldsFromRest(ac, requestModel.getFields()); // Purge the old draft if (ac.isPurgeAllowed() && newDraftVersion.isAutoPurgeEnabled() && latestDraftVersion.isPurgeable()) { latestDraftVersion.purge(); } latestDraftVersion = newDraftVersion; batch.add(newDraftVersion.onUpdated(branch.getUuid(), DRAFT)); return true; } } return false; } @Override public TransformablePage<? extends Tag> updateTags(InternalActionContext ac, EventQueueBatch batch) { List<Tag> tags = getTagsToSet(ac, batch); Branch branch = ac.getBranch(); applyTags(branch, tags, batch); User user = ac.getUser(); return getTags(user, ac.getPagingParameters(), branch); } @Override public void updateTags(InternalActionContext ac, EventQueueBatch batch, List<TagReference> list) { List<Tag> tags = getTagsToSet(list, ac, batch); Branch branch = ac.getBranch(); applyTags(branch, tags, batch); } private void applyTags(Branch branch, List<? extends Tag> tags, EventQueueBatch batch) { List<? extends Tag> currentTags = getTags(branch).list(); List<Tag> toBeAdded = tags.stream() .filter(StreamUtil.not(new HashSet<>(currentTags)::contains)) .collect(Collectors.toList()); toBeAdded.forEach(tag -> { addTag(tag, branch); batch.add(onTagged(tag, branch, ASSIGNED)); }); List<Tag> toBeRemoved = currentTags.stream() .filter(StreamUtil.not(new HashSet<>(tags)::contains)) .collect(Collectors.toList()); toBeRemoved.forEach(tag -> { removeTag(tag, branch); batch.add(onTagged(tag, branch, UNASSIGNED)); }); } @Override public void moveTo(InternalActionContext ac, Node targetNode, EventQueueBatch batch) { // TODO should we add a guard that terminates this loop when it runs to // long? // Check whether the target node is part of the subtree of the source // node. 
// We must detect and prevent such actions because those would // invalidate the tree structure Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); Node parent = targetNode.getParentNode(branchUuid); while (parent != null) { if (parent.getUuid().equals(getUuid())) { throw error(BAD_REQUEST, "node_move_error_not_allowed_to_move_node_into_one_of_its_children"); } parent = parent.getParentNode(branchUuid); } if (!targetNode.getSchemaContainer().getLatestVersion().getSchema().getContainer()) { throw error(BAD_REQUEST, "node_move_error_targetnode_is_no_folder"); } if (getUuid().equals(targetNode.getUuid())) { throw error(BAD_REQUEST, "node_move_error_same_nodes"); } setParentNode(branchUuid, targetNode); // Update published graph field containers getGraphFieldContainers(branchUuid, PUBLISHED).stream().forEach(container -> { container.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_move"); }); // Update draft graph field containers getGraphFieldContainers(branchUuid, DRAFT).stream().forEach(container -> { container.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_move"); }); batch.add(onNodeMoved(branchUuid, targetNode)); assertPublishConsistency(ac, branch); } @Override public void deleteLanguageContainer(InternalActionContext ac, Branch branch, String languageTag, BulkActionContext bac, boolean failForLastContainer) { // 1. Check whether the container has also a published variant. We need to take it offline in those cases NodeGraphFieldContainer container = getGraphFieldContainer(languageTag, branch, PUBLISHED); if (container != null) { takeOffline(ac, bac, branch, languageTag); } // 2. Load the draft container and remove it from the branch container = getGraphFieldContainer(languageTag, branch, DRAFT); if (container == null) { throw error(NOT_FOUND, "node_no_language_found", languageTag); } container.deleteFromBranch(branch, bac); // No need to delete the published variant because if the container was published the take offline call handled it // starting with the old draft, delete all GFC that have no next and are not draft (for other branches) NodeGraphFieldContainer dangling = container; while (dangling != null && !dangling.isDraft() && !dangling.hasNextVersion()) { NodeGraphFieldContainer toDelete = dangling; dangling = toDelete.getPreviousVersion(); toDelete.delete(bac); } NodeGraphFieldContainer initial = getGraphFieldContainer(languageTag, branch, INITIAL); if (initial != null) { // Remove the initial edge initial.inE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branch.getUuid()) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, ContainerType.INITIAL.getCode()).removeAll(); // starting with the old initial, delete all GFC that have no previous and are not initial (for other branches) dangling = initial; while (dangling != null && !dangling.isInitial() && !dangling.hasPreviousVersion()) { NodeGraphFieldContainer toDelete = dangling; // since the GFC "toDelete" was only used by this branch, it can not have more than one "next" GFC // (multiple "next" would have to belong to different branches, and for every branch, there would have to be // an INITIAL, which would have to be either this GFC or a previous) dangling = toDelete.getNextVersions().iterator().next(); toDelete.delete(bac, false); } } // 3. Check whether this was be the last container of the node for this branch DeleteParameters parameters = ac.getDeleteParameters(); if (failForLastContainer) { TraversalResult<? 
extends NodeGraphFieldContainer> draftContainers = getGraphFieldContainers(branch.getUuid(), DRAFT); TraversalResult<? extends NodeGraphFieldContainer> publishContainers = getGraphFieldContainers(branch.getUuid(), PUBLISHED); boolean wasLastContainer = !draftContainers.iterator().hasNext() && !publishContainers.iterator().hasNext(); if (!parameters.isRecursive() && wasLastContainer) { throw error(BAD_REQUEST, "node_error_delete_failed_last_container_for_branch"); } // Also delete the node and children if (parameters.isRecursive() && wasLastContainer) { deleteFromBranch(ac, branch, bac, false); } } } @Override public PathSegment getSegment(String branchUuid, ContainerType type, String segment) { // Check the different language versions for (NodeGraphFieldContainer container : getGraphFieldContainersIt(branchUuid, type)) { Schema schema = container.getSchemaContainerVersion().getSchema(); String segmentFieldName = schema.getSegmentField(); // First check whether a string field exists for the given name StringGraphField field = container.getString(segmentFieldName); if (field != null) { String fieldValue = field.getString(); if (segment.equals(fieldValue)) { return new PathSegment(container, field, container.getLanguageTag(), segment); } } // No luck yet - lets check whether a binary field matches the // segmentField BinaryGraphField binaryField = container.getBinary(segmentFieldName); if (binaryField == null) { if (log.isDebugEnabled()) { log.debug("The node {" + getUuid() + "} did not contain a string or a binary field for segment field name {" + segmentFieldName + "}"); } } else { String binaryFilename = binaryField.getFileName(); if (segment.equals(binaryFilename)) { return new PathSegment(container, binaryField, container.getLanguageTag(), segment); } } } return null; } @Override public Path resolvePath(String branchUuid, ContainerType type, Path path, Stack<String> pathStack) { if (pathStack.isEmpty()) { return path; } String segment = pathStack.pop(); if (log.isDebugEnabled()) { log.debug("Resolving for path segment {" + segment + "}"); } FramedGraph graph = Tx.get().getGraph(); String segmentInfo = GraphFieldContainerEdgeImpl.composeSegmentInfo(this, segment); Object key = GraphFieldContainerEdgeImpl.composeWebrootIndexKey(segmentInfo, branchUuid, type); Iterator<? extends GraphFieldContainerEdge> edges = graph.getFramedEdges(WEBROOT_INDEX_NAME, key, GraphFieldContainerEdgeImpl.class) .iterator(); if (edges.hasNext()) { GraphFieldContainerEdge edge = edges.next(); Node childNode = edge.getNode(); PathSegment pathSegment = childNode.getSegment(branchUuid, type, segment); if (pathSegment != null) { path.addSegment(pathSegment); return childNode.resolvePath(branchUuid, type, path, pathStack); } } return path; } /** * Generate the etag for nodes. 
The etag consists of: * <ul> * <li>uuid of the node</li> * <li>parent node uuid (which is branch specific)</li> * <li>version and language specific etag of the field container</li> * <li>availableLanguages</li> * <li>breadcrumb</li> * <li>webroot path &amp; language paths</li> * <li>permissions</li> * </ul> */ @Override public String getETag(InternalActionContext ac) { String superkey = super.getETag(ac); // Parameters Branch branch = ac.getBranch(getProject()); VersioningParameters versioiningParameters = ac.getVersioningParameters(); ContainerType type = forVersion(versioiningParameters.getVersion()); Node parentNode = getParentNode(branch.getUuid()); NodeGraphFieldContainer container = findVersion(ac.getNodeParameters().getLanguageList(), branch.getUuid(), ac.getVersioningParameters() .getVersion()); StringBuilder keyBuilder = new StringBuilder(); keyBuilder.append(superkey); /** * branch uuid */ keyBuilder.append(branch.getUuid()); keyBuilder.append("-"); // TODO version, language list // We can omit further etag keys since this would return a 404 anyhow // since the requested container could not be found. if (container == null) { keyBuilder.append("404-no-container"); return keyBuilder.toString(); } /** * Parent node * * The node can be moved and this would also affect the response. The etag must also be changed when the node is moved. */ if (parentNode != null) { keyBuilder.append("-"); keyBuilder.append(parentNode.getUuid()); } // fields version if (container != null) { keyBuilder.append("-"); keyBuilder.append(container.getETag(ac)); } /** * Expansion (all) * * The expandAll parameter changes the json response and thus must be included in the etag computation. */ if (ac.getNodeParameters().getExpandAll()) { keyBuilder.append("-"); keyBuilder.append("expand:true"); } // expansion (selective) String expandedFields = Arrays.toString(ac.getNodeParameters().getExpandedFieldNames()); keyBuilder.append("-"); keyBuilder.append("expandFields:"); keyBuilder.append(expandedFields); // branch specific tags for (Tag tag : getTags(branch)) { // Tags can't be moved across branches thus we don't need to add the // tag family etag keyBuilder.append(tag.getETag(ac)); } // branch specific children for (Node child : getChildren(branch.getUuid())) { if (ac.getUser().hasPermission(child, READ_PUBLISHED_PERM)) { keyBuilder.append("-"); keyBuilder.append(child.getSchemaContainer().getName()); } } // Publish state & availableLanguages for (NodeGraphFieldContainer c : getGraphFieldContainers(branch, PUBLISHED)) { keyBuilder.append(c.getLanguageTag() + "published"); } for (NodeGraphFieldContainer c : getGraphFieldContainers(branch, DRAFT)) { keyBuilder.append(c.getLanguageTag() + "draft"); } // breadcrumb keyBuilder.append("-"); Node current = getParentNode(branch.getUuid()); if (current != null) { while (current != null) { String key = current.getUuid() + current.getDisplayName(ac); keyBuilder.append(key); if (LinkType.OFF != ac.getNodeParameters().getResolveLinks()) { WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); String url = linkReplacer.resolve(ac, branch.getUuid(), type, current.getUuid(), ac.getNodeParameters().getResolveLinks(), getProject().getName(), container.getLanguageTag()); keyBuilder.append(url); } current = current.getParentNode(branch.getUuid()); } } /** * webroot path & language paths * * The webroot and language paths must be included in the etag computation in order to invalidate the etag once a node language gets updated or once the * display name of any 
parent node changes. */ if (ac.getNodeParameters().getResolveLinks() != LinkType.OFF) { WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); String path = linkReplacer.resolve(ac, branch.getUuid(), type, getUuid(), ac.getNodeParameters().getResolveLinks(), getProject() .getName(), container.getLanguageTag()); keyBuilder.append(path); // languagePaths for (GraphFieldContainer currentFieldContainer : getGraphFieldContainers(branch, forVersion(versioiningParameters.getVersion()))) { String currLanguage = currentFieldContainer.getLanguageTag(); keyBuilder.append(currLanguage + "=" + linkReplacer.resolve(ac, branch.getUuid(), type, this, ac.getNodeParameters() .getResolveLinks(), currLanguage)); } } /** * permissions (&roleUuid query parameter aware) * * Permissions can change and thus must be included in the etag computation in order to invalidate the etag once the permissions change. */ String roleUuid = ac.getRolePermissionParameters().getRoleUuid(); if (!isEmpty(roleUuid)) { Role role = MeshInternal.get().boot().meshRoot().getRoleRoot().loadObjectByUuid(ac, roleUuid, READ_PERM); if (role != null) { Set<GraphPermission> permSet = role.getPermissions(this); Set<String> humanNames = new HashSet<>(); for (GraphPermission permission : permSet) { humanNames.add(permission.getRestPerm().getName()); } String[] names = humanNames.toArray(new String[humanNames.size()]); keyBuilder.append(Arrays.toString(names)); } } if (log.isDebugEnabled()) { log.debug("Creating etag from key {" + keyBuilder.toString() + "}"); } return ETag.hash(keyBuilder.toString()); } @Override public String getAPIPath(InternalActionContext ac) { return VersionHandler.baseRoute(ac) + "/" + encodeSegment(getProject().getName()) + "/nodes/" + getUuid(); } @Override public User getCreator() { return out(HAS_CREATOR, UserImpl.class).nextOrNull(); } @Override public MeshElementEventModel onDeleted() { throw new NotImplementedException("Use dedicated onDeleted method for nodes instead."); } public NodeMovedEventModel onNodeMoved(String branchUuid, Node target) { NodeMovedEventModel model = new NodeMovedEventModel(); model.setEvent(NODE_MOVED); model.setBranchUuid(branchUuid); model.setProject(getProject().transformToReference()); fillEventInfo(model); model.setTarget(target.transformToMinimalReference()); return model; } @Override protected MeshProjectElementEventModel createEvent(MeshEvent event) { NodeMeshEventModel model = new NodeMeshEventModel(); model.setEvent(event); model.setProject(getProject().transformToReference()); fillEventInfo(model); return model; } public NodeMeshEventModel onReferenceUpdated(String uuid, SchemaContainer schema, String branchUuid, ContainerType type, String languageTag) { NodeMeshEventModel event = new NodeMeshEventModel(); event.setEvent(NODE_REFERENCE_UPDATED); event.setUuid(uuid); event.setLanguageTag(languageTag); event.setType(type); event.setBranchUuid(branchUuid); event.setProject(getProject().transformToReference()); if (schema != null) { event.setSchema(schema.transformToReference()); } return event; } @Override public NodeMeshEventModel onDeleted(String uuid, SchemaContainer schema, String branchUuid, ContainerType type, String languageTag) { NodeMeshEventModel event = new NodeMeshEventModel(); event.setEvent(getTypeInfo().getOnDeleted()); event.setUuid(uuid); event.setLanguageTag(languageTag); event.setType(type); event.setBranchUuid(branchUuid); event.setProject(getProject().transformToReference()); if (schema != null) { 
event.setSchema(schema.transformToReference()); } return event; } @Override public NodeTaggedEventModel onTagged(Tag tag, Branch branch, Assignment assignment) { NodeTaggedEventModel model = new NodeTaggedEventModel(); model.setTag(tag.transformToReference()); model.setBranch(branch.transformToReference()); model.setProject(getProject().transformToReference()); model.setNode(transformToMinimalReference()); switch (assignment) { case ASSIGNED: model.setEvent(NODE_TAGGED); break; case UNASSIGNED: model.setEvent(NODE_UNTAGGED); break; } return model; } @Override public Single<NodeResponse> transformToRest(InternalActionContext ac, int level, String... languageTags) { return MeshInternal.get().database().asyncTx(() -> { return Single.just(transformToRestSync(ac, level, languageTags)); }); } @Override public boolean isBaseNode() { return inE(HAS_ROOT_NODE).hasNext(); } @Override public boolean isVisibleInBranch(String branchUuid) { return getGraphFieldContainersIt(branchUuid, DRAFT).iterator().hasNext(); } @Override public PermissionChangedProjectElementEventModel onPermissionChanged(Role role) { PermissionChangedProjectElementEventModel model = new PermissionChangedProjectElementEventModel(); fillPermissionChanged(model, role); return model; } }
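// Illustrative usage sketch (not part of the original class; assumes an open transaction and an InternalActionContext "ac"):
//   Node node = project.getNodeRoot().findByUuid(someUuid);     // "someUuid" is a placeholder
//   NodeResponse rest = node.transformToRest(ac, 0, "en").blockingGet();
//   String etag = node.getETag(ac);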
core/src/main/java/com/gentics/mesh/core/data/node/impl/NodeImpl.java
package com.gentics.mesh.core.data.node.impl; import static com.gentics.mesh.core.data.GraphFieldContainerEdge.WEBROOT_INDEX_NAME; import static com.gentics.mesh.core.data.relationship.GraphPermission.CREATE_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.READ_PERM; import static com.gentics.mesh.core.data.relationship.GraphPermission.READ_PUBLISHED_PERM; import static com.gentics.mesh.core.data.relationship.GraphRelationships.ASSIGNED_TO_PROJECT; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_CREATOR; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_FIELD; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_FIELD_CONTAINER; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_ITEM; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_PARENT_NODE; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_ROLE; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_ROOT_NODE; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_SCHEMA_CONTAINER; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_TAG; import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_USER; import static com.gentics.mesh.core.rest.MeshEvent.NODE_MOVED; import static com.gentics.mesh.core.rest.MeshEvent.NODE_REFERENCE_UPDATED; import static com.gentics.mesh.core.rest.MeshEvent.NODE_TAGGED; import static com.gentics.mesh.core.rest.MeshEvent.NODE_UNTAGGED; import static com.gentics.mesh.core.rest.common.ContainerType.DRAFT; import static com.gentics.mesh.core.rest.common.ContainerType.INITIAL; import static com.gentics.mesh.core.rest.common.ContainerType.PUBLISHED; import static com.gentics.mesh.core.rest.common.ContainerType.forVersion; import static com.gentics.mesh.core.rest.error.Errors.error; import static com.gentics.mesh.event.Assignment.ASSIGNED; import static com.gentics.mesh.event.Assignment.UNASSIGNED; import static com.gentics.mesh.madl.field.FieldType.LINK; import static com.gentics.mesh.madl.field.FieldType.STRING; import static com.gentics.mesh.madl.index.EdgeIndexDefinition.edgeIndex; import static com.gentics.mesh.madl.type.VertexTypeDefinition.vertexType; import static com.gentics.mesh.util.StreamUtil.toStream; import static com.gentics.mesh.util.URIUtils.encodeSegment; import static com.tinkerpop.blueprints.Direction.IN; import static com.tinkerpop.blueprints.Direction.OUT; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.METHOD_NOT_ALLOWED; import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; import static org.apache.commons.lang3.StringUtils.isEmpty; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Deque; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; import org.apache.commons.lang3.NotImplementedException; import com.gentics.madl.index.IndexHandler; import com.gentics.madl.tx.Tx; import com.gentics.madl.type.TypeHandler; import com.gentics.mesh.context.BulkActionContext; import com.gentics.mesh.context.InternalActionContext; 
import com.gentics.mesh.core.data.Branch; import com.gentics.mesh.core.data.GraphFieldContainer; import com.gentics.mesh.core.data.GraphFieldContainerEdge; import com.gentics.mesh.core.data.Language; import com.gentics.mesh.core.data.MeshAuthUser; import com.gentics.mesh.core.data.NodeGraphFieldContainer; import com.gentics.mesh.core.data.Project; import com.gentics.mesh.core.data.Role; import com.gentics.mesh.core.data.Tag; import com.gentics.mesh.core.data.TagEdge; import com.gentics.mesh.core.data.User; import com.gentics.mesh.core.data.container.impl.NodeGraphFieldContainerImpl; import com.gentics.mesh.core.data.diff.FieldContainerChange; import com.gentics.mesh.core.data.generic.AbstractGenericFieldContainerVertex; import com.gentics.mesh.core.data.generic.MeshVertexImpl; import com.gentics.mesh.core.data.impl.GraphFieldContainerEdgeImpl; import com.gentics.mesh.core.data.impl.ProjectImpl; import com.gentics.mesh.core.data.impl.TagEdgeImpl; import com.gentics.mesh.core.data.impl.TagImpl; import com.gentics.mesh.core.data.impl.UserImpl; import com.gentics.mesh.core.data.node.Node; import com.gentics.mesh.core.data.node.field.BinaryGraphField; import com.gentics.mesh.core.data.node.field.StringGraphField; import com.gentics.mesh.core.data.node.field.impl.NodeGraphFieldImpl; import com.gentics.mesh.core.data.node.field.nesting.NodeGraphField; import com.gentics.mesh.core.data.page.TransformablePage; import com.gentics.mesh.core.data.page.impl.DynamicTransformablePageImpl; import com.gentics.mesh.core.data.relationship.GraphPermission; import com.gentics.mesh.core.data.schema.SchemaContainer; import com.gentics.mesh.core.data.schema.SchemaContainerVersion; import com.gentics.mesh.core.data.schema.impl.SchemaContainerImpl; import com.gentics.mesh.core.link.WebRootLinkReplacer; import com.gentics.mesh.core.rest.MeshEvent; import com.gentics.mesh.core.rest.common.ContainerType; import com.gentics.mesh.core.rest.error.NodeVersionConflictException; import com.gentics.mesh.core.rest.error.NotModifiedException; import com.gentics.mesh.core.rest.event.MeshElementEventModel; import com.gentics.mesh.core.rest.event.MeshProjectElementEventModel; import com.gentics.mesh.core.rest.event.node.NodeMeshEventModel; import com.gentics.mesh.core.rest.event.node.NodeMovedEventModel; import com.gentics.mesh.core.rest.event.node.NodeTaggedEventModel; import com.gentics.mesh.core.rest.event.role.PermissionChangedProjectElementEventModel; import com.gentics.mesh.core.rest.navigation.NavigationElement; import com.gentics.mesh.core.rest.navigation.NavigationResponse; import com.gentics.mesh.core.rest.node.FieldMapImpl; import com.gentics.mesh.core.rest.node.NodeChildrenInfo; import com.gentics.mesh.core.rest.node.NodeCreateRequest; import com.gentics.mesh.core.rest.node.NodeResponse; import com.gentics.mesh.core.rest.node.NodeUpdateRequest; import com.gentics.mesh.core.rest.node.PublishStatusModel; import com.gentics.mesh.core.rest.node.PublishStatusResponse; import com.gentics.mesh.core.rest.node.field.Field; import com.gentics.mesh.core.rest.node.field.NodeFieldListItem; import com.gentics.mesh.core.rest.node.field.list.impl.NodeFieldListItemImpl; import com.gentics.mesh.core.rest.node.version.NodeVersionsResponse; import com.gentics.mesh.core.rest.node.version.VersionInfo; import com.gentics.mesh.core.rest.schema.FieldSchema; import com.gentics.mesh.core.rest.schema.Schema; import com.gentics.mesh.core.rest.tag.TagReference; import com.gentics.mesh.core.rest.user.NodeReference; import 
com.gentics.mesh.core.webroot.PathPrefixUtil; import com.gentics.mesh.dagger.DB; import com.gentics.mesh.dagger.MeshInternal; import com.gentics.mesh.event.Assignment; import com.gentics.mesh.event.EventQueueBatch; import com.gentics.mesh.graphdb.spi.Database; import com.gentics.mesh.handler.ActionContext; import com.gentics.mesh.handler.VersionHandler; import com.gentics.mesh.json.JsonUtil; import com.gentics.mesh.madl.traversal.TraversalResult; import com.gentics.mesh.parameter.DeleteParameters; import com.gentics.mesh.parameter.GenericParameters; import com.gentics.mesh.parameter.LinkType; import com.gentics.mesh.parameter.NavigationParameters; import com.gentics.mesh.parameter.NodeParameters; import com.gentics.mesh.parameter.PagingParameters; import com.gentics.mesh.parameter.PublishParameters; import com.gentics.mesh.parameter.VersioningParameters; import com.gentics.mesh.parameter.impl.NavigationParametersImpl; import com.gentics.mesh.parameter.impl.VersioningParametersImpl; import com.gentics.mesh.parameter.value.FieldsSet; import com.gentics.mesh.path.Path; import com.gentics.mesh.path.PathSegment; import com.gentics.mesh.util.DateUtils; import com.gentics.mesh.util.ETag; import com.gentics.mesh.util.StreamUtil; import com.gentics.mesh.util.URIUtils; import com.gentics.mesh.util.VersionNumber; import com.syncleus.ferma.EdgeFrame; import com.syncleus.ferma.FramedGraph; import com.syncleus.ferma.traversals.EdgeTraversal; import com.syncleus.ferma.traversals.VertexTraversal; import com.tinkerpop.blueprints.Direction; import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Vertex; import io.reactivex.Observable; import io.reactivex.Single; import io.vertx.core.logging.Logger; import io.vertx.core.logging.LoggerFactory; /** * @see Node */ public class NodeImpl extends AbstractGenericFieldContainerVertex<NodeResponse, Node> implements Node { private static final Logger log = LoggerFactory.getLogger(NodeImpl.class); public static void init(TypeHandler type, IndexHandler index) { type.createType(vertexType(NodeImpl.class, MeshVertexImpl.class)); index.createIndex(edgeIndex(HAS_PARENT_NODE)); index.createIndex(edgeIndex(HAS_PARENT_NODE) .withPostfix("branch_out") .withField("out", LINK) .withField(BRANCH_UUID_KEY, STRING)); index.createIndex(edgeIndex(HAS_PARENT_NODE) .withPostfix("branch") .withField("in", LINK) .withField(BRANCH_UUID_KEY, STRING)); index.createIndex(edgeIndex(HAS_FIELD_CONTAINER) .withPostfix("field") .withField("out", LINK) .withField(GraphFieldContainerEdge.BRANCH_UUID_KEY, STRING) .withField(GraphFieldContainerEdge.EDGE_TYPE_KEY, STRING)); } @Override public String getPathSegment(String branchUuid, ContainerType type, String... languageTag) { // Check whether this node is the base node. if (getParentNode(branchUuid) == null) { return ""; } // Find the first matching container and fallback to other listed languages NodeGraphFieldContainer container = null; for (String tag : languageTag) { if ((container = getGraphFieldContainer(tag, branchUuid, type)) != null) { break; } } if (container != null) { return container.getSegmentFieldValue(); } return null; } @Override public void postfixPathSegment(String branchUuid, ContainerType type, String languageTag) { // Check whether this node is the base node. 
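// (The base node has no parent in the given branch and thus no segment that could be postfixed.)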
if (getParentNode(branchUuid) == null) { return; } // Find the first matching container and fall back to other listed languages NodeGraphFieldContainer container = getGraphFieldContainer(languageTag, branchUuid, type); if (container != null) { container.postfixSegmentFieldValue(); } } @Override public String getPath(ActionContext ac, String branchUuid, ContainerType type, String... languageTag) { // We want to avoid rendering the path again for nodes which we have already handled. // Thus utilise the action context data map to retrieve already handled paths. String cacheKey = getUuid() + branchUuid + type.getCode() + Arrays.toString(languageTag); return (String) ac.data().computeIfAbsent(cacheKey, key -> { List<String> segments = new ArrayList<>(); String segment = getPathSegment(branchUuid, type, languageTag); if (segment == null) { return null; } segments.add(segment); // For the path segments of the container, we add all (additional) // project languages to the list of languages for the fallback. List<String> langList = new ArrayList<>(); langList.addAll(Arrays.asList(languageTag)); // TODO maybe we only want to get the project languages? langList.addAll(MeshInternal.get().boot().getAllLanguageTags()); String[] projectLanguages = langList.toArray(new String[langList.size()]); Node current = this; while (current != null) { current = current.getParentNode(branchUuid); if (current == null || current.getParentNode(branchUuid) == null) { break; } // For the path segments of the container, we allow ANY language (of the project) segment = current.getPathSegment(branchUuid, type, projectLanguages); // Abort early if one of the path segments could not be resolved. We // need to return a 404 in those cases. if (segment == null) { return null; } segments.add(segment); } Collections.reverse(segments); // Finally construct the path from all segments StringBuilder builder = new StringBuilder(); // Append the prefix first Branch branch = getProject().getBranchRoot().findByUuid(branchUuid); if (branch != null) { String prefix = PathPrefixUtil.sanitize(branch.getPathPrefix()); if (!prefix.isEmpty()) { String[] prefixSegments = prefix.split("/"); for (String prefixSegment : prefixSegments) { if (prefixSegment.isEmpty()) { continue; } builder.append("/").append(URIUtils.encodeSegment(prefixSegment)); } } } Iterator<String> it = segments.iterator(); while (it.hasNext()) { String currentSegment = it.next(); builder.append("/").append(URIUtils.encodeSegment(currentSegment)); } return builder.toString(); }); } @Override public void assertPublishConsistency(InternalActionContext ac, Branch branch) { String branchUuid = branch.getUuid(); // Check whether the node has a published version and thus is published boolean isPublished = hasPublishedContent(branch.getUuid()); // A published node must also have a published parent node. if (isPublished) { Node parentNode = getParentNode(branchUuid); // Only assert consistency of parent nodes which are not project // base nodes. if (parentNode != null && (!parentNode.getUuid().equals(getProject().getBaseNode().getUuid()))) { // Check whether the parent node has a published field container // for the given branch and language if (!parentNode.hasPublishedContent(branch.getUuid())) { log.error("Could not find published field container for node {" + parentNode.getUuid() + "} in branch {" + branchUuid + "}"); throw error(BAD_REQUEST, "node_error_parent_containers_not_published", parentNode.getUuid()); } } } // A draft node can't have any published child nodes.
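// Otherwise a published child would hang below a parent that has no published content, which would break the publish consistency asserted here.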
if (!isPublished) { // TODO handle branch for (Node node : getChildren()) { if (node.hasPublishedContent(branch.getUuid())) { log.error("Found published field container for node {" + node.getUuid() + "} in branch {" + branchUuid + "}. Node is child of {" + getUuid() + "}"); throw error(BAD_REQUEST, "node_error_children_containers_still_published", node.getUuid()); } } } } @Override public TraversalResult<? extends Tag> getTags(Branch branch) { return new TraversalResult<>(TagEdgeImpl.getTagTraversal(this, branch).frameExplicit(TagImpl.class)); } @Override public boolean hasTag(Tag tag, Branch branch) { return TagEdgeImpl.hasTag(this, tag, branch); } @Override public TraversalResult<? extends NodeGraphFieldContainer> getGraphFieldContainers(String branchUuid, ContainerType type) { return new TraversalResult<>(outE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()).inV().frameExplicit(NodeGraphFieldContainerImpl.class)); } @Override public TraversalResult<? extends NodeGraphFieldContainer> getGraphFieldContainersIt(ContainerType type) { return new TraversalResult<>( outE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()).inV() .frameExplicit(NodeGraphFieldContainerImpl.class)); } @Override public TraversalResult<? extends NodeGraphFieldContainer> getGraphFieldContainersIt(String branchUuid, ContainerType type) { return new TraversalResult<>( outE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()).inV().frameExplicit(NodeGraphFieldContainerImpl.class)); } @SuppressWarnings("unchecked") @Override public long getGraphFieldContainerCount() { return outE(HAS_FIELD_CONTAINER).or(e -> e.traversal().has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, DRAFT.getCode()), e -> e.traversal() .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, PUBLISHED.getCode())).inV().count(); } @Override public NodeGraphFieldContainer getLatestDraftFieldContainer(String languageTag) { return getGraphFieldContainer(languageTag, getProject().getLatestBranch(), DRAFT, NodeGraphFieldContainerImpl.class); } @Override public NodeGraphFieldContainer getGraphFieldContainer(String languageTag, Branch branch, ContainerType type) { return getGraphFieldContainer(languageTag, branch, type, NodeGraphFieldContainerImpl.class); } @Override public NodeGraphFieldContainer getGraphFieldContainer(String languageTag) { return getGraphFieldContainer(languageTag, getProject().getLatestBranch().getUuid(), DRAFT, NodeGraphFieldContainerImpl.class); } @Override public NodeGraphFieldContainer getGraphFieldContainer(String languageTag, String branchUuid, ContainerType type) { return getGraphFieldContainer(languageTag, branchUuid, type, NodeGraphFieldContainerImpl.class); } @Override public NodeGraphFieldContainer createGraphFieldContainer(String languageTag, Branch branch, User editor) { return createGraphFieldContainer(languageTag, branch, editor, null, true); } @Override public NodeGraphFieldContainer createGraphFieldContainer(String languageTag, Branch branch, User editor, NodeGraphFieldContainer original, boolean handleDraftEdge) { NodeGraphFieldContainerImpl previous = null; EdgeFrame draftEdge = null; String branchUuid = branch.getUuid(); // check whether there is a current draft version if (handleDraftEdge) { draftEdge = getGraphFieldContainerEdgeFrame(languageTag, branchUuid, DRAFT); if (draftEdge != null) { previous = 
draftEdge.inV().nextOrDefault(NodeGraphFieldContainerImpl.class, null); } } // Create the new container NodeGraphFieldContainerImpl newContainer = getGraph().addFramedVertex(NodeGraphFieldContainerImpl.class); if (original != null) { newContainer.setEditor(editor); newContainer.setLastEditedTimestamp(); newContainer.setLanguageTag(languageTag); newContainer.setSchemaContainerVersion(original.getSchemaContainerVersion()); } else { newContainer.setEditor(editor); newContainer.setLastEditedTimestamp(); newContainer.setLanguageTag(languageTag); // We need to create a new container with no reference, so use the latest schema version available. newContainer.setSchemaContainerVersion(branch.findLatestSchemaVersion(getSchemaContainer())); } if (previous != null) { // set the next version number newContainer.setVersion(previous.getVersion().nextDraft()); previous.setNextVersion(newContainer); } else { // set the initial version number newContainer.setVersion(new VersionNumber()); } // clone the original or the previous container if (original != null) { newContainer.clone(original); } else if (previous != null) { newContainer.clone(previous); } // remove existing draft edge if (draftEdge != null) { draftEdge.remove(); newContainer.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_update"); } // We need to update the display field property since we created a new // node graph field container. newContainer.updateDisplayFieldValue(); if (handleDraftEdge) { // create a new draft edge GraphFieldContainerEdge edge = addFramedEdge(HAS_FIELD_CONTAINER, newContainer, GraphFieldContainerEdgeImpl.class); edge.setLanguageTag(languageTag); edge.setBranchUuid(branchUuid); edge.setType(DRAFT); } // if there is no initial edge, create one if (getGraphFieldContainerEdge(languageTag, branchUuid, INITIAL) == null) { GraphFieldContainerEdge initialEdge = addFramedEdge(HAS_FIELD_CONTAINER, newContainer, GraphFieldContainerEdgeImpl.class); initialEdge.setLanguageTag(languageTag); initialEdge.setBranchUuid(branchUuid); initialEdge.setType(INITIAL); } return newContainer; } @Override public EdgeFrame getGraphFieldContainerEdgeFrame(String languageTag, String branchUuid, ContainerType type) { EdgeTraversal<?, ?, ?> edgeTraversal = outE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.LANGUAGE_TAG_KEY, languageTag).has( GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid).has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()); if (edgeTraversal.hasNext()) { return edgeTraversal.next(); } else { return null; } } /** * Get all graph field container edges for the given branch and container type. * * @param branchUuid Branch uuid used to filter the edges * @param type Container type (e.g. DRAFT, PUBLISHED, INITIAL) used to filter the edges * @return Matching graph field container edges */ protected Iterable<?
extends GraphFieldContainerEdgeImpl> getGraphFieldContainerEdges(String branchUuid, ContainerType type) { EdgeTraversal<?, ?, ?> edgeTraversal = outE(HAS_FIELD_CONTAINER) .has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()); return edgeTraversal.frameExplicit(GraphFieldContainerEdgeImpl.class); } @Override public void addTag(Tag tag, Branch branch) { removeTag(tag, branch); TagEdge edge = addFramedEdge(HAS_TAG, tag, TagEdgeImpl.class); edge.setBranchUuid(branch.getUuid()); } @Override public void removeTag(Tag tag, Branch branch) { outE(HAS_TAG).has(TagEdgeImpl.BRANCH_UUID_KEY, branch.getUuid()).mark().inV().retain(tag).back().removeAll(); } @Override public void removeAllTags(Branch branch) { outE(HAS_TAG).has(TagEdgeImpl.BRANCH_UUID_KEY, branch.getUuid()).removeAll(); } @Override public void setSchemaContainer(SchemaContainer schema) { setLinkOut(schema, HAS_SCHEMA_CONTAINER); } @Override public SchemaContainer getSchemaContainer() { return out(HAS_SCHEMA_CONTAINER).nextOrDefaultExplicit(SchemaContainerImpl.class, null); } @Override public TraversalResult<? extends Node> getChildren() { return new TraversalResult<>(in(HAS_PARENT_NODE).frameExplicit(NodeImpl.class)); } @Override public TraversalResult<Node> getChildren(String branchUuid) { Database db = MeshInternal.get().database(); FramedGraph graph = Tx.getActive().getGraph(); Iterable<Edge> edges = graph.getEdges("e." + HAS_PARENT_NODE.toLowerCase() + "_branch", db.createComposedIndexKey(id(), branchUuid)); Iterator<Edge> it = edges.iterator(); Iterable<Edge> iterable = () -> it; Stream<Edge> stream = StreamSupport.stream(iterable.spliterator(), false); Stream<Node> nstream = stream.map(edge -> { Vertex vertex = edge.getVertex(OUT); return graph.frameElementExplicit(vertex, NodeImpl.class); }); return new TraversalResult<>(() -> nstream.iterator()); } @Override public Stream<Node> getChildrenStream(InternalActionContext ac) { Database db = MeshInternal.get().database(); FramedGraph graph = Tx.get().getGraph(); MeshAuthUser user = ac.getUser(); Iterable<Edge> edges = graph.getEdges("e." + HAS_PARENT_NODE.toLowerCase() + "_branch", db.createComposedIndexKey(id(), ac.getBranch().getUuid())); Iterator<Edge> it = edges.iterator(); Iterable<Edge> iterable = () -> it; Stream<Edge> stream = StreamSupport.stream(iterable.spliterator(), false); return stream .map(edge -> edge.getVertex(OUT)) .filter(vertex -> { Object id = vertex.getId(); return user.hasPermissionForId(id, READ_PERM) || user.hasPermissionForId(id, READ_PUBLISHED_PERM); }) .map(vertex -> graph.frameElementExplicit(vertex, NodeImpl.class)); } @Override public Node getParentNode(String branchUuid) { Database db = MeshInternal.get().database(); FramedGraph graph = Tx.getActive().getGraph(); Iterable<Edge> edges = graph.getEdges("e." 
+ HAS_PARENT_NODE.toLowerCase() + "_branch_out", db.createComposedIndexKey(id(), branchUuid)); Iterator<Edge> it = edges.iterator(); if (it.hasNext()) { Vertex in = it.next().getVertex(IN); return graph.frameElementExplicit(in, NodeImpl.class); } else { return null; } } @Override public void setParentNode(String branchUuid, Node parent) { outE(HAS_PARENT_NODE).has(BRANCH_UUID_KEY, branchUuid).removeAll(); addFramedEdge(HAS_PARENT_NODE, parent).setProperty(BRANCH_UUID_KEY, branchUuid); } @Override public Project getProject() { return out(ASSIGNED_TO_PROJECT, ProjectImpl.class).nextOrNull(); } @Override public void setProject(Project project) { setLinkOut(project, ASSIGNED_TO_PROJECT); } @Override public Node create(User creator, SchemaContainerVersion schemaVersion, Project project) { return create(creator, schemaVersion, project, project.getLatestBranch()); } /** * Create a new node and make sure to delegate the creation request to the main node root aggregation node. */ @Override public Node create(User creator, SchemaContainerVersion schemaVersion, Project project, Branch branch, String uuid) { if (!isBaseNode() && !isVisibleInBranch(branch.getUuid())) { log.error(String.format("Error while creating node in branch {%s}: requested parent node {%s} exists, but is not visible in branch.", branch.getName(), getUuid())); throw error(NOT_FOUND, "object_not_found_for_uuid", getUuid()); } // We need to use the (meshRoot)--(nodeRoot) node instead of the // (project)--(nodeRoot) node. Node node = MeshInternal.get().boot().nodeRoot().create(creator, schemaVersion, project, uuid); node.setParentNode(branch.getUuid(), this); node.setSchemaContainer(schemaVersion.getSchemaContainer()); // setCreated(creator); return node; } private String getLanguageInfo(List<String> languageTags) { Iterator<String> it = languageTags.iterator(); String langInfo = "["; while (it.hasNext()) { langInfo += it.next(); if (it.hasNext()) { langInfo += ","; } } langInfo += "]"; return langInfo; } @Override public NodeResponse transformToRestSync(InternalActionContext ac, int level, String... languageTags) { GenericParameters generic = ac.getGenericParameters(); FieldsSet fields = generic.getFields(); // Increment level for each node transformation to avoid stackoverflow situations level = level + 1; NodeResponse restNode = new NodeResponse(); if (fields.has("uuid")) { restNode.setUuid(getUuid()); // Performance shortcut to return now and ignore the other checks if (fields.size() == 1) { return restNode; } } SchemaContainer container = getSchemaContainer(); if (container == null) { throw error(BAD_REQUEST, "The schema container for node {" + getUuid() + "} could not be found."); } Branch branch = ac.getBranch(getProject()); if (fields.has("languages")) { restNode.setAvailableLanguages(getLanguageInfo(ac)); } setFields(ac, branch, restNode, level, fields, languageTags); if (fields.has("parent")) { setParentNodeInfo(ac, branch, restNode); } if (fields.has("perms")) { setRolePermissions(ac, restNode); } if (fields.has("children")) { setChildrenInfo(ac, branch, restNode); } if (fields.has("tags")) { setTagsToRest(ac, restNode, branch); } fillCommonRestFields(ac, fields, restNode); if (fields.has("breadcrumb")) { setBreadcrumbToRest(ac, restNode); } if (fields.has("path")) { setPathsToRest(ac, restNode, branch); } if (fields.has("project")) { setProjectReference(ac, restNode); } return restNode; } /** * Set the project reference to the node response model. 
* * @param ac * @param restNode */ private void setProjectReference(InternalActionContext ac, NodeResponse restNode) { restNode.setProject(getProject().transformToReference()); } /** * Set the parent node reference to the rest model. * * @param ac * @param branch * Use the given branch to identify the branch specific parent node * @param restNode * Model to be updated * @return */ private void setParentNodeInfo(InternalActionContext ac, Branch branch, NodeResponse restNode) { Node parentNode = getParentNode(branch.getUuid()); if (parentNode != null) { restNode.setParentNode(parentNode.transformToReference(ac)); } else { // Only the base node of the project has no parent. Therefore this // node must be a container. restNode.setContainer(true); } } /** * Set the node fields to the given rest model. * * @param ac * @param branch * Branch which will be used to locate the correct field container * @param restNode * Rest model which will be updated * @param fields * Field whitelist for the response * @param level * Current level of transformation * @param languageTags * @return */ private void setFields(InternalActionContext ac, Branch branch, NodeResponse restNode, int level, FieldsSet fieldsSet, String... languageTags) { VersioningParameters versioiningParameters = ac.getVersioningParameters(); NodeParameters nodeParameters = ac.getNodeParameters(); List<String> requestedLanguageTags = null; if (languageTags != null && languageTags.length > 0) { requestedLanguageTags = Arrays.asList(languageTags); } else { requestedLanguageTags = nodeParameters.getLanguageList(); } // First check whether the NGFC for the requested language,branch and version could be found. NodeGraphFieldContainer fieldContainer = findVersion(requestedLanguageTags, branch.getUuid(), versioiningParameters.getVersion()); if (fieldContainer == null) { // If a published version was requested, we check whether any // published language variant exists for the node, if not, response // with NOT_FOUND if (forVersion(versioiningParameters.getVersion()) == PUBLISHED && !getGraphFieldContainers(branch, PUBLISHED).iterator().hasNext()) { log.error("Could not find field container for languages {" + requestedLanguageTags + "} and branch {" + branch.getUuid() + "} and version params version {" + versioiningParameters.getVersion() + "}, branch {" + branch.getUuid() + "}"); throw error(NOT_FOUND, "node_error_published_not_found_for_uuid_branch_version", getUuid(), branch.getUuid()); } // If a specific version was requested, that does not exist, we also // return NOT_FOUND if (forVersion(versioiningParameters.getVersion()) == INITIAL) { throw error(NOT_FOUND, "object_not_found_for_version", versioiningParameters.getVersion()); } String langInfo = getLanguageInfo(requestedLanguageTags); if (log.isDebugEnabled()) { log.debug("The fields for node {" + getUuid() + "} can't be populated since the node has no matching language for the languages {" + langInfo + "}. Fields will be empty."); } // No field container was found so we can only set the schema // reference that points to the container (no version information // will be included) if (fieldsSet.has("schema")) { restNode.setSchema(getSchemaContainer().transformToReference()); } // TODO BUG Issue #119 - Actually we would need to throw a 404 in these cases but many current implementations rely on the empty node response. // The response will also contain information about other languages and general structure information. // We should change this behaviour and update the client implementations. 
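// Until then the fallback response only carries the schema reference set above together with the general node information (parent, children, tags, paths) assembled in transformToRestSync, while the fields remain empty.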
// throw error(NOT_FOUND, "object_not_found_for_uuid", getUuid()); } else { Schema schema = fieldContainer.getSchemaContainerVersion().getSchema(); if (fieldsSet.has("container")) { restNode.setContainer(schema.getContainer()); } if (fieldsSet.has("displayField")) { restNode.setDisplayField(schema.getDisplayField()); } if (fieldsSet.has("displayName")) { restNode.setDisplayName(getDisplayName(ac)); } if (fieldsSet.has("language")) { restNode.setLanguage(fieldContainer.getLanguageTag()); } // List<String> fieldsToExpand = ac.getExpandedFieldnames(); // modify the language fallback list by moving the container's // language to the front List<String> containerLanguageTags = new ArrayList<>(requestedLanguageTags); containerLanguageTags.remove(restNode.getLanguage()); containerLanguageTags.add(0, restNode.getLanguage()); // Schema reference if (fieldsSet.has("schema")) { restNode.setSchema(fieldContainer.getSchemaContainerVersion().transformToReference()); } // Version reference if (fieldsSet.has("version") && fieldContainer.getVersion() != null) { restNode.setVersion(fieldContainer.getVersion().toString()); } // editor and edited if (fieldsSet.has("editor")) { User editor = fieldContainer.getEditor(); if (editor != null) { restNode.setEditor(editor.transformToReference()); } } if (fieldsSet.has("edited")) { restNode.setEdited(fieldContainer.getLastEditedDate()); } if (fieldsSet.has("fields")) { // Iterate over all fields and transform them to rest com.gentics.mesh.core.rest.node.FieldMap fields = new FieldMapImpl(); for (FieldSchema fieldEntry : schema.getFields()) { // boolean expandField = // fieldsToExpand.contains(fieldEntry.getName()) || // ac.getExpandAllFlag(); Field restField = fieldContainer.getRestFieldFromGraph(ac, fieldEntry.getName(), fieldEntry, containerLanguageTags, level); if (fieldEntry.isRequired() && restField == null) { // TODO i18n // throw error(BAD_REQUEST, "The field {" + // fieldEntry.getName() // + "} is a required field but it could not be found in the // node. Please add the field using an update call or change // the field schema and // remove the required flag."); fields.put(fieldEntry.getName(), null); } if (restField == null) { if (log.isDebugEnabled()) { log.debug("Field for key {" + fieldEntry.getName() + "} could not be found. Ignoring the field."); } } else { fields.put(fieldEntry.getName(), restField); } } restNode.setFields(fields); } } } /** * Set the children info to the rest model. * * @param ac * @param branch * Branch which will be used to identify the branch specific child nodes * @param restNode * Rest model which will be updated */ private void setChildrenInfo(InternalActionContext ac, Branch branch, NodeResponse restNode) { Map<String, NodeChildrenInfo> childrenInfo = new HashMap<>(); for (Node child : getChildren(branch.getUuid())) { if (ac.getUser().hasPermission(child, READ_PERM)) { String schemaName = child.getSchemaContainer().getName(); NodeChildrenInfo info = childrenInfo.get(schemaName); if (info == null) { info = new NodeChildrenInfo(); String schemaUuid = child.getSchemaContainer().getUuid(); info.setSchemaUuid(schemaUuid); info.setCount(1); childrenInfo.put(schemaName, info); } else { info.setCount(info.getCount() + 1); } } } restNode.setChildrenInfo(childrenInfo); } /** * Set the tag information to the rest model. 
* * @param ac * @param restNode * Rest model which will be updated * @param branch * Branch which will be used to identify the branch specific tags * @return */ private void setTagsToRest(InternalActionContext ac, NodeResponse restNode, Branch branch) { List<TagReference> list = getTags(branch).stream() .map(Tag::transformToReference) .collect(Collectors.toList()); restNode.setTags(list); } /** * Add the branch specific webroot and language paths to the given rest node. * * @param ac * @param restNode * Rest model which will be updated * @param branch * Branch which will be used to identify the nodes relations and thus the correct path can be determined * @return */ private void setPathsToRest(InternalActionContext ac, NodeResponse restNode, Branch branch) { VersioningParameters versioiningParameters = ac.getVersioningParameters(); if (ac.getNodeParameters().getResolveLinks() != LinkType.OFF) { String branchUuid = ac.getBranch(getProject()).getUuid(); ContainerType type = forVersion(versioiningParameters.getVersion()); LinkType linkType = ac.getNodeParameters().getResolveLinks(); // Path WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); String path = linkReplacer.resolve(ac, branchUuid, type, getUuid(), linkType, getProject().getName(), restNode.getLanguage()); restNode.setPath(path); // languagePaths restNode.setLanguagePaths(getLanguagePaths(ac, linkType, branch)); } } @Override public Map<String, String> getLanguagePaths(InternalActionContext ac, LinkType linkType, Branch branch) { VersioningParameters versioiningParameters = ac.getVersioningParameters(); String branchUuid = ac.getBranch(getProject()).getUuid(); ContainerType type = forVersion(versioiningParameters.getVersion()); Map<String, String> languagePaths = new HashMap<>(); WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); for (GraphFieldContainer currentFieldContainer : getGraphFieldContainers(branch, forVersion(versioiningParameters.getVersion()))) { String currLanguage = currentFieldContainer.getLanguageTag(); String languagePath = linkReplacer.resolve(ac, branchUuid, type, this, linkType, currLanguage); languagePaths.put(currLanguage, languagePath); } return languagePaths; } /** * Set the breadcrumb information to the given rest node. 
* * @param ac * @param restNode */ private void setBreadcrumbToRest(InternalActionContext ac, NodeResponse restNode) { List<NodeReference> breadcrumbs = getBreadcrumbNodeStream(ac) .map(node -> node.transformToReference(ac)) .collect(Collectors.toList()); restNode.setBreadcrumb(breadcrumbs); } @Override public TraversalResult<Node> getBreadcrumbNodes(InternalActionContext ac) { return new TraversalResult<>(() -> getBreadcrumbNodeStream(ac).iterator()); } private Stream<Node> getBreadcrumbNodeStream(InternalActionContext ac) { String branchUuid = ac.getBranch(getProject()).getUuid(); Node current = this; Deque<Node> breadcrumb = new ArrayDeque<>(); while (current != null) { breadcrumb.addFirst(current); current = current.getParentNode(branchUuid); } return breadcrumb.stream(); } @Override public Single<NavigationResponse> transformToNavigation(InternalActionContext ac) { NavigationParametersImpl parameters = new NavigationParametersImpl(ac); if (parameters.getMaxDepth() < 0) { throw error(BAD_REQUEST, "navigation_error_invalid_max_depth"); } return MeshInternal.get().database().asyncTx(() -> { // TODO assure that the schema version is correct if (!getSchemaContainer().getLatestVersion().getSchema().getContainer()) { throw error(BAD_REQUEST, "navigation_error_no_container"); } String etagKey = buildNavigationEtagKey(ac, this, parameters.getMaxDepth(), 0, ac.getBranch(getProject()).getUuid(), forVersion(ac .getVersioningParameters().getVersion())); String etag = ETag.hash(etagKey); ac.setEtag(etag, true); if (ac.matches(etag, true)) { return Single.error(new NotModifiedException()); } else { NavigationResponse response = new NavigationResponse(); return buildNavigationResponse(ac, this, parameters.getMaxDepth(), 0, response, response, ac.getBranch(getProject()).getUuid(), forVersion(ac.getVersioningParameters().getVersion())); } }); } @Override public NodeVersionsResponse transformToVersionList(InternalActionContext ac) { NodeVersionsResponse response = new NodeVersionsResponse(); Map<String, List<VersionInfo>> versions = new HashMap<>(); getGraphFieldContainersIt(ac.getBranch(), DRAFT).forEach(c -> { versions.put(c.getLanguageTag(), c.versions().stream() .map(v -> v.transformToVersionInfo(ac)) .collect(Collectors.toList())); }); response.setVersions(versions); return response; } /** * Generate the etag key for the requested navigation. * * @param ac * @param node * Current node to start building the navigation * @param maxDepth * Maximum depth of navigation * @param level * Current level of recursion * @param branchUuid * Branch uuid used to extract selected tree structure * @param type * @return */ private String buildNavigationEtagKey(InternalActionContext ac, Node node, int maxDepth, int level, String branchUuid, ContainerType type) { NavigationParametersImpl parameters = new NavigationParametersImpl(ac); StringBuilder builder = new StringBuilder(); builder.append(node.getETag(ac)); TraversalResult<? extends Node> nodes = node.getChildren(ac.getUser(), branchUuid, null, type); // Abort recursion when we reach the max level or when no more children // can be found. 
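// The resulting key concatenates this node's ETag with the keys of its container children, so any change within the visible subtree yields a different navigation ETag.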
if (level == maxDepth || !nodes.iterator().hasNext()) { return builder.toString(); } for (Node child : nodes) { if (child.getSchemaContainer().getLatestVersion().getSchema().getContainer()) { builder.append(buildNavigationEtagKey(ac, child, maxDepth, level + 1, branchUuid, type)); } else if (parameters.isIncludeAll()) { builder.append(buildNavigationEtagKey(ac, child, maxDepth, level, branchUuid, type)); } } return builder.toString(); } /** * Recursively build the navigation response. * * @param ac * Action context * @param node * Current node that should be handled in combination with the given navigation element * @param maxDepth * Maximum depth for the navigation * @param level * Zero based level of the current navigation element * @param navigation * Current navigation response * @param currentElement * Current navigation element for the given level * @param branchUuid * Branch uuid to be used for loading children of nodes * @param type * container type to be used for transformation * @return */ private Single<NavigationResponse> buildNavigationResponse(InternalActionContext ac, Node node, int maxDepth, int level, NavigationResponse navigation, NavigationElement currentElement, String branchUuid, ContainerType type) { TraversalResult<? extends Node> nodes = node.getChildren(ac.getUser(), branchUuid, null, type); List<Single<NavigationResponse>> obsResponses = new ArrayList<>(); obsResponses.add(node.transformToRest(ac, 0).map(response -> { // Set current element data currentElement.setUuid(response.getUuid()); currentElement.setNode(response); return navigation; })); // Abort recursion when we reach the max level or when no more children // can be found. if (level == maxDepth || !nodes.iterator().hasNext()) { List<Observable<NavigationResponse>> obsList = obsResponses.stream().map(ele -> ele.toObservable()).collect(Collectors.toList()); return Observable.merge(obsList).lastOrError(); } NavigationParameters parameters = new NavigationParametersImpl(ac); // Add children for (Node child : nodes) { // TODO assure that the schema version is correct? 
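// Note: container children are added one navigation level deeper, while non-container children (only included when includeAll is set) are added at the current level.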
// TODO also allow navigations over containers if (child.getSchemaContainer().getLatestVersion().getSchema().getContainer()) { NavigationElement childElement = new NavigationElement(); // We found at least one child so lets create the array if (currentElement.getChildren() == null) { currentElement.setChildren(new ArrayList<>()); } currentElement.getChildren().add(childElement); obsResponses.add(buildNavigationResponse(ac, child, maxDepth, level + 1, navigation, childElement, branchUuid, type)); } else if (parameters.isIncludeAll()) { // We found at least one child so lets create the array if (currentElement.getChildren() == null) { currentElement.setChildren(new ArrayList<>()); } NavigationElement childElement = new NavigationElement(); currentElement.getChildren().add(childElement); obsResponses.add(buildNavigationResponse(ac, child, maxDepth, level, navigation, childElement, branchUuid, type)); } } List<Observable<NavigationResponse>> obsList = obsResponses.stream().map(ele -> ele.toObservable()).collect(Collectors.toList()); return Observable.merge(obsList).lastOrError(); } @Override public NodeReference transformToReference(InternalActionContext ac) { Branch branch = ac.getBranch(getProject()); NodeReference nodeReference = new NodeReference(); nodeReference.setUuid(getUuid()); nodeReference.setDisplayName(getDisplayName(ac)); nodeReference.setSchema(getSchemaContainer().transformToReference()); nodeReference.setProjectName(getProject().getName()); if (LinkType.OFF != ac.getNodeParameters().getResolveLinks()) { WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); ContainerType type = forVersion(ac.getVersioningParameters().getVersion()); String url = linkReplacer.resolve(ac, branch.getUuid(), type, this, ac.getNodeParameters().getResolveLinks(), ac.getNodeParameters() .getLanguages()); nodeReference.setPath(url); } return nodeReference; } @Override public NodeReference transformToMinimalReference() { NodeReference ref = new NodeReference(); ref.setUuid(getUuid()); ref.setSchema(getSchemaContainer().transformToReference()); return ref; } @Override public NodeFieldListItem toListItem(InternalActionContext ac, String[] languageTags) { // Create the rest field and populate the fields NodeFieldListItemImpl listItem = new NodeFieldListItemImpl(getUuid()); String branchUuid = ac.getBranch(getProject()).getUuid(); ContainerType type = forVersion(new VersioningParametersImpl(ac).getVersion()); if (ac.getNodeParameters().getResolveLinks() != LinkType.OFF) { listItem.setUrl(MeshInternal.get().webRootLinkReplacer().resolve(ac, branchUuid, type, this, ac.getNodeParameters().getResolveLinks(), languageTags)); } return listItem; } @Override public PublishStatusResponse transformToPublishStatus(InternalActionContext ac) { PublishStatusResponse publishStatus = new PublishStatusResponse(); Map<String, PublishStatusModel> languages = getLanguageInfo(ac); publishStatus.setAvailableLanguages(languages); return publishStatus; } private Map<String, PublishStatusModel> getLanguageInfo(InternalActionContext ac) { Map<String, PublishStatusModel> languages = new HashMap<>(); Branch branch = ac.getBranch(getProject()); getGraphFieldContainers(branch, PUBLISHED).stream().forEach(c -> { String date = DateUtils.toISO8601(c.getLastEditedTimestamp(), 0); PublishStatusModel status = new PublishStatusModel(); status.setPublished(true); status.setVersion(c.getVersion().toString()); User editor = c.getEditor(); if (editor != null) { status.setPublisher(editor.transformToReference()); } 
status.setPublishDate(date); languages.put(c.getLanguageTag(), status); }); getGraphFieldContainers(branch, DRAFT).stream().filter(c -> !languages.containsKey(c.getLanguageTag())).forEach(c -> { PublishStatusModel status = new PublishStatusModel().setPublished(false).setVersion(c.getVersion().toString()); languages.put(c.getLanguageTag(), status); }); return languages; } @Override public void publish(InternalActionContext ac, Branch branch, BulkActionContext bac) { PublishParameters parameters = ac.getPublishParameters(); // .store(this, branchUuid, ContainerType.PUBLISHED, false); bac.batch().add(onUpdated()); // Handle recursion if (parameters.isRecursive()) { // TODO handle specific branch for (Node child : getChildren()) { child.publish(ac, branch, bac); } } assertPublishConsistency(ac, branch); } @Override public void publish(InternalActionContext ac, BulkActionContext bac) { Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); List<? extends NodeGraphFieldContainer> unpublishedContainers = getGraphFieldContainers(branch, ContainerType.DRAFT).stream().filter(c -> !c .isPublished(branchUuid)).collect(Collectors.toList()); // publish all unpublished containers and handle recursion unpublishedContainers.stream().forEach(c -> { NodeGraphFieldContainer newVersion = publish(ac, c.getLanguageTag(), branch, ac.getUser()); bac.add(newVersion.onPublish(branchUuid)); }); assertPublishConsistency(ac, branch); // Handle recursion after publishing the current node. // This is done to ensure the publish consistency. // Even if the publishing process stops at the initial // level the consistency is correct. PublishParameters parameters = ac.getPublishParameters(); if (parameters.isRecursive()) { // TODO handle specific branch for (Node node : getChildren()) { node.publish(ac, bac); } } bac.process(); } @Override public void takeOffline(InternalActionContext ac, BulkActionContext bac, Branch branch, PublishParameters parameters) { // Handle recursion first to start at the leafs if (parameters.isRecursive()) { for (Node node : getChildren()) { node.takeOffline(ac, bac, branch, parameters); } } String branchUuid = branch.getUuid(); TraversalResult<? 
extends GraphFieldContainerEdgeImpl> publishEdges = new TraversalResult<>( getGraphFieldContainerEdges(branchUuid, PUBLISHED)); // Remove the published edge for each found container publishEdges.forEach(edge -> { NodeGraphFieldContainer content = edge.getNodeContainer(); bac.add(content.onTakenOffline(branchUuid)); edge.remove(); if (content.isAutoPurgeEnabled() && content.isPurgeable()) { content.purge(bac); } }); assertPublishConsistency(ac, branch); bac.process(); } @Override public void takeOffline(InternalActionContext ac, BulkActionContext bac) { Branch branch = ac.getBranch(getProject()); PublishParameters parameters = ac.getPublishParameters(); takeOffline(ac, bac, branch, parameters); } @Override public PublishStatusModel transformToPublishStatus(InternalActionContext ac, String languageTag) { Branch branch = ac.getBranch(getProject()); NodeGraphFieldContainer container = getGraphFieldContainer(languageTag, branch.getUuid(), PUBLISHED); if (container != null) { String date = container.getLastEditedDate(); PublishStatusModel status = new PublishStatusModel(); status.setPublished(true); status.setVersion(container.getVersion().toString()); User editor = container.getEditor(); if (editor != null) { status.setPublisher(editor.transformToReference()); } status.setPublishDate(date); return status; } else { container = getGraphFieldContainer(languageTag, branch.getUuid(), DRAFT); if (container == null) { throw error(NOT_FOUND, "error_language_not_found", languageTag); } return new PublishStatusModel().setPublished(false).setVersion(container.getVersion().toString()); } } @Override public void publish(InternalActionContext ac, BulkActionContext bac, String languageTag) { Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); // get the draft version of the given language NodeGraphFieldContainer draftVersion = getGraphFieldContainer(languageTag, branchUuid, DRAFT); // if not existent -> NOT_FOUND if (draftVersion == null) { throw error(NOT_FOUND, "error_language_not_found", languageTag); } // If the located draft version was already published we are done if (draftVersion.isPublished(branchUuid)) { return; } // TODO check whether all required fields are filled, if not -> unable to publish NodeGraphFieldContainer publishedContainer = publish(ac, draftVersion.getLanguageTag(), branch, ac.getUser()); // Invoke a store of the document since it must now also be added to the published index bac.add(publishedContainer.onPublish(branchUuid)); } @Override public void takeOffline(InternalActionContext ac, BulkActionContext bac, Branch branch, String languageTag) { String branchUuid = branch.getUuid(); // Locate the published container NodeGraphFieldContainer published = getGraphFieldContainer(languageTag, branchUuid, PUBLISHED); if (published == null) { throw error(NOT_FOUND, "error_language_not_found", languageTag); } bac.add(published.onTakenOffline(branchUuid)); // Remove the "published" edge getGraphFieldContainerEdge(languageTag, branchUuid, PUBLISHED).remove(); assertPublishConsistency(ac, branch); bac.process(); } @Override public void setPublished(InternalActionContext ac, NodeGraphFieldContainer container, String branchUuid) { String languageTag = container.getLanguageTag(); boolean isAutoPurgeEnabled = container.isAutoPurgeEnabled(); // Remove an existing published edge EdgeFrame currentPublished = getGraphFieldContainerEdgeFrame(languageTag, branchUuid, PUBLISHED); if (currentPublished != null) { // We need to remove the edge first since updateWebrootPathInfo 
will // check the published edge again NodeGraphFieldContainerImpl oldPublishedContainer = currentPublished.inV().nextOrDefaultExplicit(NodeGraphFieldContainerImpl.class, null); currentPublished.remove(); oldPublishedContainer.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_publish"); if (ac.isPurgeAllowed() && isAutoPurgeEnabled && oldPublishedContainer.isPurgeable()) { oldPublishedContainer.purge(); } } if (ac.isPurgeAllowed()) { // Check whether a previous draft can be purged. NodeGraphFieldContainer prev = container.getPreviousVersion(); if (isAutoPurgeEnabled && prev != null && prev.isPurgeable()) { prev.purge(); } } // create new published edge GraphFieldContainerEdge edge = addFramedEdge(HAS_FIELD_CONTAINER, container, GraphFieldContainerEdgeImpl.class); edge.setLanguageTag(languageTag); edge.setBranchUuid(branchUuid); edge.setType(PUBLISHED); container.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_publish"); } @Override public NodeGraphFieldContainer publish(InternalActionContext ac, String languageTag, Branch branch, User user) { String branchUuid = branch.getUuid(); // create published version NodeGraphFieldContainer newVersion = createGraphFieldContainer(languageTag, branch, user); newVersion.setVersion(newVersion.getVersion().nextPublished()); setPublished(ac, newVersion, branchUuid); return newVersion; } @Override public NodeGraphFieldContainer findVersion(List<String> languageTags, String branchUuid, String version) { NodeGraphFieldContainer fieldContainer = null; // TODO refactor the type handling and don't return INITIAL. ContainerType type = forVersion(version); for (String languageTag : languageTags) { // Don't start the version lookup using the initial version. Instead start at the end of the chain and use the DRAFT version instead. fieldContainer = getGraphFieldContainer(languageTag, branchUuid, type == INITIAL ? DRAFT : type); // Traverse the chain downwards and stop once we found our target version or we reached the end. if (fieldContainer != null && type == INITIAL) { while (fieldContainer != null && !version.equals(fieldContainer.getVersion().toString())) { fieldContainer = fieldContainer.getPreviousVersion(); } } // We found a container for one of the languages if (fieldContainer != null) { break; } } return fieldContainer; } @Override public List<String> getAvailableLanguageNames() { List<String> languageTags = new ArrayList<>(); // TODO it would be better to store the languagetag along with the edge for (GraphFieldContainer container : getDraftGraphFieldContainers()) { languageTags.add(container.getLanguageTag()); } return languageTags; } @Override public List<String> getAvailableLanguageNames(Branch branch, ContainerType type) { List<String> languageTags = new ArrayList<>(); for (GraphFieldContainer container : getGraphFieldContainers(branch, type)) { languageTags.add(container.getLanguageTag()); } return languageTags; } @Override public void delete(BulkActionContext bac, boolean ignoreChecks, boolean recursive) { if (!ignoreChecks) { // Prevent deletion of basenode if (getProject().getBaseNode().getUuid().equals(getUuid())) { throw error(METHOD_NOT_ALLOWED, "node_basenode_not_deletable"); } } // Delete subfolders if (log.isDebugEnabled()) { log.debug("Deleting node {" + getUuid() + "}"); } // TODO Only affect a specific branch? 
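// Children are deleted depth-first; each child delete processes its own bulk action batch before the containers of this node are removed below.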
if (recursive) { for (Node child : getChildren()) { child.delete(bac); bac.process(); } } // Delete all initial containers (which will delete all containers) for (NodeGraphFieldContainer container : getGraphFieldContainersIt(INITIAL)) { container.delete(bac); } if (log.isDebugEnabled()) { log.debug("Deleting node {" + getUuid() + "} vertex."); } addReferenceUpdates(bac); bac.add(onDeleted(getUuid(), getSchemaContainer(), null, null, null)); getElement().remove(); bac.process(); } @Override public Stream<? extends NodeGraphField> getInboundReferences() { return toStream(inE(HAS_FIELD, HAS_ITEM) .has(NodeGraphFieldImpl.class) .frameExplicit(NodeGraphFieldImpl.class)); } /** * Adds reference update events to the context for all draft and published contents that reference this node. * @param bac */ private void addReferenceUpdates(BulkActionContext bac) { Set<String> handledNodeUuids = new HashSet<>(); getInboundReferences() .flatMap(NodeGraphField::getReferencingContents) .forEach(nodeContainer -> { for (GraphFieldContainerEdgeImpl edge : nodeContainer.inE(HAS_FIELD_CONTAINER).frameExplicit(GraphFieldContainerEdgeImpl.class)) { ContainerType type = edge.getType(); // Only handle published or draft contents if (type.equals(DRAFT) || type.equals(PUBLISHED)) { Node node = nodeContainer.getParentNode(); String uuid = node.getUuid(); String languageTag = nodeContainer.getLanguageTag(); String branchUuid = edge.getBranchUuid(); String key = uuid + languageTag + branchUuid + type.getCode(); if (!handledNodeUuids.contains(key)) { bac.add(onReferenceUpdated(node.getUuid(), node.getSchemaContainer(), branchUuid, type, languageTag)); handledNodeUuids.add(key); } } } }); } @Override public void delete(BulkActionContext bac) { delete(bac, false, true); } @Override public void deleteFromBranch(InternalActionContext ac, Branch branch, BulkActionContext bac, boolean ignoreChecks) { DeleteParameters parameters = ac.getDeleteParameters(); // 1. Remove subfolders from branch String branchUuid = branch.getUuid(); for (Node child : getChildren(branchUuid)) { if (!parameters.isRecursive()) { throw error(BAD_REQUEST, "node_error_delete_failed_node_has_children"); } child.deleteFromBranch(ac, branch, bac, ignoreChecks); } // 2. Delete all language containers for (NodeGraphFieldContainer container : getGraphFieldContainers(branch, DRAFT)) { deleteLanguageContainer(ac, branch, container.getLanguageTag(), bac, false); } // 3. Now check if the node has no more field containers in any branch. We can delete it in those cases if (getGraphFieldContainerCount() == 0) { delete(bac); } else { // Otherwise we need to remove the "parent" edge for the branch // first remove the "parent" edge (because the node itself will // probably not be deleted, but just removed from the branch) outE(HAS_PARENT_NODE).has(BRANCH_UUID_KEY, branchUuid).removeAll(); } } /** * Get a vertex traversal to find the children of this node, this user has read permission for. * * @param requestUser * user * @param branchUuid * branch uuid * @param languageTags * Only list nodes which match the given language tags. Don't filter if the language tags list is null * @param type * edge type * @return vertex traversal */ private VertexTraversal<?, ?, ?> getChildrenTraversal(MeshAuthUser requestUser, String branchUuid, List<String> languageTags, ContainerType type) { String permLabel = type == PUBLISHED ? 
READ_PUBLISHED_PERM.label() : READ_PERM.label(); VertexTraversal<?, ?, ?> traversal = null; if (branchUuid != null) { traversal = inE(HAS_PARENT_NODE).has(BRANCH_UUID_KEY, branchUuid).outV(); } else { traversal = in(HAS_PARENT_NODE); } traversal = traversal.mark().in(permLabel).out(HAS_ROLE).in(HAS_USER).retain(requestUser).back(); if (branchUuid != null || type != null) { EdgeTraversal<?, ?, ?> edgeTraversal = traversal.mark().outE(HAS_FIELD_CONTAINER); if (branchUuid != null) { edgeTraversal = edgeTraversal.has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branchUuid); } if (type != null) { edgeTraversal = edgeTraversal.has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, type.getCode()); } // Filter out nodes which are not listed in the given language tags if (languageTags != null) { edgeTraversal = edgeTraversal.filter(edge -> { String languageTag = edge.getProperty(GraphFieldContainerEdgeImpl.LANGUAGE_TAG_KEY); return languageTags.contains(languageTag); }); } traversal = (VertexTraversal<?, ?, ?>) edgeTraversal.outV().back(); } return traversal; } @Override public TraversalResult<? extends Node> getChildren(MeshAuthUser requestUser, String branchUuid, List<String> languageTags, ContainerType type) { return new TraversalResult<>(getChildrenTraversal(requestUser, branchUuid, languageTags, type).frameExplicit(NodeImpl.class)); } @Override public TransformablePage<? extends Node> getChildren(InternalActionContext ac, List<String> languageTags, String branchUuid, ContainerType type, PagingParameters pagingInfo) { String indexName = "e." + HAS_PARENT_NODE.toLowerCase() + "_branch"; Object indexKey = DB.get().createComposedIndexKey(id(), branchUuid); GraphPermission perm = type == PUBLISHED ? READ_PUBLISHED_PERM : READ_PERM; if (languageTags == null) { return new DynamicTransformablePageImpl<>(ac.getUser(), indexName, indexKey, Direction.OUT, NodeImpl.class, pagingInfo, perm, null, true); } else { return new DynamicTransformablePageImpl<>(ac.getUser(), indexName, indexKey, Direction.OUT, NodeImpl.class, pagingInfo, perm, (item) -> { // Filter out nodes which do not provide one of the specified language tags and type for (String languageTag : languageTags) { if (item.getGraphFieldContainerEdge(languageTag, branchUuid, type) != null) { return true; } } return false; }, true); } } @Override public TransformablePage<? extends Tag> getTags(User user, PagingParameters params, Branch branch) { VertexTraversal<?, ?, ?> traversal = TagEdgeImpl.getTagTraversal(this, branch); return new DynamicTransformablePageImpl<Tag>(user, traversal, params, READ_PERM, TagImpl.class); } @Override public void applyPermissions(EventQueueBatch batch, Role role, boolean recursive, Set<GraphPermission> permissionsToGrant, Set<GraphPermission> permissionsToRevoke) { if (recursive) { // TODO for branch? 
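// Note: the recursion below spans the children of all branches, not only the current one (see TODO above).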
for (Node child : getChildren()) { child.applyPermissions(batch, role, recursive, permissionsToGrant, permissionsToRevoke); } } super.applyPermissions(batch, role, recursive, permissionsToGrant, permissionsToRevoke); } @Override public String getDisplayName(InternalActionContext ac) { NodeParameters nodeParameters = ac.getNodeParameters(); VersioningParameters versioningParameters = ac.getVersioningParameters(); NodeGraphFieldContainer container = findVersion(nodeParameters.getLanguageList(), ac.getBranch(getProject()).getUuid(), versioningParameters .getVersion()); if (container == null) { if (log.isDebugEnabled()) { log.debug("Could not find any matching i18n field container for node {" + getUuid() + "}."); } return null; } else { // Determine the display field name and load the string value // from that field. return container.getDisplayFieldValue(); } } /** * Update the node language or create a new draft for the specific language. This method also applies conflict detection and takes care of deduplication. * * <p> * Conflict detection: Conflict detection only occurs during update requests. Two diffs are created. The update request is compared against the base * version graph field container (the version which is referenced by the request). The second diff is created between the base version graph field * container and the latest version of the graph field container. This diff identifies the changes that happened in between those versions. Both diffs are * then compared in order to determine their intersection. The intersection identifies those fields which have been altered between both versions and which * would now also be touched by the current request. Such a situation causes a conflict and the update is aborted. * * <p> * Conflict cases * <ul> * <li>Initial creates - No conflict handling needs to be performed</li> * <li>Migration check - Nodes which have not yet been migrated can't be updated</li> * </ul> * * <p> * Deduplication: Field values that have not changed between the request data and the last version will not cause new fields to be created in the new * version graph field container. The new version graph field container will instead reference those fields from the previous graph field container * version. Please note that this deduplication only applies to complex fields (e.g. list and micronode fields). * * @param ac * @param batch * Batch which will be used to update the search index * @return true if the node was updated or a new language container was created, false if nothing changed */ @Override public boolean update(InternalActionContext ac, EventQueueBatch batch) { NodeUpdateRequest requestModel = ac.fromJson(NodeUpdateRequest.class); if (isEmpty(requestModel.getLanguage())) { throw error(BAD_REQUEST, "error_language_not_set"); } // Check whether the tags need to be updated List<TagReference> tags = requestModel.getTags(); if (tags != null) { updateTags(ac, batch, requestModel.getTags()); } // Set the language tag parameter here in order to return the updated language in the response String languageTag = requestModel.getLanguage(); NodeParameters nodeParameters = ac.getNodeParameters(); nodeParameters.setLanguages(languageTag); Language language = MeshInternal.get().boot().languageRoot().findByLanguageTag(languageTag); if (language == null) { throw error(BAD_REQUEST, "error_language_not_found", requestModel.getLanguage()); } Branch branch = ac.getBranch(getProject()); NodeGraphFieldContainer latestDraftVersion = getGraphFieldContainer(languageTag, branch, DRAFT); // Check whether this is the first time that an update for the given language and branch occurs.
In this case a new container must be created. // This means that no conflict check can be performed. Conflict checks only occur for updates on existing contents. if (latestDraftVersion == null) { // Create a new field container latestDraftVersion = createGraphFieldContainer(languageTag, branch, ac.getUser()); // Check whether the node has a parent node in this branch, if not, the request is supposed to be a create request // and we get the parent node from this create request if (getParentNode(branch.getUuid()) == null) { NodeCreateRequest createRequest = JsonUtil.readValue(ac.getBodyAsString(), NodeCreateRequest.class); if (createRequest.getParentNode() == null || isEmpty(createRequest.getParentNode().getUuid())) { throw error(BAD_REQUEST, "node_missing_parentnode_field"); } Node parentNode = getProject().getNodeRoot().loadObjectByUuid(ac, createRequest.getParentNode().getUuid(), CREATE_PERM); // check whether the parent node is visible in the branch if (!parentNode.isBaseNode() && !parentNode.isVisibleInBranch(branch.getUuid())) { log.error( String.format("Error while creating node in branch {%s}: requested parent node {%s} exists, but is not visible in branch.", branch.getName(), parentNode.getUuid())); throw error(NOT_FOUND, "object_not_found_for_uuid", createRequest.getParentNode().getUuid()); } setParentNode(branch.getUuid(), parentNode); } latestDraftVersion.updateFieldsFromRest(ac, requestModel.getFields()); batch.add(latestDraftVersion.onCreated(branch.getUuid(), DRAFT)); return true; } else { String version = requestModel.getVersion(); if (version == null) { log.debug("No version was specified. Assuming 'draft' for latest version"); version = "draft"; } // Make sure the container was already migrated. Otherwise the update can't proceed. SchemaContainerVersion schemaContainerVersion = latestDraftVersion.getSchemaContainerVersion(); if (!latestDraftVersion.getSchemaContainerVersion().equals(branch.findLatestSchemaVersion(schemaContainerVersion .getSchemaContainer()))) { throw error(BAD_REQUEST, "node_error_migration_incomplete"); } // Load the base version field container in order to create the diff NodeGraphFieldContainer baseVersionContainer = findVersion(requestModel.getLanguage(), branch.getUuid(), version); if (baseVersionContainer == null) { throw error(BAD_REQUEST, "node_error_draft_not_found", version, requestModel.getLanguage()); } latestDraftVersion.getSchemaContainerVersion().getSchema().assertForUnhandledFields(requestModel.getFields()); // TODO handle simplified case in which baseContainerVersion and // latestDraftVersion are equal List<FieldContainerChange> baseVersionDiff = baseVersionContainer.compareTo(latestDraftVersion); List<FieldContainerChange> requestVersionDiff = latestDraftVersion.compareTo(requestModel.getFields()); // Compare both sets of change sets List<FieldContainerChange> intersect = baseVersionDiff.stream().filter(requestVersionDiff::contains).collect(Collectors.toList()); // Check whether the update was not based on the latest draft version. In that case a conflict check needs to occur. 
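// Hypothetical example: the latest draft is version 1.2 but the request is based on 1.0 and modifies a field that was also changed between 1.0 and 1.2. The intersection of both diffs is then non-empty and a conflict error listing the affected field coordinates is thrown below.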
if (!latestDraftVersion.getVersion().getFullVersion().equals(version)) { // Check whether a conflict has been detected if (intersect.size() > 0) { NodeVersionConflictException conflictException = new NodeVersionConflictException("node_error_conflict_detected"); conflictException.setOldVersion(baseVersionContainer.getVersion().toString()); conflictException.setNewVersion(latestDraftVersion.getVersion().toString()); for (FieldContainerChange fcc : intersect) { conflictException.addConflict(fcc.getFieldCoordinates()); } throw conflictException; } } // Make sure to only update those fields which have been altered in between the latest version and the current request. Remove // unaffected fields from the rest request in order to prevent duplicate references. We don't want to touch field that have not been changed. // Otherwise the graph field references would no longer point to older revisions of the same field. Set<String> fieldsToKeepForUpdate = requestVersionDiff.stream().map(e -> e.getFieldKey()).collect(Collectors.toSet()); for (String fieldKey : requestModel.getFields().keySet()) { if (fieldsToKeepForUpdate.contains(fieldKey)) { continue; } if (log.isDebugEnabled()) { log.debug("Removing field from request {" + fieldKey + "} in order to handle deduplication."); } requestModel.getFields().remove(fieldKey); } // Check whether the request still contains data which needs to be updated. if (!requestModel.getFields().isEmpty()) { // Create new field container as clone of the existing NodeGraphFieldContainer newDraftVersion = createGraphFieldContainer(language.getLanguageTag(), branch, ac.getUser(), latestDraftVersion, true); // Update the existing fields newDraftVersion.updateFieldsFromRest(ac, requestModel.getFields()); // Purge the old draft if (ac.isPurgeAllowed() && newDraftVersion.isAutoPurgeEnabled() && latestDraftVersion.isPurgeable()) { latestDraftVersion.purge(); } latestDraftVersion = newDraftVersion; batch.add(newDraftVersion.onUpdated(branch.getUuid(), DRAFT)); return true; } } return false; } @Override public TransformablePage<? extends Tag> updateTags(InternalActionContext ac, EventQueueBatch batch) { List<Tag> tags = getTagsToSet(ac, batch); Branch branch = ac.getBranch(); applyTags(branch, tags, batch); User user = ac.getUser(); return getTags(user, ac.getPagingParameters(), branch); } @Override public void updateTags(InternalActionContext ac, EventQueueBatch batch, List<TagReference> list) { List<Tag> tags = getTagsToSet(list, ac, batch); Branch branch = ac.getBranch(); applyTags(branch, tags, batch); } private void applyTags(Branch branch, List<? extends Tag> tags, EventQueueBatch batch) { List<? extends Tag> currentTags = getTags(branch).list(); List<Tag> toBeAdded = tags.stream() .filter(StreamUtil.not(new HashSet<>(currentTags)::contains)) .collect(Collectors.toList()); toBeAdded.forEach(tag -> { addTag(tag, branch); batch.add(onTagged(tag, branch, ASSIGNED)); }); List<Tag> toBeRemoved = currentTags.stream() .filter(StreamUtil.not(new HashSet<>(tags)::contains)) .collect(Collectors.toList()); toBeRemoved.forEach(tag -> { removeTag(tag, branch); batch.add(onTagged(tag, branch, UNASSIGNED)); }); } @Override public void moveTo(InternalActionContext ac, Node targetNode, EventQueueBatch batch) { // TODO should we add a guard that terminates this loop when it runs to // long? // Check whether the target node is part of the subtree of the source // node. 
// We must detect and prevent such actions because those would // invalidate the tree structure Branch branch = ac.getBranch(getProject()); String branchUuid = branch.getUuid(); Node parent = targetNode.getParentNode(branchUuid); while (parent != null) { if (parent.getUuid().equals(getUuid())) { throw error(BAD_REQUEST, "node_move_error_not_allowed_to_move_node_into_one_of_its_children"); } parent = parent.getParentNode(branchUuid); } if (!targetNode.getSchemaContainer().getLatestVersion().getSchema().getContainer()) { throw error(BAD_REQUEST, "node_move_error_targetnode_is_no_folder"); } if (getUuid().equals(targetNode.getUuid())) { throw error(BAD_REQUEST, "node_move_error_same_nodes"); } setParentNode(branchUuid, targetNode); // Update published graph field containers getGraphFieldContainers(branchUuid, PUBLISHED).stream().forEach(container -> { container.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_move"); }); // Update draft graph field containers getGraphFieldContainers(branchUuid, DRAFT).stream().forEach(container -> { container.updateWebrootPathInfo(branchUuid, "node_conflicting_segmentfield_move"); }); batch.add(onNodeMoved(branchUuid, targetNode)); assertPublishConsistency(ac, branch); } @Override public void deleteLanguageContainer(InternalActionContext ac, Branch branch, String languageTag, BulkActionContext bac, boolean failForLastContainer) { // 1. Check whether the container has also a published variant. We need to take it offline in those cases NodeGraphFieldContainer container = getGraphFieldContainer(languageTag, branch, PUBLISHED); if (container != null) { takeOffline(ac, bac, branch, languageTag); } // 2. Load the draft container and remove it from the branch container = getGraphFieldContainer(languageTag, branch, DRAFT); if (container == null) { throw error(NOT_FOUND, "node_no_language_found", languageTag); } container.deleteFromBranch(branch, bac); // No need to delete the published variant because if the container was published the take offline call handled it // starting with the old draft, delete all GFC that have no next and are not draft (for other branches) NodeGraphFieldContainer dangling = container; while (dangling != null && !dangling.isDraft() && !dangling.hasNextVersion()) { NodeGraphFieldContainer toDelete = dangling; dangling = toDelete.getPreviousVersion(); toDelete.delete(bac); } NodeGraphFieldContainer initial = getGraphFieldContainer(languageTag, branch, INITIAL); if (initial != null) { // Remove the initial edge initial.inE(HAS_FIELD_CONTAINER).has(GraphFieldContainerEdgeImpl.BRANCH_UUID_KEY, branch.getUuid()) .has(GraphFieldContainerEdgeImpl.EDGE_TYPE_KEY, ContainerType.INITIAL.getCode()).removeAll(); // starting with the old initial, delete all GFC that have no previous and are not initial (for other branches) dangling = initial; while (dangling != null && !dangling.isInitial() && !dangling.hasPreviousVersion()) { NodeGraphFieldContainer toDelete = dangling; // since the GFC "toDelete" was only used by this branch, it can not have more than one "next" GFC // (multiple "next" would have to belong to different branches, and for every branch, there would have to be // an INITIAL, which would have to be either this GFC or a previous) dangling = toDelete.getNextVersions().iterator().next(); toDelete.delete(bac, false); } } // 3. Check whether this was be the last container of the node for this branch DeleteParameters parameters = ac.getDeleteParameters(); if (failForLastContainer) { TraversalResult<? 
extends NodeGraphFieldContainer> draftContainers = getGraphFieldContainers(branch.getUuid(), DRAFT); TraversalResult<? extends NodeGraphFieldContainer> publishContainers = getGraphFieldContainers(branch.getUuid(), PUBLISHED); boolean wasLastContainer = !draftContainers.iterator().hasNext() && !publishContainers.iterator().hasNext(); if (!parameters.isRecursive() && wasLastContainer) { throw error(BAD_REQUEST, "node_error_delete_failed_last_container_for_branch"); } // Also delete the node and children if (parameters.isRecursive() && wasLastContainer) { deleteFromBranch(ac, branch, bac, false); } } } @Override public PathSegment getSegment(String branchUuid, ContainerType type, String segment) { // Check the different language versions for (NodeGraphFieldContainer container : getGraphFieldContainersIt(branchUuid, type)) { Schema schema = container.getSchemaContainerVersion().getSchema(); String segmentFieldName = schema.getSegmentField(); // First check whether a string field exists for the given name StringGraphField field = container.getString(segmentFieldName); if (field != null) { String fieldValue = field.getString(); if (segment.equals(fieldValue)) { return new PathSegment(container, field, container.getLanguageTag(), segment); } } // No luck yet - lets check whether a binary field matches the // segmentField BinaryGraphField binaryField = container.getBinary(segmentFieldName); if (binaryField == null) { if (log.isDebugEnabled()) { log.debug("The node {" + getUuid() + "} did not contain a string or a binary field for segment field name {" + segmentFieldName + "}"); } } else { String binaryFilename = binaryField.getFileName(); if (segment.equals(binaryFilename)) { return new PathSegment(container, binaryField, container.getLanguageTag(), segment); } } } return null; } @Override public Path resolvePath(String branchUuid, ContainerType type, Path path, Stack<String> pathStack) { if (pathStack.isEmpty()) { return path; } String segment = pathStack.pop(); if (log.isDebugEnabled()) { log.debug("Resolving for path segment {" + segment + "}"); } FramedGraph graph = Tx.get().getGraph(); String segmentInfo = GraphFieldContainerEdgeImpl.composeSegmentInfo(this, segment); Object key = GraphFieldContainerEdgeImpl.composeWebrootIndexKey(segmentInfo, branchUuid, type); Iterator<? extends GraphFieldContainerEdge> edges = graph.getFramedEdges(WEBROOT_INDEX_NAME, key, GraphFieldContainerEdgeImpl.class) .iterator(); if (edges.hasNext()) { GraphFieldContainerEdge edge = edges.next(); Node childNode = edge.getNode(); PathSegment pathSegment = childNode.getSegment(branchUuid, type, segment); if (pathSegment != null) { path.addSegment(pathSegment); return childNode.resolvePath(branchUuid, type, path, pathStack); } } return path; } /** * Generate the etag for nodes. 
The etag consists of: * <ul> * <li>uuid of the node</li> * <li>parent node uuid (which is branch specific)</li> * <li>version and language specific etag of the field container</li> * <li>availableLanguages</li> * <li>breadcrumb</li> * <li>webroot path &amp; language paths</li> * <li>permissions</li> * </ul> */ @Override public String getETag(InternalActionContext ac) { String superkey = super.getETag(ac); // Parameters Branch branch = ac.getBranch(getProject()); VersioningParameters versioiningParameters = ac.getVersioningParameters(); ContainerType type = forVersion(versioiningParameters.getVersion()); Node parentNode = getParentNode(branch.getUuid()); NodeGraphFieldContainer container = findVersion(ac.getNodeParameters().getLanguageList(), branch.getUuid(), ac.getVersioningParameters() .getVersion()); StringBuilder keyBuilder = new StringBuilder(); keyBuilder.append(superkey); /** * branch uuid */ keyBuilder.append(branch.getUuid()); keyBuilder.append("-"); // TODO version, language list // We can omit further etag keys since this would return a 404 anyhow // since the requested container could not be found. if (container == null) { keyBuilder.append("404-no-container"); return keyBuilder.toString(); } /** * Parent node * * The node can be moved and this would also affect the response. The etag must also be changed when the node is moved. */ if (parentNode != null) { keyBuilder.append("-"); keyBuilder.append(parentNode.getUuid()); } // fields version if (container != null) { keyBuilder.append("-"); keyBuilder.append(container.getETag(ac)); } /** * Expansion (all) * * The expandAll parameter changes the json response and thus must be included in the etag computation. */ if (ac.getNodeParameters().getExpandAll()) { keyBuilder.append("-"); keyBuilder.append("expand:true"); } // expansion (selective) String expandedFields = Arrays.toString(ac.getNodeParameters().getExpandedFieldNames()); keyBuilder.append("-"); keyBuilder.append("expandFields:"); keyBuilder.append(expandedFields); // branch specific tags for (Tag tag : getTags(branch)) { // Tags can't be moved across branches thus we don't need to add the // tag family etag keyBuilder.append(tag.getETag(ac)); } // branch specific children for (Node child : getChildren(branch.getUuid())) { if (ac.getUser().hasPermission(child, READ_PUBLISHED_PERM)) { keyBuilder.append("-"); keyBuilder.append(child.getSchemaContainer().getName()); } } // Publish state & availableLanguages for (NodeGraphFieldContainer c : getGraphFieldContainers(branch, PUBLISHED)) { keyBuilder.append(c.getLanguageTag() + "published"); } for (NodeGraphFieldContainer c : getGraphFieldContainers(branch, DRAFT)) { keyBuilder.append(c.getLanguageTag() + "draft"); } // breadcrumb keyBuilder.append("-"); Node current = getParentNode(branch.getUuid()); if (current != null) { while (current != null) { String key = current.getUuid() + current.getDisplayName(ac); keyBuilder.append(key); if (LinkType.OFF != ac.getNodeParameters().getResolveLinks()) { WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); String url = linkReplacer.resolve(ac, branch.getUuid(), type, current.getUuid(), ac.getNodeParameters().getResolveLinks(), getProject().getName(), container.getLanguageTag()); keyBuilder.append(url); } current = current.getParentNode(branch.getUuid()); } } /** * webroot path & language paths * * The webroot and language paths must be included in the etag computation in order to invalidate the etag once a node language gets updated or once the * display name of any 
parent node changes. */ if (ac.getNodeParameters().getResolveLinks() != LinkType.OFF) { WebRootLinkReplacer linkReplacer = MeshInternal.get().webRootLinkReplacer(); String path = linkReplacer.resolve(ac, branch.getUuid(), type, getUuid(), ac.getNodeParameters().getResolveLinks(), getProject() .getName(), container.getLanguageTag()); keyBuilder.append(path); // languagePaths for (GraphFieldContainer currentFieldContainer : getGraphFieldContainers(branch, forVersion(versioiningParameters.getVersion()))) { String currLanguage = currentFieldContainer.getLanguageTag(); keyBuilder.append(currLanguage + "=" + linkReplacer.resolve(ac, branch.getUuid(), type, this, ac.getNodeParameters() .getResolveLinks(), currLanguage)); } } /** * permissions (&roleUuid query parameter aware) * * Permissions can change and thus must be included in the etag computation in order to invalidate the etag once the permissions change. */ String roleUuid = ac.getRolePermissionParameters().getRoleUuid(); if (!isEmpty(roleUuid)) { Role role = MeshInternal.get().boot().meshRoot().getRoleRoot().loadObjectByUuid(ac, roleUuid, READ_PERM); if (role != null) { Set<GraphPermission> permSet = role.getPermissions(this); Set<String> humanNames = new HashSet<>(); for (GraphPermission permission : permSet) { humanNames.add(permission.getRestPerm().getName()); } String[] names = humanNames.toArray(new String[humanNames.size()]); keyBuilder.append(Arrays.toString(names)); } } if (log.isDebugEnabled()) { log.debug("Creating etag from key {" + keyBuilder.toString() + "}"); } return ETag.hash(keyBuilder.toString()); } @Override public String getAPIPath(InternalActionContext ac) { return VersionHandler.baseRoute(ac) + "/" + encodeSegment(getProject().getName()) + "/nodes/" + getUuid(); } @Override public User getCreator() { return out(HAS_CREATOR, UserImpl.class).nextOrNull(); } @Override public MeshElementEventModel onDeleted() { throw new NotImplementedException("Use dedicated onDeleted method for nodes instead."); } public NodeMovedEventModel onNodeMoved(String branchUuid, Node target) { NodeMovedEventModel model = new NodeMovedEventModel(); model.setEvent(NODE_MOVED); model.setBranchUuid(branchUuid); model.setProject(getProject().transformToReference()); fillEventInfo(model); model.setTarget(target.transformToMinimalReference()); return model; } @Override protected MeshProjectElementEventModel createEvent(MeshEvent event) { NodeMeshEventModel model = new NodeMeshEventModel(); model.setEvent(event); model.setProject(getProject().transformToReference()); fillEventInfo(model); return model; } public NodeMeshEventModel onReferenceUpdated(String uuid, SchemaContainer schema, String branchUuid, ContainerType type, String languageTag) { NodeMeshEventModel event = new NodeMeshEventModel(); event.setEvent(NODE_REFERENCE_UPDATED); event.setUuid(uuid); event.setLanguageTag(languageTag); event.setType(type); event.setBranchUuid(branchUuid); event.setProject(getProject().transformToReference()); if (schema != null) { event.setSchema(schema.transformToReference()); } return event; } @Override public NodeMeshEventModel onDeleted(String uuid, SchemaContainer schema, String branchUuid, ContainerType type, String languageTag) { NodeMeshEventModel event = new NodeMeshEventModel(); event.setEvent(getTypeInfo().getOnDeleted()); event.setUuid(uuid); event.setLanguageTag(languageTag); event.setType(type); event.setBranchUuid(branchUuid); event.setProject(getProject().transformToReference()); if (schema != null) { 
event.setSchema(schema.transformToReference()); } return event; } @Override public NodeTaggedEventModel onTagged(Tag tag, Branch branch, Assignment assignment) { NodeTaggedEventModel model = new NodeTaggedEventModel(); model.setTag(tag.transformToReference()); model.setBranch(branch.transformToReference()); model.setProject(getProject().transformToReference()); model.setNode(transformToMinimalReference()); switch (assignment) { case ASSIGNED: model.setEvent(NODE_TAGGED); break; case UNASSIGNED: model.setEvent(NODE_UNTAGGED); break; } return model; } @Override public Single<NodeResponse> transformToRest(InternalActionContext ac, int level, String... languageTags) { return MeshInternal.get().database().asyncTx(() -> { return Single.just(transformToRestSync(ac, level, languageTags)); }); } @Override public boolean isBaseNode() { return inE(HAS_ROOT_NODE).hasNext(); } @Override public boolean isVisibleInBranch(String branchUuid) { return getGraphFieldContainersIt(branchUuid, DRAFT).iterator().hasNext(); } @Override public PermissionChangedProjectElementEventModel onPermissionChanged(Role role) { PermissionChangedProjectElementEventModel model = new PermissionChangedProjectElementEventModel(); fillPermissionChanged(model, role); return model; } }
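The javadoc on update() in the NodeImpl record above describes conflict detection as the intersection of two change sets: the diff between the base version and the latest draft, and the diff between the base version and the incoming request. Below is a minimal standalone sketch of that intersection idea using plain string field keys instead of the real FieldContainerChange objects; the class and variable names are illustrative and not part of the Mesh API.

```java
import java.util.Set;
import java.util.stream.Collectors;

public class ConflictDetectionSketch {
    public static void main(String[] args) {
        // Fields changed between the base version and the latest draft (earlier edits by someone else).
        Set<String> baseVersionDiff = Set.of("title", "teaser");
        // Fields the incoming request (still based on the older version) wants to change.
        Set<String> requestVersionDiff = Set.of("teaser", "slug");

        // Only fields touched by both sides can actually collide.
        Set<String> conflicts = baseVersionDiff.stream()
                .filter(requestVersionDiff::contains)
                .collect(Collectors.toSet());

        if (!conflicts.isEmpty()) {
            // The real implementation throws a NodeVersionConflictException listing the field coordinates.
            System.out.println("Conflict detected on fields: " + conflicts); // [teaser]
        }
    }
}
```

Fields that appear in only one of the two diffs are harmless: they are either an older change the request does not touch, or a new change on a field nobody else has edited since the base version.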
Make sure publish consistency check will only check within the current branch
core/src/main/java/com/gentics/mesh/core/data/node/impl/NodeImpl.java
Make sure publish consistency check will only check within the current branch
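The moveTo() logic in the same record walks up the parent chain of the move target to ensure a node is never moved into its own subtree. Here is a small self-contained sketch of that ancestor walk; SimpleNode and wouldCreateCycle are hypothetical stand-ins, not the branch-aware Mesh node API.

```java
// Minimal stand-in for a tree node; the real NodeImpl resolves parents per branch UUID.
final class SimpleNode {
    final String uuid;
    final SimpleNode parent;
    SimpleNode(String uuid, SimpleNode parent) { this.uuid = uuid; this.parent = parent; }
}

public class MoveCheckSketch {
    /** Returns true if target is the source itself or lies inside the source's subtree. */
    static boolean wouldCreateCycle(SimpleNode source, SimpleNode target) {
        for (SimpleNode cursor = target; cursor != null; cursor = cursor.parent) {
            if (cursor.uuid.equals(source.uuid)) {
                return true; // moving source under target would detach the subtree from the root
            }
        }
        return false;
    }

    public static void main(String[] args) {
        SimpleNode root = new SimpleNode("root", null);
        SimpleNode a = new SimpleNode("a", root);
        SimpleNode b = new SimpleNode("b", a);
        System.out.println(wouldCreateCycle(a, b));    // true: b is inside a's subtree
        System.out.println(wouldCreateCycle(b, root)); // false: moving b under root is fine
    }
}
```

The walk terminates at the root (parent == null), so the check costs at most the depth of the tree.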
Java
apache-2.0
77c14d0735eb59b20bb6f930e9de96db41c925ba
0
esbullington/speechtojapanese
package com.ericbullington.speechtojapanese; import android.content.Context; import android.media.AudioFormat; import android.media.AudioRecord; import android.media.MediaRecorder; import android.os.Environment; import android.util.Log; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; // Approach to recording to audio sampling inspired by article "Audio Recording in .wav format" at: // http://www.edumobile.org/android/android-development/audio-recording-in-wav-format-in-android-programming/ // Actual WAV file header write method taken directly from AOSP project and licensed under Apache @SuppressWarnings("ResultOfMethodCallIgnored") public class AudioRecorder { // Class constants private static final String TAG="AudioRecorder"; // Audio file config private static final String FILE_EXTENSION = ".wav"; private static final String FILE_DIRECTORY = "speechtojapanese"; private static final String FILE_NAME = "temp.audio"; // Audio constants private static final int ENCODING_PCM_16BIT = AudioFormat.ENCODING_PCM_16BIT; private static final int CHANNEL_IN_MONO = AudioFormat.CHANNEL_IN_MONO; private static final int SAMPLE_RATEInHz = 44100; private Context mContext = null; private AudioRecord mRecorder = null; private int mAudioBufferSize = 0; private Thread mRecordingThread = null; private boolean mIsRecording = false; public AudioRecorder(Context mContext) { this.mContext = mContext; // Set audio buffer size for given sample rate mAudioBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATEInHz, CHANNEL_IN_MONO, ENCODING_PCM_16BIT); } public File getFileDirectory() { String filepath = Environment.getExternalStorageDirectory().getPath(); return new File(filepath, FILE_DIRECTORY); } private String getFilename(){ String filepath = Environment.getExternalStorageDirectory().getPath(); File file = new File(filepath,FILE_DIRECTORY); if(!file.exists()){ file.mkdirs(); } return (file.getAbsolutePath() + "/" + System.currentTimeMillis() + FILE_EXTENSION); } private String getTempFilename(){ String filepath = Environment.getExternalStorageDirectory().getPath(); File fileDir = new File(filepath,FILE_DIRECTORY); if(!fileDir.exists()){ fileDir.mkdirs(); } File file = new File(filepath,FILE_NAME); if(file.exists()) file.delete(); return (fileDir.getAbsolutePath() + "/" + FILE_NAME); } public void start(){ mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATEInHz, CHANNEL_IN_MONO, ENCODING_PCM_16BIT, mAudioBufferSize); if(mRecorder.getState() == 1) mRecorder.startRecording(); synchronized (this) { mIsRecording = true; } mRecordingThread = new Thread(new Runnable() { @Override public void run() { writeTempAudio(); } }); mRecordingThread.start(); } private void writeTempAudio(){ // Buffer size set in constructor by AudioRecord byte data[] = new byte[mAudioBufferSize]; String filename = getTempFilename(); FileOutputStream out = null; try { out = new FileOutputStream(filename); } catch (FileNotFoundException ex) { Log.e(TAG, "Error writing audio to file: ", ex); } int read = 0; if(null != out){ // Keep recording until user presses "stop" button while(mIsRecording){ read = mRecorder.read(data, 0, mAudioBufferSize); if(AudioRecord.ERROR_INVALID_OPERATION != read){ try { out.write(data); } catch (IOException ex) { Log.e(TAG, "Error writing audio to file: ", ex); } } } try { out.close(); } catch (IOException ex) { Log.e(TAG, "Error writing audio to 
file: ", ex); } } } public void stop(){ if(null != mRecorder){ synchronized (this) { mIsRecording = false; } if(mRecorder.getState() == 1) mRecorder.stop(); mRecorder.release(); mRecorder = null; mRecordingThread = null; } postWaveFile(getTempFilename(), getFilename()); new File(getTempFilename()).delete(); } private void postWaveFile(String inFilename,String outFilename){ FileInputStream in = null; FileOutputStream out = null; int channelCount = 2; int audioLength = 0; // Will always be at least 44 bytes long since that's how long the header is int dataLength = 44; byte[] data = new byte[mAudioBufferSize]; try { in = new FileInputStream(inFilename); out = new FileOutputStream(outFilename); audioLength = (int) in.getChannel().size(); dataLength = audioLength - 44; byte[] header = makeWavHeader(SAMPLE_RATEInHz, CHANNEL_IN_MONO, channelCount, dataLength); out.write(header, 0, 44); while(in.read(data) != -1){ out.write(data); } in.close(); out.close(); new PostSample(mContext).execute(outFilename); } catch (IOException ex) { Log.e(TAG, "Error writing audio: ", ex); } } // makeWaveHeader From AOSP // https://github.com/android/platform_frameworks_base/blob/master/core/java/android/speech/tts/FileSynthesisCallback.java // Apache License, Version 2.0 private byte[] makeWavHeader(int SAMPLE_RATEInHzInHz, int audioFormat, int channelCount, int dataLength) throws IOException { final int WAV_HEADER_LENGTH = 44; int sampleSizeInBytes = (audioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2); int byteRate = SAMPLE_RATEInHzInHz * sampleSizeInBytes * channelCount; short blockAlign = (short) (sampleSizeInBytes * channelCount); short bitsPerSample = (short) (sampleSizeInBytes * 8); // byte[] header = new byte[44]; byte[] headerBuf = new byte[WAV_HEADER_LENGTH]; ByteBuffer header = ByteBuffer.wrap(headerBuf); header.order(ByteOrder.LITTLE_ENDIAN); header.put(new byte[]{ 'R', 'I', 'F', 'F' }); header.putInt(dataLength + WAV_HEADER_LENGTH - 8); // RIFF chunk size header.put(new byte[]{ 'W', 'A', 'V', 'E' }); header.put(new byte[]{ 'f', 'm', 't', ' ' }); header.putInt(16); // size of fmt chunk header.putShort((short) ENCODING_PCM_16BIT); header.putShort((short) channelCount); header.putInt(SAMPLE_RATEInHzInHz); header.putInt(byteRate); header.putShort(blockAlign); header.putShort(bitsPerSample); header.put(new byte[]{ 'd', 'a', 't', 'a' }); header.putInt(dataLength); return headerBuf; } }
src/main/java/com/ericbullington/speechtojapanese/AudioRecorder.java
package com.ericbullington.speechtojapanese; import android.content.Context; import android.media.AudioFormat; import android.media.AudioRecord; import android.media.MediaRecorder; import android.os.Environment; import android.util.Log; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; // Approach to recording to audio sampling inspired by article "Audio Recording in .wav format" at: // http://www.edumobile.org/android/android-development/audio-recording-in-wav-format-in-android-programming/ // Actual WAV file header write method taken directly from AOSP project and licensed under Apache @SuppressWarnings("ResultOfMethodCallIgnored") public class AudioRecorder { private static final String TAG="AudioRecorder"; private static final String FILE_EXTENSION = ".wav"; private static final String FILE_DIRECTORY = "speechtojapanese"; private static final String FILE_NAME = "temp.audio"; private static final int BPP = 16; private static final int SAMPLE_RATE = 44100; private static final int WAV_FORMAT_PCM = AudioFormat.ENCODING_PCM_16BIT; private Context mContext = null; private AudioRecord mRecorder = null; private int audioBufferSize = 0; private Thread recordingThread = null; private boolean isRecording = false; public AudioRecorder(Context mContext) { this.mContext = mContext; // Set audio buffer size for given sample rate audioBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); } public File getFileDirectory() { String filepath = Environment.getExternalStorageDirectory().getPath(); return new File(filepath, FILE_DIRECTORY); } private String getFilename(){ String filepath = Environment.getExternalStorageDirectory().getPath(); File file = new File(filepath,FILE_DIRECTORY); if(!file.exists()){ file.mkdirs(); } return (file.getAbsolutePath() + "/" + System.currentTimeMillis() + FILE_EXTENSION); } private String getTempFilename(){ String filepath = Environment.getExternalStorageDirectory().getPath(); File fileDir = new File(filepath,FILE_DIRECTORY); if(!fileDir.exists()){ fileDir.mkdirs(); } File file = new File(filepath,FILE_NAME); if(file.exists()) file.delete(); return (fileDir.getAbsolutePath() + "/" + FILE_NAME); } public void start(){ mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, WAV_FORMAT_PCM, audioBufferSize); if(mRecorder.getState() == 1) mRecorder.startRecording(); synchronized (this) { isRecording = true; } recordingThread = new Thread(new Runnable() { @Override public void run() { writeTempAudio(); } }); recordingThread.start(); } private void writeTempAudio(){ byte data[] = new byte[audioBufferSize]; String filename = getTempFilename(); FileOutputStream out = null; try { out = new FileOutputStream(filename); } catch (FileNotFoundException ex) { Log.i(TAG, "Error writing audio to file: ", ex); } int read = 0; if(null != out){ // Keep recording until user presses "stop" button while(isRecording){ read = mRecorder.read(data, 0, audioBufferSize); if(AudioRecord.ERROR_INVALID_OPERATION != read){ try { out.write(data); } catch (IOException ex) { Log.i(TAG, "Error writing audio to file: ", ex); } } } try { out.close(); } catch (IOException ex) { Log.i(TAG, "Error writing audio to file: ", ex); } } } public void stop(){ if(null != mRecorder){ synchronized (this) { isRecording = false; } if(mRecorder.getState() == 1) 
mRecorder.stop(); mRecorder.release(); mRecorder = null; recordingThread = null; } postWaveFile(getTempFilename(), getFilename()); new File(getTempFilename()).delete(); } private void postWaveFile(String inFilename,String outFilename){ FileInputStream in = null; FileOutputStream out = null; int channelNumber = 2; int audioLength = 0; int dataLength = 36; byte[] data = new byte[audioBufferSize]; try { in = new FileInputStream(inFilename); out = new FileOutputStream(outFilename); audioLength = (int) in.getChannel().size(); dataLength = audioLength + 36; byte[] header = makeWavHeader(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, channelNumber, dataLength); out.write(header, 0, 44); while(in.read(data) != -1){ out.write(data); } in.close(); out.close(); new PostSample(mContext).execute(outFilename); } catch (IOException e) { e.printStackTrace(); } } // makeWaveHeader From AOSP // https://github.com/android/platform_frameworks_base/blob/master/core/java/android/speech/tts/FileSynthesisCallback.java // Apache License, Version 2.0 private byte[] makeWavHeader(int SAMPLE_RATEInHz, int audioFormat, int channelCount, int dataLength) throws IOException { final int WAV_HEADER_LENGTH = 44; int sampleSizeInBytes = (audioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2); int byteRate = SAMPLE_RATEInHz * sampleSizeInBytes * channelCount; short blockAlign = (short) (sampleSizeInBytes * channelCount); short bitsPerSample = (short) (sampleSizeInBytes * 8); // byte[] header = new byte[44]; byte[] headerBuf = new byte[WAV_HEADER_LENGTH]; ByteBuffer header = ByteBuffer.wrap(headerBuf); header.order(ByteOrder.LITTLE_ENDIAN); header.put(new byte[]{ 'R', 'I', 'F', 'F' }); header.putInt(dataLength + WAV_HEADER_LENGTH - 8); // RIFF chunk size header.put(new byte[]{ 'W', 'A', 'V', 'E' }); header.put(new byte[]{ 'f', 'm', 't', ' ' }); header.putInt(16); // size of fmt chunk header.putShort((short) WAV_FORMAT_PCM); header.putShort((short) channelCount); header.putInt(SAMPLE_RATEInHz); header.putInt(byteRate); header.putShort(blockAlign); header.putShort(bitsPerSample); header.put(new byte[]{ 'd', 'a', 't', 'a' }); header.putInt(dataLength); return headerBuf; } }
Refactor and fix WAV header bug in AudioRecorder class
src/main/java/com/ericbullington/speechtojapanese/AudioRecorder.java
Refactor and fix WAV header bug in AudioRecorder class
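The commit above fixes the WAV header written by AudioRecorder, and the record builds that header with a little-endian ByteBuffer. As a reference point, this is a sketch of the canonical 44-byte PCM header layout; it is not the project's code. Note that a standard header stores 1 (PCM) in the format tag and the raw payload size, not the whole file size, in the final length field.

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class WavHeaderSketch {
    /**
     * Builds a canonical 44-byte PCM WAV header. dataLength must be the number of
     * raw PCM bytes that follow the header (not the size of the whole file).
     */
    static byte[] pcmWavHeader(int sampleRate, short channels, short bitsPerSample, int dataLength) {
        short blockAlign = (short) (channels * bitsPerSample / 8);
        int byteRate = sampleRate * blockAlign;

        ByteBuffer header = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
        header.put(new byte[]{'R', 'I', 'F', 'F'});
        header.putInt(36 + dataLength);               // RIFF chunk size = total file size minus 8
        header.put(new byte[]{'W', 'A', 'V', 'E'});
        header.put(new byte[]{'f', 'm', 't', ' '});
        header.putInt(16);                            // fmt chunk size for plain PCM
        header.putShort((short) 1);                   // audio format tag: 1 = PCM
        header.putShort(channels);
        header.putInt(sampleRate);
        header.putInt(byteRate);
        header.putShort(blockAlign);
        header.putShort(bitsPerSample);
        header.put(new byte[]{'d', 'a', 't', 'a'});
        header.putInt(dataLength);                    // size of the PCM payload only
        return header.array();
    }

    public static void main(String[] args) {
        byte[] header = pcmWavHeader(44100, (short) 1, (short) 16, 1_000_000);
        System.out.println("Header length: " + header.length); // 44
    }
}
```

The diff in this record changes how dataLength is computed (from audioLength + 36 to audioLength - 44), which is consistent with that kind of off-by-header mistake.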
Java
apache-2.0
a7decca33a0ccad3322d3b099bc44bf02d0ed982
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.remoteServer.agent.impl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.util.containers.hash.HashSet; import org.jetbrains.annotations.Nullable; import java.lang.reflect.*; import java.net.URL; import java.net.URLClassLoader; import java.util.Arrays; import java.util.Set; /** * @author michael.golubev */ public class RemoteAgentReflectiveProxyFactory extends RemoteAgentProxyFactoryBase { private static final Logger LOG = Logger.getInstance(RemoteAgentReflectiveProxyFactory.class); private final RemoteAgentClassLoaderCache myClassLoaderCache; public RemoteAgentReflectiveProxyFactory(@Nullable RemoteAgentClassLoaderCache classLoaderCache, CallerClassLoaderProvider callerClassLoaderProvider) { super(callerClassLoaderProvider); myClassLoaderCache = classLoaderCache; } @Override protected ClassLoader createAgentClassLoader(URL[] agentLibraryUrls) throws Exception { Set<URL> urls = new HashSet<>(); urls.addAll(Arrays.asList(agentLibraryUrls)); return myClassLoaderCache == null ? new URLClassLoader(urls.toArray(new URL[0]), null) : myClassLoaderCache.getOrCreateClassLoader(urls); } @Override protected InvocationHandler createInvocationHandler(Object agentImpl, ClassLoader agentClassLoader, ClassLoader callerClassLoader) { return new ReflectiveInvocationHandler(agentImpl, agentClassLoader, callerClassLoader); } private static class ReflectiveInvocationHandler implements InvocationHandler { private final Object myTarget; private final ClassLoader myTargetClassLoader; private final ClassLoader mySourceClassLoader; ReflectiveInvocationHandler(Object target, ClassLoader targetClassLoader, ClassLoader sourceClassLoader) { myTarget = target; myTargetClassLoader = targetClassLoader; mySourceClassLoader = sourceClassLoader; } @Nullable @Override public Object invoke(Object proxy, final Method method, final Object[] args) { ClassLoader initialClassLoader = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader(myTargetClassLoader); Class<?>[] parameterTypes = method.getParameterTypes(); Class<?>[] delegateParameterTypes = new Class<?>[parameterTypes.length]; Object[] delegateArgs = new Object[parameterTypes.length]; for (int i = 0; i < parameterTypes.length; i++) { Mirror parameterMirror = new Mirror(parameterTypes[i], args[i], mySourceClassLoader, myTargetClassLoader); delegateParameterTypes[i] = parameterMirror.getMirrorType(); delegateArgs[i] = parameterMirror.getMirrorValue(); } Method delegateMethod = myTarget.getClass().getMethod(method.getName(), delegateParameterTypes); delegateMethod.setAccessible(true); Object result = delegateMethod.invoke(myTarget, delegateArgs); Mirror resultMirror = new Mirror(delegateMethod.getReturnType(), result, myTargetClassLoader, mySourceClassLoader); return resultMirror.getMirrorValue(); } catch (IllegalAccessException | ClassNotFoundException | NoSuchMethodException | InvocationTargetException e) { 
LOG.error(e); return null; } finally { Thread.currentThread().setContextClassLoader(initialClassLoader); } } } private static class Mirror { private final Class<?> myMirrorType; private final Object myMirrorValue; Mirror(Class<?> type, Object value, ClassLoader classLoader, ClassLoader mirrorClassLoader) throws ClassNotFoundException { if (type.isArray()) { Class<?> componentType = type.getComponentType(); Mirror componentMirror = new Mirror(componentType, null, classLoader, mirrorClassLoader); int length = value == null ? 0 : Array.getLength(value); Object mirrorValue = Array.newInstance(componentMirror.getMirrorType(), length); for (int i = 0; i < length; i++) { Mirror itemMirror = new Mirror(componentType, Array.get(value, i), classLoader, mirrorClassLoader); Array.set(mirrorValue, i, itemMirror.getMirrorValue()); } myMirrorType = mirrorValue.getClass(); myMirrorValue = value == null ? null : mirrorValue; } else if (type.isEnum()) { @SuppressWarnings("unchecked") Class<? extends Enum> mirroredEnum = (Class<? extends Enum>)mirrorClassLoader.loadClass(type.getName()); myMirrorType = mirroredEnum; //noinspection unchecked myMirrorValue = value == null ? null : Enum.valueOf(mirroredEnum, ((Enum)value).name()); } else if (type.isInterface()) { myMirrorType = mirrorClassLoader.loadClass(type.getName()); myMirrorValue = value == null ? null : Proxy.newProxyInstance(mirrorClassLoader, new Class[]{myMirrorType}, new ReflectiveInvocationHandler(value, classLoader, mirrorClassLoader)); } else { myMirrorType = type; myMirrorValue = value; } } public Class<?> getMirrorType() { return myMirrorType; } public Object getMirrorValue() { return myMirrorValue; } } }
platform/remote-servers/impl/src/com/intellij/remoteServer/agent/impl/RemoteAgentReflectiveProxyFactory.java
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.remoteServer.agent.impl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.util.containers.hash.HashSet; import org.jetbrains.annotations.Nullable; import java.lang.reflect.*; import java.net.URL; import java.net.URLClassLoader; import java.util.Arrays; import java.util.Set; /** * @author michael.golubev */ public class RemoteAgentReflectiveProxyFactory extends RemoteAgentProxyFactoryBase { private static final Logger LOG = Logger.getInstance(RemoteAgentReflectiveProxyFactory.class); private final RemoteAgentClassLoaderCache myClassLoaderCache; public RemoteAgentReflectiveProxyFactory(@Nullable RemoteAgentClassLoaderCache classLoaderCache, CallerClassLoaderProvider callerClassLoaderProvider) { super(callerClassLoaderProvider); myClassLoaderCache = classLoaderCache; } @Override protected ClassLoader createAgentClassLoader(URL[] agentLibraryUrls) throws Exception { Set<URL> urls = new HashSet<>(); urls.addAll(Arrays.asList(agentLibraryUrls)); return myClassLoaderCache == null ? new URLClassLoader(urls.toArray(new URL[0]), null) : myClassLoaderCache.getOrCreateClassLoader(urls); } @Override protected InvocationHandler createInvocationHandler(Object agentImpl, ClassLoader agentClassLoader, ClassLoader callerClassLoader) { return new ReflectiveInvocationHandler(agentImpl, agentClassLoader, callerClassLoader); } private static class ReflectiveInvocationHandler implements InvocationHandler { private final Object myTarget; private final ClassLoader myTargetClassLoader; private final ClassLoader mySourceClassLoader; ReflectiveInvocationHandler(Object target, ClassLoader targetClassLoader, ClassLoader sourceClassLoader) { myTarget = target; myTargetClassLoader = targetClassLoader; mySourceClassLoader = sourceClassLoader; } @Nullable @Override public Object invoke(Object proxy, final Method method, final Object[] args) { ClassLoader initialClassLoader = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader(myTargetClassLoader); Class<?>[] parameterTypes = method.getParameterTypes(); Class<?>[] delegateParameterTypes = new Class<?>[parameterTypes.length]; Object[] delegateArgs = new Object[parameterTypes.length]; for (int i = 0; i < parameterTypes.length; i++) { Mirror parameterMirror = new Mirror(parameterTypes[i], args[i], mySourceClassLoader, myTargetClassLoader); delegateParameterTypes[i] = parameterMirror.getMirrorType(); delegateArgs[i] = parameterMirror.getMirrorValue(); } Method delegateMethod = myTarget.getClass().getMethod(method.getName(), delegateParameterTypes); delegateMethod.setAccessible(true); Object result = delegateMethod.invoke(myTarget, delegateArgs); Mirror resultMirror = new Mirror(delegateMethod.getReturnType(), result, myTargetClassLoader, mySourceClassLoader); return resultMirror.getMirrorValue(); } catch (IllegalAccessException | ClassNotFoundException | NoSuchMethodException | InvocationTargetException e) { 
LOG.error(e); return null; } finally { Thread.currentThread().setContextClassLoader(initialClassLoader); } } } private static class Mirror { private final Class<?> myMirrorType; private final Object myMirrorValue; Mirror(Class<?> type, Object value, ClassLoader classLoader, ClassLoader mirrorClassLoader) throws ClassNotFoundException { if (type.isArray()) { Class<?> componentType = type.getComponentType(); Mirror componentMirror = new Mirror(componentType, null, classLoader, mirrorClassLoader); int length = value == null ? 0 : Array.getLength(value); Object mirrorValue = Array.newInstance(componentMirror.getMirrorType(), length); for (int i = 0; i < length; i++) { Mirror itemMirror = new Mirror(componentType, Array.get(value, i), classLoader, mirrorClassLoader); Array.set(mirrorValue, i, itemMirror.getMirrorValue()); } myMirrorType = mirrorValue.getClass(); myMirrorValue = value == null ? null : mirrorValue; } else if (type.isInterface()) { myMirrorType = mirrorClassLoader.loadClass(type.getName()); myMirrorValue = value == null ? null : Proxy.newProxyInstance(mirrorClassLoader, new Class[]{myMirrorType}, new ReflectiveInvocationHandler(value, classLoader, mirrorClassLoader)); } else { myMirrorType = type; myMirrorValue = value; } } public Class<?> getMirrorType() { return myMirrorType; } public Object getMirrorValue() { return myMirrorValue; } } }
IDEA-215876 - Docker: separate from old remote-servers API to complete switch to new services view - mirror Enum values to allow them to pass agent reflective barrier GitOrigin-RevId: 36205667c3dbdf6e1d47f50e5bfd2d2c8696318b
platform/remote-servers/impl/src/com/intellij/remoteServer/agent/impl/RemoteAgentReflectiveProxyFactory.java
IDEA-215876 - Docker: separate from old remote-servers API to complete switch to new services view
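The commit above adds enum handling to the Mirror class so enum values can cross the agent class-loader boundary: the constant is re-created with Enum.valueOf against the same enum type loaded by the other class loader and matched by name(). Below is a rough, runnable sketch of that mechanism; EnumMirrorSketch and mirrorEnum are invented names, and for simplicity the demo "mirrors" into the system class loader, whereas the real factory uses a separate URLClassLoader for the agent libraries.

```java
import java.util.concurrent.TimeUnit;

public class EnumMirrorSketch {
    /**
     * Re-creates an enum constant against a class of the same name loaded by another
     * class loader, matching it by name(), which is the trick the Mirror class uses.
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    static Object mirrorEnum(Enum<?> value, ClassLoader mirrorClassLoader) throws ClassNotFoundException {
        Class<? extends Enum> mirroredType =
                (Class<? extends Enum>) mirrorClassLoader.loadClass(value.getDeclaringClass().getName());
        return Enum.valueOf(mirroredType, value.name());
    }

    public static void main(String[] args) throws Exception {
        // With a real agent class loader the returned constant would belong to a distinct Class instance,
        // so it could not simply be cast back; it has to be consumed reflectively on the other side.
        Object mirrored = mirrorEnum(TimeUnit.SECONDS, ClassLoader.getSystemClassLoader());
        System.out.println(mirrored + " / same class: "
                + (mirrored.getClass() == TimeUnit.SECONDS.getClass()));
    }
}
```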
Java
apache-2.0
8f9ab0fa3d3a923108485231ca6cc3934f8aa0c4
0
doubledutch/ProjectPika
package me.doubledutch.pikadb; import java.io.*; import java.util.*; import org.json.*; import me.doubledutch.pikadb.query.*; public class Table{ private String name; private PageFile pageFile; private int rootPageId; private Column metaData; private Map<String,Column> columnMap; public Table(String name,PageFile pageFile,int rootPageId,boolean preserve_order) throws IOException{ this.name=name; this.pageFile=pageFile; this.rootPageId=rootPageId; metaData=new Column(name+".metadata",pageFile,rootPageId,false); loadColumns(); } protected void enforceConstraints(int... constraints) throws IOException{ // TODO: Implement } private void loadColumns() throws IOException{ ObjectSet set=new ObjectSet(true); Map<String,Column> tmp=new HashMap<String,Column>(); ColumnResult result=metaData.scan(set); List<Variant> list=result.getVariantList(); int index=0; while(list.size()>index){ Variant.String name=(Variant.String)list.get(index++); Variant.Integer pageId=(Variant.Integer)list.get(index++); Column col=new Column(name.getValue(),pageFile,pageId.getValue(),true); tmp.put(name.getValue(),col); } columnMap=tmp; } public void declareColumn(String name) throws IOException{ } public void declareColumn(String name,int... constraints) throws IOException{ } public String[] getColumns(){ return columnMap.keySet().toArray(new String[0]); } private Column getColumn(String name) throws IOException{ if(!columnMap.containsKey(name)){ return createColumn(name); } return columnMap.get(name); } private Column createColumn(String name) throws IOException{ Page page=pageFile.createPage(); metaData.append(new Variant.String(-1,name)); metaData.append(new Variant.Integer(-1,page.getId())); pageFile.saveChanges(false); Column col=new Column(name,pageFile,page.getId(),true); columnMap.put(name,col); return col; } public void delete(int oid) throws IOException{ for(Column col:columnMap.values()){ col.delete(oid); } } public void update(int oid,JSONObject obj) throws IOException,JSONException{ Iterator<String> it=obj.keys(); while(it.hasNext()){ String key=it.next(); Column col=getColumn(key); Object value=obj.get(key); Variant variant=Variant.createVariant(oid,value); col.delete(oid); col.append(variant); } } public void add(int oid,JSONObject obj) throws IOException,JSONException{ Iterator<String> it=obj.keys(); while(it.hasNext()){ String key=it.next(); Column col=getColumn(key); Object value=obj.get(key); Variant variant=Variant.createVariant(oid,value); col.append(variant); } } public Query select(String... 
columns){ Query q=new Query(this,columns); return q; } public ResultSet scan() throws IOException,JSONException{ return scan(columnMap.keySet().toArray(new String[0])); } public ResultSet scan(String[] columns) throws IOException,JSONException{ ObjectSet set=new ObjectSet(true); return scan(set,columns); } public ResultSet scan(int oid) throws IOException,JSONException{ return scan(oid,columnMap.keySet().toArray(new String[0])); } public ResultSet scan(int oid,String[] columns) throws IOException,JSONException{ ObjectSet set=new ObjectSet(false); set.addOID(oid); return scan(set,columns); /*JSONObject obj=set.getObject(oid); Iterator<String> it=obj.keys(); if(!it.hasNext()){ return null; }*/ } public ResultSet scan(ObjectSet set) throws IOException,JSONException{ return scan(set,columnMap.keySet().toArray(new String[0])); } public ResultSet scan(ObjectSet set,String[] columns) throws IOException,JSONException{ ResultSet result=new ResultSet(); result.startTimer(); for(String columnName:columns){ Column col=columnMap.get(columnName); ColumnResult colResult=col.scan(set); List<Variant> list=colResult.getVariantList(); for(Variant v:list){ set.addVariant(columnName,v); } result.addExecutionPlan(colResult.getExecutionPlan()); } result.setObjectList(set.getObjectList()); result.endTimer(); return result; } }
src/main/java/me/doubledutch/pikadb/Table.java
package me.doubledutch.pikadb; import java.io.*; import java.util.*; import org.json.*; import me.doubledutch.pikadb.query.*; public class Table{ private String name; private PageFile pageFile; private int rootPageId; private Column metaData; private Map<String,Column> columnMap; public Table(String name,PageFile pageFile,int rootPageId,boolean preserve_order) throws IOException{ this.name=name; this.pageFile=pageFile; this.rootPageId=rootPageId; metaData=new Column(name+".metadata",pageFile,rootPageId,false); loadColumns(); } protected void enforceConstraints(int... constraints) throws IOException{ // TODO: Implement } private void loadColumns() throws IOException{ ObjectSet set=new ObjectSet(true); Map<String,Column> tmp=new HashMap<String,Column>(); ColumnResult result=metaData.scan(set); List<Variant> list=result.getVariantList(); int index=0; while(list.size()>index){ Variant.String name=(Variant.String)list.get(index++); Variant.Integer pageId=(Variant.Integer)list.get(index++); Column col=new Column(name.getValue(),pageFile,pageId.getValue(),true); tmp.put(name.getValue(),col); } columnMap=tmp; } public void declareColumn(String name) throws IOException{ } public void declareColumn(String name,int... constraints) throws IOException{ } private Column getColumn(String name) throws IOException{ if(!columnMap.containsKey(name)){ return createColumn(name); } return columnMap.get(name); } private Column createColumn(String name) throws IOException{ Page page=pageFile.createPage(); metaData.append(new Variant.String(-1,name)); metaData.append(new Variant.Integer(-1,page.getId())); pageFile.saveChanges(false); Column col=new Column(name,pageFile,page.getId(),true); columnMap.put(name,col); return col; } public void delete(int oid) throws IOException{ for(Column col:columnMap.values()){ col.delete(oid); } } public void update(int oid,JSONObject obj) throws IOException,JSONException{ Iterator<String> it=obj.keys(); while(it.hasNext()){ String key=it.next(); Column col=getColumn(key); Object value=obj.get(key); Variant variant=Variant.createVariant(oid,value); col.delete(oid); col.append(variant); } } public void add(int oid,JSONObject obj) throws IOException,JSONException{ Iterator<String> it=obj.keys(); while(it.hasNext()){ String key=it.next(); Column col=getColumn(key); Object value=obj.get(key); Variant variant=Variant.createVariant(oid,value); col.append(variant); } } public Query select(String... 
columns){ Query q=new Query(this,columns); return q; } public ResultSet scan() throws IOException,JSONException{ return scan(columnMap.keySet().toArray(new String[0])); } public ResultSet scan(String[] columns) throws IOException,JSONException{ ObjectSet set=new ObjectSet(true); return scan(set,columns); } public ResultSet scan(int oid) throws IOException,JSONException{ return scan(oid,columnMap.keySet().toArray(new String[0])); } public ResultSet scan(int oid,String[] columns) throws IOException,JSONException{ ObjectSet set=new ObjectSet(false); set.addOID(oid); return scan(set,columns); /*JSONObject obj=set.getObject(oid); Iterator<String> it=obj.keys(); if(!it.hasNext()){ return null; }*/ } public ResultSet scan(ObjectSet set) throws IOException,JSONException{ return scan(set,columnMap.keySet().toArray(new String[0])); } public ResultSet scan(ObjectSet set,String[] columns) throws IOException,JSONException{ ResultSet result=new ResultSet(); result.startTimer(); for(String columnName:columns){ Column col=columnMap.get(columnName); ColumnResult colResult=col.scan(set); List<Variant> list=colResult.getVariantList(); for(Variant v:list){ set.addVariant(columnName,v); } result.addExecutionPlan(colResult.getExecutionPlan()); } result.setObjectList(set.getObjectList()); result.endTimer(); return result; } }
added getColumns
src/main/java/me/doubledutch/pikadb/Table.java
added getColumns
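For context on the Table record above: columns are registered in a metadata column as interleaved (name, root page id) pairs, which loadColumns() reads pairwise, and the new getColumns() method simply exposes the resulting key set as a String array. A hedged sketch of that pairwise read with plain Java types instead of pikadb's Variant classes; readColumnIndex is an invented helper, not part of the project.

```java
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ColumnMetadataSketch {
    /**
     * Reads an interleaved list of the form [name, pageId, name, pageId, ...]
     * into an insertion-ordered column index, mirroring what loadColumns() does.
     */
    static Map<String, Integer> readColumnIndex(List<Object> interleaved) {
        Map<String, Integer> columns = new LinkedHashMap<>();
        for (int i = 0; i + 1 < interleaved.size(); i += 2) {
            String name = (String) interleaved.get(i);
            Integer rootPageId = (Integer) interleaved.get(i + 1);
            columns.put(name, rootPageId);
        }
        return columns;
    }

    public static void main(String[] args) {
        List<Object> raw = List.of("title", 3, "price", 7);
        Map<String, Integer> index = readColumnIndex(raw);
        // Equivalent of getColumns(): the key set rendered as a String array.
        String[] columnNames = index.keySet().toArray(new String[0]);
        System.out.println(columnNames.length); // 2
        System.out.println(index);              // {title=3, price=7}
    }
}
```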
Java
apache-2.0
0dcfb8dbe7bd0190cf08663733003f9194b06f20
0
wyona/yanel,wyona/yanel,wyona/yanel,baszero/yanel,wyona/yanel,baszero/yanel,baszero/yanel,baszero/yanel,baszero/yanel,wyona/yanel,wyona/yanel,baszero/yanel
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.impl.resources; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.OutputStream; import java.util.Calendar; import java.util.Enumeration; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.sax.TransformerHandler; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import javax.xml.transform.dom.DOMSource; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.w3c.dom.Document; import org.apache.commons.io.FileUtils; import org.apache.log4j.Category; import org.apache.tools.ant.Project; import org.apache.tools.ant.DirectoryScanner; import org.apache.tools.ant.types.PatternSet; import org.apache.tools.ant.types.FileSet; import org.apache.tools.ant.types.ZipFileSet; import org.apache.tools.ant.taskdefs.optional.junit.JUnitTask; import org.apache.tools.ant.taskdefs.optional.junit.BatchTest; import org.apache.xml.resolver.tools.CatalogResolver; import org.apache.xml.serializer.Serializer; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.attributes.viewable.ViewDescriptor; import org.wyona.yanel.core.serialization.SerializerFactory; import org.wyona.yanel.core.source.ResourceResolver; import org.wyona.yanel.core.transformation.I18nTransformer2; import org.wyona.yanel.core.transformation.XIncludeTransformer; import org.wyona.yanel.core.util.PathUtil; import org.xml.sax.InputSource; import org.xml.sax.XMLReader; import org.xml.sax.helpers.XMLReaderFactory; /** * */ public class TestingControlResource extends Resource implements ViewableV2 { private static final String JUNIT_JAR = "yanel-JunitTests.jar"; private static final String HTMLUNIT_JAR = "yanel-HtmlUnitTests.jar"; private static Category log = Category.getInstance(TestingControlResource.class); private boolean ajaxBrowser = false; private File JunitJarLocation; private File HtmlunitJarLocation; private File tmpResultDir; public TestingControlResource() { } /** * */ public boolean exists() { return true; } /** * */ public long getSize() { return -1; } /** * */ public String getMimeType(String viewId) { if (viewId != null && viewId.equals("source")) return "application/xml"; return "application/xhtml+xml"; } /** * */ public View getView(String viewId) { if (request.getHeader("User-Agent").indexOf("rv:1.7") < 0) { ajaxBrowser = true; } try { setLocations(); } catch (Exception e) { // sb.append("<p>Could not get the Locations: " + e + "</p>"); log.error(e.getMessage(), e); } View view = new View(); String mimeType = getMimeType(viewId); view.setMimeType(mimeType); try { org.wyona.yarep.core.Repository repo = 
getRealm().getRepository(); if (viewId != null && viewId.equals("source")) { view.setInputStream(new java.io.StringBufferInputStream(getScreen())); view.setMimeType("application/xml"); return view; } String[] xsltPath = getXSLTPath(getPath()); if (xsltPath != null) { // create reader: XMLReader xmlReader = XMLReaderFactory.createXMLReader(); CatalogResolver catalogResolver = new CatalogResolver(); xmlReader.setEntityResolver(catalogResolver); // create xslt transformer: SAXTransformerFactory tf = (SAXTransformerFactory) TransformerFactory.newInstance(); TransformerHandler[] xsltHandlers = new TransformerHandler[xsltPath.length]; for (int i = 0; i < xsltPath.length; i++) { xsltHandlers[i] = tf.newTransformerHandler(new StreamSource(repo.getNode(xsltPath[i]) .getInputStream())); xsltHandlers[i].getTransformer().setParameter("yanel.path.name", org.wyona.commons.io.PathUtil.getName(getPath())); xsltHandlers[i].getTransformer().setParameter("yanel.path", getPath()); xsltHandlers[i].getTransformer().setParameter("yanel.back2context", PathUtil.backToContext(realm, getPath())); xsltHandlers[i].getTransformer().setParameter("yarep.back2realm", PathUtil.backToRealm(getPath())); xsltHandlers[i].getTransformer().setParameter("language", getRequestedLanguage()); } // create i18n transformer: I18nTransformer2 i18nTransformer = new I18nTransformer2("global", getRequestedLanguage(), getRealm().getDefaultLanguage()); i18nTransformer.setEntityResolver(catalogResolver); // create xinclude transformer: XIncludeTransformer xIncludeTransformer = new XIncludeTransformer(); ResourceResolver resolver = new ResourceResolver(this); xIncludeTransformer.setResolver(resolver); // create serializer: Serializer serializer = SerializerFactory.getSerializer(SerializerFactory.XHTML_STRICT); ByteArrayOutputStream baos = new ByteArrayOutputStream(); // chain everything together (create a pipeline): xmlReader.setContentHandler(xsltHandlers[0]); for (int i = 0; i < xsltHandlers.length - 1; i++) { xsltHandlers[i].setResult(new SAXResult(xsltHandlers[i + 1])); } xsltHandlers[xsltHandlers.length - 1].setResult(new SAXResult(xIncludeTransformer)); xIncludeTransformer.setResult(new SAXResult(i18nTransformer)); i18nTransformer.setResult(new SAXResult(serializer.asContentHandler())); serializer.setOutputStream(baos); // execute pipeline: xmlReader.parse(new InputSource(new java.io.StringBufferInputStream(getScreen()))); // write result into view: view.setInputStream(new ByteArrayInputStream(baos.toByteArray())); return view; } log.debug("Mime-Type: " + mimeType); view.setInputStream(new java.io.StringBufferInputStream(getScreen())); return view; } catch (Exception e) { log.error(e + " (" + getPath() + ", " + getRealm() + ")", e); } view.setInputStream(new java.io.StringBufferInputStream(getScreen())); return view; } /** * */ public ViewDescriptor[] getViewDescriptors() { ViewDescriptor[] vd = new ViewDescriptor[2]; vd[0] = new ViewDescriptor("default"); vd[0].setMimeType(getMimeType(null)); vd[1] = new ViewDescriptor("source"); vd[1].setMimeType(getMimeType("source")); return vd; } /** * Flow */ private String getScreen() { StringBuffer sbContent = new StringBuffer(); Enumeration parameters = request.getParameterNames(); if (request.getSession().getAttribute("tmpResultDir") != null) { if (request.getParameterValues("ajaxshowprogress") != null) return showProgress().toString(); sbContent.append(showProgress()); } else if (!parameters.hasMoreElements()) { sbContent.append(getPlainRequest()); } else { if 
(request.getParameterValues("testnames") != null) { if (request.getParameterValues("ajaxexecutetest") != null) return executeTests().toString(); sbContent.append(executeTests()); } else { log.info("Fallback ..."); sbContent.append(getPlainRequest()); } } StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>"); sb.append("<html xmlns=\"http://www.w3.org/1999/xhtml\">"); sb.append("<head><title>Testing Control</title>"); if (request.getSession().getAttribute("tmpResultDir") != null && !ajaxBrowser) { sb.append("<meta http-equiv=\"refresh\" content=\"5; URL=\"/>"); } sb.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"" + PathUtil.getGlobalHtdocsPath(this) + "yanel-css/progressBar.css\"/>"); sb.append("<script src=\"" + PathUtil.getGlobalHtdocsPath(this) + "yanel-js/prototype.js\" type=\"text/javascript\"></script>"); sb.append("<script src=\"" + PathUtil.getGlobalHtdocsPath(this) + "yanel-js/progressBar.js\" type=\"text/javascript\"></script>"); sb.append("<script src=\"" + PathUtil.getResourcesHtdocsPath(this) + "js/ajaxexecutetests.js\" type=\"text/javascript\"></script>"); sb.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"" + PathUtil.getResourcesHtdocsPath(this) + "css/testingcontroler.css\"/>"); sb.append("</head>"); sb.append("<body>"); sb.append("<span id=\"yanelprogressbarph\"/>"); sb.append("<div id=\"ajaxreplace\">"); sb.append(sbContent); sb.append("</div>"); sb.append("</body>"); sb.append("</html>"); return sb.toString(); } private StringBuffer getPlainRequest() { StringBuffer sb = new StringBuffer(); sb.append("<form method=\"post\">"); sb.append("<h3>HtmlUnit Tests</h3>"); sb.append("<ul id=\"htmlunit\">"); String[] allHtmlUnitTestNames = getAllTestNames("htmlunit"); for (int i = 0; i < allHtmlUnitTestNames.length; i++) { String title = allHtmlUnitTestNames[i].substring(allHtmlUnitTestNames[i].lastIndexOf("/") + 1) .replaceAll(".class", ""); sb.append("<li title=\"" + "\">"); sb.append(title); sb.append("<input type=\"checkbox\" name=\"testnames\" value=\"" + allHtmlUnitTestNames[i] + "\"/>"); sb.append("</li>"); } sb.append("</ul>"); sb.append("<hr/>"); sb.append("<h3>JUnit Tests</h3>"); sb.append("<ul id=\"junit\">"); String[] allJUnitTestNames = getAllTestNames("junit"); for (int i = 0; i < allJUnitTestNames.length; i++) { String title = allJUnitTestNames[i].substring(allJUnitTestNames[i].lastIndexOf("/") + 1) .replaceAll(".class", ""); sb.append("<li title=\"" + title + "\">"); sb.append(title); sb.append("<input type=\"checkbox\" name=\"testnames\" value=\"" + allJUnitTestNames[i] + "\"/>"); sb.append("</li>"); } sb.append("</ul>"); if (ajaxBrowser) { sb.append("<input type=\"hidden\" name=\"yanel.resource.viewid\" value=\"source\"/>"); sb.append("<input type=\"hidden\" name=\"ajaxexecutetest\" value=\"true\"/>"); sb.append("<input type=\"button\" name=\"submit\" value=\"Test\" onclick=\"ajaxexecutetests();\" />"); } else { sb.append("<input type=\"submit\" name=\"submit\" value=\"Test\"/>"); } sb.append("</form>"); return sb; } private StringBuffer executeTests() { StringBuffer sb = new StringBuffer(); String[] testnames = request.getParameterValues("testnames"); // prepare tmpResultDir if (request.getSession().getAttribute("tmpResultDir") == null) { String uuid = new java.rmi.server.UID().toString().replaceAll(":", ""); tmpResultDir = new File(request.getSession().getServletContext().getRealPath("tmp" + File.separator + "test-results-" + uuid)); request.getSession().setAttribute("tmpResultDir", tmpResultDir); } else { tmpResultDir = (File) 
request.getSession().getAttribute("tmpResultDir"); } request.getSession().setAttribute("exectime", getTime()); request.getSession().setAttribute("numberOfTests", "" + testnames.length); // delete the resultdir before making new tests tmpResultDir.mkdir(); Runnable runtest = new ExecuteTests(testnames, JunitJarLocation, HtmlunitJarLocation, tmpResultDir); new Thread(runtest).start(); sb.append(showProgress()); return sb; } private StringBuffer showProgress() { StringBuffer sb = new StringBuffer(); // get tmpResultDir from session tmpResultDir = (File) request.getSession().getAttribute("tmpResultDir"); // number of executed tests int numberOfTests = Integer.parseInt((String) request.getSession() .getAttribute("numberOfTests")); String resultName = request.getSession().getAttribute("exectime") + "-tests.xml"; ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); if (tmpResultDir.list().length < numberOfTests) { // geting the test results // aggregate all tests in the tmp dir try { DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document result = builder.newDocument(); Project project = new Project(); ResultAggregator junitreport = new ResultAggregator(); junitreport.setTaskName("JUnitReport"); junitreport.setProject(project); FileSet fs_report = new FileSet(); fs_report.setDir(tmpResultDir); fs_report.setProject(project); PatternSet.NameEntry ne = fs_report.createInclude(); ne.setName("**/TEST-*.xml"); junitreport.addFileSet(fs_report); junitreport.init(); // get the result to show for this request result = junitreport.getDocument(); File result2htmlXsltFile = org.wyona.commons.io.FileUtil.file(rtd.getConfigFile() .getParentFile() .getAbsolutePath(), "xslt" + File.separator + "result2html.xsl"); Transformer transResult2html = TransformerFactory.newInstance() .newTransformer(new StreamSource(result2htmlXsltFile)); transResult2html.setParameter("testing.result.title", "stillTesting"); transResult2html.setParameter("testing.number.requested.tests", "" + numberOfTests); transResult2html.transform(new DOMSource(result), new StreamResult(byteArrayOutputStream)); } catch (Exception e) { sb.append("<p>Could not create folder. 
Exception: " + e + "</p>"); log.error(e.getMessage(), e); } } else { request.getSession().removeAttribute("tmpResultDir"); request.getSession().removeAttribute("exectime"); request.getSession().removeAttribute("numberOfTests"); try { // geting the test results DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document result = builder.newDocument(); Project aggregatorproject = new Project(); // aggregate all tests in the tmp dir ResultAggregator junitreport = new ResultAggregator(); junitreport.setTaskName("JUnitReport"); junitreport.setProject(aggregatorproject); FileSet fs_report = new FileSet(); fs_report.setDir(tmpResultDir); fs_report.setProject(aggregatorproject); PatternSet.NameEntry ne = fs_report.createInclude(); ne.setName("**/TEST-*.xml"); junitreport.addFileSet(fs_report); junitreport.init(); // get the result to show for this request result = junitreport.getDocument(); // write test result to repo org.wyona.yarep.core.Repository Repo = this.getRealm().getRepository(); org.wyona.commons.io.Path newPath = new org.wyona.commons.io.Path("/test-results-archive/" + resultName); log.error("DEBUG: " + newPath); org.wyona.yanel.core.util.YarepUtil.addNodes(Repo, newPath.toString(), org.wyona.yarep.core.NodeType.RESOURCE); OutputStream out = Repo.getNode(newPath.toString()).getOutputStream(); javax.xml.transform.TransformerFactory.newInstance() .newTransformer() .transform(new javax.xml.transform.dom.DOMSource(result), new javax.xml.transform.stream.StreamResult(out)); out.close(); // delete the test dir FileUtils.deleteDirectory(tmpResultDir); File result2htmlXsltFile = org.wyona.commons.io.FileUtil.file(rtd.getConfigFile() .getParentFile() .getAbsolutePath(), "xslt" + File.separator + "result2html.xsl"); Transformer transResult2html = TransformerFactory.newInstance() .newTransformer(new StreamSource(result2htmlXsltFile)); transResult2html.setParameter("testing.result.title", "testDone"); transResult2html.transform(new DOMSource(result), new StreamResult(byteArrayOutputStream)); } catch (Exception e) { sb.append("<p>Could not create folder. Exception: " + e + "</p>"); log.error(e.getMessage(), e); } } sb.append(byteArrayOutputStream); return sb; } private void setLocations() throws Exception { String WEBINFPath = request.getSession().getServletContext().getRealPath("WEB-INF"); HtmlunitJarLocation = new File(WEBINFPath + File.separator + "lib" + File.separator + HTMLUNIT_JAR); JunitJarLocation = new File(WEBINFPath + File.separator + "lib" + File.separator + JUNIT_JAR); if (!HtmlunitJarLocation.exists()) { throw new Exception("HtmlUnit-Tests not found"); } if (!JunitJarLocation.exists()) { throw new Exception("JUnit-Tests not found"); } // create tmp-directory to write the tests if (!new File(request.getSession().getServletContext().getRealPath("tmp")).exists()) { if (!new File(request.getSession().getServletContext().getRealPath("tmp")).mkdir()) { throw new Exception("Creation of tmp directory faild."); } } } /** * get Tests. * @param htmlOrJunit type of tests should be selected. can be htmlunit or junit. * @return an array with the aviable tests. 
*/ private String[] getAllTestNames(String htmlOrJunit) { Project project = new Project(); try { JUnitTask junit = new JUnitTask(); ZipFileSet zipfileset = new ZipFileSet(); zipfileset.setProject(project); if (htmlOrJunit.equals("htmlunit")) { zipfileset.setSrc(HtmlunitJarLocation); } else { zipfileset.setSrc(JunitJarLocation); } zipfileset.setIncludes("**/*Test.class"); zipfileset.setExcludes("**/Abstract*.class"); BatchTest batchTest = junit.createBatchTest(); batchTest.addFileSet(zipfileset); DirectoryScanner directoryscanner = zipfileset.getDirectoryScanner(project); return directoryscanner.getIncludedFiles(); } catch (Exception e) { log.error(e); } return null; } /** * get time as string * @return timestamp (yyyy-MM-dd-HH-mm-ss) */ private String getTime() { Calendar cal = Calendar.getInstance(java.util.TimeZone.getDefault()); String dateFormat = "yyyy-MM-dd-HH-mm-ss"; java.text.SimpleDateFormat sdf = new java.text.SimpleDateFormat(dateFormat); sdf.setTimeZone(java.util.TimeZone.getDefault()); return sdf.format(cal.getTime()); } /** * Get XSLT path */ private String[] getXSLTPath(String path) throws Exception { String[] xsltPath = getResourceConfigProperties("xslt"); if (xsltPath != null) return xsltPath; log.info("No XSLT Path within: " + path); return null; } }
src/resources/testing-control/src/java/org/wyona/yanel/impl/resources/TestingControlResource.java
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.impl.resources; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.InputStream; import java.io.OutputStream; import java.util.Calendar; import java.util.Enumeration; import javax.servlet.http.HttpServletRequest; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.TransformerFactoryConfigurationError; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.sax.TransformerHandler; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import javax.xml.transform.dom.DOMSource; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.apache.commons.io.FileUtils; import org.apache.log4j.Category; import org.apache.tools.ant.Project; import org.apache.tools.ant.DirectoryScanner; import org.apache.tools.ant.types.PatternSet; import org.apache.tools.ant.types.FileSet; import org.apache.tools.ant.types.ZipFileSet; import org.apache.tools.ant.taskdefs.optional.junit.JUnitTask; import org.apache.tools.ant.taskdefs.optional.junit.BatchTest; import org.apache.xml.resolver.tools.CatalogResolver; import org.apache.xml.serializer.Serializer; import org.wyona.yanel.core.Path; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.attributes.viewable.ViewDescriptor; import org.wyona.yarep.core.NoSuchNodeException; import org.wyona.yarep.core.Repository; import org.wyona.yarep.core.RepositoryFactory; import org.wyona.yanel.core.serialization.SerializerFactory; import org.wyona.yanel.core.source.ResourceResolver; import org.wyona.yanel.core.transformation.I18nTransformer2; import org.wyona.yanel.core.transformation.XIncludeTransformer; import org.wyona.yanel.core.util.PathUtil; import org.wyona.yarep.util.RepoPath; import org.wyona.yarep.util.YarepUtil; import org.xml.sax.InputSource; import org.xml.sax.XMLReader; import org.xml.sax.helpers.XMLReaderFactory; /** * */ public class TestingControlResource extends Resource implements ViewableV2 { private static final String JUNIT_JAR = "yanel-JunitTests.jar"; private static final String HTMLUNIT_JAR = "yanel-HtmlUnitTests.jar"; private static Category log = Category.getInstance(TestingControlResource.class); private boolean ajaxBrowser = false; private File JunitJarLocation; private File HtmlunitJarLocation; private File tmpResultDir; public TestingControlResource() { } /** * */ public boolean exists() { return true; } /** * */ public long 
getSize() { return -1; } /** * */ public String getMimeType(String viewId) { if (viewId != null && viewId.equals("source")) return "application/xml"; return "application/xhtml+xml"; } /** * */ public View getView(String viewId) { if (request.getHeader("User-Agent").indexOf("rv:1.7") < 0) { ajaxBrowser = true; } try { setLocations(); } catch (Exception e) { // sb.append("<p>Could not get the Locations: " + e + "</p>"); log.error(e.getMessage(), e); } View view = new View(); String mimeType = getMimeType(viewId); view.setMimeType(mimeType); try { org.wyona.yarep.core.Repository repo = getRealm().getRepository(); if (viewId != null && viewId.equals("source")) { view.setInputStream(new java.io.StringBufferInputStream(getScreen())); view.setMimeType("application/xml"); return view; } String[] xsltPath = getXSLTPath(getPath()); if (xsltPath != null) { // create reader: XMLReader xmlReader = XMLReaderFactory.createXMLReader(); CatalogResolver catalogResolver = new CatalogResolver(); xmlReader.setEntityResolver(catalogResolver); // create xslt transformer: SAXTransformerFactory tf = (SAXTransformerFactory) TransformerFactory.newInstance(); TransformerHandler[] xsltHandlers = new TransformerHandler[xsltPath.length]; for (int i = 0; i < xsltPath.length; i++) { xsltHandlers[i] = tf.newTransformerHandler(new StreamSource(repo.getNode(xsltPath[i]) .getInputStream())); xsltHandlers[i].getTransformer().setParameter("yanel.path.name", PathUtil.getName(getPath())); xsltHandlers[i].getTransformer().setParameter("yanel.path", getPath()); xsltHandlers[i].getTransformer().setParameter("yanel.back2context", PathUtil.backToContext(realm, getPath())); xsltHandlers[i].getTransformer().setParameter("yarep.back2realm", PathUtil.backToRealm(getPath())); xsltHandlers[i].getTransformer().setParameter("language", getRequestedLanguage()); } // create i18n transformer: I18nTransformer2 i18nTransformer = new I18nTransformer2("global", getRequestedLanguage(), getRealm().getDefaultLanguage()); i18nTransformer.setEntityResolver(catalogResolver); // create xinclude transformer: XIncludeTransformer xIncludeTransformer = new XIncludeTransformer(); ResourceResolver resolver = new ResourceResolver(this); xIncludeTransformer.setResolver(resolver); // create serializer: Serializer serializer = SerializerFactory.getSerializer(SerializerFactory.XHTML_STRICT); ByteArrayOutputStream baos = new ByteArrayOutputStream(); // chain everything together (create a pipeline): xmlReader.setContentHandler(xsltHandlers[0]); for (int i = 0; i < xsltHandlers.length - 1; i++) { xsltHandlers[i].setResult(new SAXResult(xsltHandlers[i + 1])); } xsltHandlers[xsltHandlers.length - 1].setResult(new SAXResult(xIncludeTransformer)); xIncludeTransformer.setResult(new SAXResult(i18nTransformer)); i18nTransformer.setResult(new SAXResult(serializer.asContentHandler())); serializer.setOutputStream(baos); // execute pipeline: xmlReader.parse(new InputSource(new java.io.StringBufferInputStream(getScreen()))); // write result into view: view.setInputStream(new ByteArrayInputStream(baos.toByteArray())); return view; } else { log.debug("Mime-Type: " + mimeType); view.setInputStream(new java.io.StringBufferInputStream(getScreen())); return view; } } catch (Exception e) { log.error(e + " (" + getPath() + ", " + getRealm() + ")", e); } view.setInputStream(new java.io.StringBufferInputStream(getScreen())); return view; } /** * */ public ViewDescriptor[] getViewDescriptors() { ViewDescriptor[] vd = new ViewDescriptor[2]; vd[0] = new ViewDescriptor("default"); 
vd[0].setMimeType(getMimeType(null)); vd[1] = new ViewDescriptor("source"); vd[1].setMimeType(getMimeType("source")); return vd; } /** * Flow */ private String getScreen() { StringBuffer sbContent = new StringBuffer(); Enumeration parameters = request.getParameterNames(); if (request.getSession().getAttribute("tmpResultDir") != null) { if (request.getParameterValues("ajaxshowprogress") != null) { return showProgress().toString(); } else { sbContent.append(showProgress()); } } else if (!parameters.hasMoreElements()) { sbContent.append(getPlainRequest()); } else { if (request.getParameterValues("testnames") != null) { if (request.getParameterValues("ajaxexecutetest") != null) { return executeTests().toString(); }else { sbContent.append(executeTests()); } } else { log.info("Fallback ..."); sbContent.append(getPlainRequest()); } } StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>"); sb.append("<html xmlns=\"http://www.w3.org/1999/xhtml\">"); sb.append("<head><title>Testing Control</title>"); if (request.getSession().getAttribute("tmpResultDir") != null && !ajaxBrowser) { sb.append("<meta http-equiv=\"refresh\" content=\"5; URL=\"/>"); } sb.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"" + PathUtil.getGlobalHtdocsPath(this) + "yanel-css/progressBar.css\"/>"); sb.append("<script src=\"" + PathUtil.getGlobalHtdocsPath(this) + "yanel-js/prototype.js\" type=\"text/javascript\"></script>"); sb.append("<script src=\"" + PathUtil.getGlobalHtdocsPath(this) + "yanel-js/progressBar.js\" type=\"text/javascript\"></script>"); sb.append("<script src=\"" + PathUtil.getResourcesHtdocsPath(this) + "js/ajaxexecutetests.js\" type=\"text/javascript\"></script>"); sb.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"" + PathUtil.getResourcesHtdocsPath(this) + "css/testingcontroler.css\"/>"); sb.append("</head>"); sb.append("<body>"); sb.append("<span id=\"yanelprogressbarph\"/>"); sb.append("<div id=\"ajaxreplace\">"); sb.append(sbContent); sb.append("</div>"); sb.append("</body>"); sb.append("</html>"); return sb.toString(); } private StringBuffer getPlainRequest() { StringBuffer sb = new StringBuffer(); sb.append("<form method=\"post\">"); sb.append("<h3>HtmlUnit Tests</h3>"); sb.append("<ul id=\"htmlunit\">"); String[] allHtmlUnitTestNames = getAllTestNames("htmlunit"); for (int i = 0; i < allHtmlUnitTestNames.length; i++) { String title = allHtmlUnitTestNames[i].substring(allHtmlUnitTestNames[i].lastIndexOf("/") + 1) .replaceAll(".class", ""); sb.append("<li title=\"" + "\">"); sb.append(title); sb.append("<input type=\"checkbox\" name=\"testnames\" value=\"" + allHtmlUnitTestNames[i] + "\"/>"); sb.append("</li>"); } sb.append("</ul>"); sb.append("<hr/>"); sb.append("<h3>JUnit Tests</h3>"); sb.append("<ul id=\"junit\">"); String[] allJUnitTestNames = getAllTestNames("junit"); for (int i = 0; i < allJUnitTestNames.length; i++) { String title = allJUnitTestNames[i].substring(allJUnitTestNames[i].lastIndexOf("/") + 1) .replaceAll(".class", ""); sb.append("<li title=\"" + title + "\">"); sb.append(title); sb.append("<input type=\"checkbox\" name=\"testnames\" value=\"" + allJUnitTestNames[i] + "\"/>"); sb.append("</li>"); } sb.append("</ul>"); if (ajaxBrowser) { sb.append("<input type=\"hidden\" name=\"yanel.resource.viewid\" value=\"source\"/>"); sb.append("<input type=\"hidden\" name=\"ajaxexecutetest\" value=\"true\"/>"); sb.append("<input type=\"button\" name=\"submit\" value=\"Test\" onclick=\"ajaxexecutetests();\" />"); } else { sb.append("<input type=\"submit\" name=\"submit\" 
value=\"Test\"/>"); } sb.append("</form>"); return sb; } private StringBuffer executeTests() { StringBuffer sb = new StringBuffer(); String[] testnames = request.getParameterValues("testnames"); // prepare tmpResultDir if (request.getSession().getAttribute("tmpResultDir") == null) { String uuid = new java.rmi.server.UID().toString().replaceAll(":", ""); tmpResultDir = new File(request.getSession().getServletContext().getRealPath("tmp" + File.separator + "test-results-" + uuid)); request.getSession().setAttribute("tmpResultDir", tmpResultDir); } else { tmpResultDir = (File) request.getSession().getAttribute("tmpResultDir"); } request.getSession().setAttribute("exectime", getTime()); request.getSession().setAttribute("numberOfTests", "" + testnames.length); // delete the resultdir before making new tests tmpResultDir.mkdir(); Runnable runtest = new ExecuteTests(testnames, JunitJarLocation, HtmlunitJarLocation, tmpResultDir); new Thread(runtest).start(); sb.append(showProgress()); return sb; } private StringBuffer showProgress() { StringBuffer sb = new StringBuffer(); // get tmpResultDir from session tmpResultDir = (File) request.getSession().getAttribute("tmpResultDir"); // number of executed tests int numberOfTests = Integer.parseInt((String) request.getSession() .getAttribute("numberOfTests")); String resultName = request.getSession().getAttribute("exectime") + "-tests.xml"; ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); if (tmpResultDir.list().length < numberOfTests) { // geting the test results // aggregate all tests in the tmp dir try { DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document result = builder.newDocument(); Project project = new Project(); ResultAggregator junitreport = new ResultAggregator(); junitreport.setTaskName("JUnitReport"); junitreport.setProject(project); FileSet fs_report = new FileSet(); fs_report.setDir(tmpResultDir); fs_report.setProject(project); PatternSet.NameEntry ne = fs_report.createInclude(); ne.setName("**/TEST-*.xml"); junitreport.addFileSet(fs_report); junitreport.init(); // get the result to show for this request result = junitreport.getDocument(); File result2htmlXsltFile = org.wyona.commons.io.FileUtil.file(rtd.getConfigFile() .getParentFile() .getAbsolutePath(), "xslt" + File.separator + "result2html.xsl"); Transformer transResult2html = TransformerFactory.newInstance() .newTransformer(new StreamSource(result2htmlXsltFile)); transResult2html.setParameter("testing.result.title", "stillTesting"); transResult2html.setParameter("testing.number.requested.tests", "" + numberOfTests); transResult2html.transform(new DOMSource(result), new StreamResult(byteArrayOutputStream)); } catch (Exception e) { sb.append("<p>Could not create folder. 
Exception: " + e + "</p>"); log.error(e.getMessage(), e); } } else { request.getSession().removeAttribute("tmpResultDir"); request.getSession().removeAttribute("exectime"); request.getSession().removeAttribute("numberOfTests"); try { // geting the test results DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document result = builder.newDocument(); Project aggregatorproject = new Project(); // aggregate all tests in the tmp dir ResultAggregator junitreport = new ResultAggregator(); junitreport.setTaskName("JUnitReport"); junitreport.setProject(aggregatorproject); FileSet fs_report = new FileSet(); fs_report.setDir(tmpResultDir); fs_report.setProject(aggregatorproject); PatternSet.NameEntry ne = fs_report.createInclude(); ne.setName("**/TEST-*.xml"); junitreport.addFileSet(fs_report); junitreport.init(); // get the result to show for this request result = junitreport.getDocument(); // write test result to repo org.wyona.yarep.core.Repository Repo = this.getRealm().getRepository(); org.wyona.commons.io.Path newPath = new org.wyona.commons.io.Path("/test-results-archive/" + resultName); log.error("DEBUG: " + newPath); org.wyona.yanel.core.util.YarepUtil.addNodes(Repo, newPath.toString(), org.wyona.yarep.core.NodeType.RESOURCE); OutputStream out = Repo.getNode(newPath.toString()).getOutputStream(); javax.xml.transform.TransformerFactory.newInstance() .newTransformer() .transform(new javax.xml.transform.dom.DOMSource(result), new javax.xml.transform.stream.StreamResult(out)); out.close(); // delete the test dir FileUtils.deleteDirectory(tmpResultDir); File result2htmlXsltFile = org.wyona.commons.io.FileUtil.file(rtd.getConfigFile() .getParentFile() .getAbsolutePath(), "xslt" + File.separator + "result2html.xsl"); Transformer transResult2html = TransformerFactory.newInstance() .newTransformer(new StreamSource(result2htmlXsltFile)); transResult2html.setParameter("testing.result.title", "testDone"); transResult2html.transform(new DOMSource(result), new StreamResult(byteArrayOutputStream)); } catch (Exception e) { sb.append("<p>Could not create folder. Exception: " + e + "</p>"); log.error(e.getMessage(), e); } } sb.append(byteArrayOutputStream); return sb; } private void setLocations() throws Exception { String WEBINFPath = request.getSession().getServletContext().getRealPath("WEB-INF"); HtmlunitJarLocation = new File(WEBINFPath + File.separator + "lib" + File.separator + HTMLUNIT_JAR); JunitJarLocation = new File(WEBINFPath + File.separator + "lib" + File.separator + JUNIT_JAR); if (!HtmlunitJarLocation.exists()) { throw new Exception("HtmlUnit-Tests not found"); } if (!JunitJarLocation.exists()) { throw new Exception("JUnit-Tests not found"); } // create tmp-directory to write the tests if (!new File(request.getSession().getServletContext().getRealPath("tmp")).exists()) { if (!new File(request.getSession().getServletContext().getRealPath("tmp")).mkdir()) { throw new Exception("Creation of tmp directory faild."); } } } /** * get Tests. * @param htmlOrJunit type of tests should be selected. can be htmlunit or junit. * @return an array with the aviable tests. 
*/ private String[] getAllTestNames(String htmlOrJunit) { Project project = new Project(); try { JUnitTask junit = new JUnitTask(); ZipFileSet zipfileset = new ZipFileSet(); zipfileset.setProject(project); if (htmlOrJunit.equals("htmlunit")) { zipfileset.setSrc(HtmlunitJarLocation); } else { zipfileset.setSrc(JunitJarLocation); } zipfileset.setIncludes("**/*Test.class"); zipfileset.setExcludes("**/Abstract*.class"); BatchTest batchTest = junit.createBatchTest(); batchTest.addFileSet(zipfileset); DirectoryScanner directoryscanner = zipfileset.getDirectoryScanner(project); return directoryscanner.getIncludedFiles(); } catch (Exception e) { log.error(e); } return null; } /** * get time as string * @return timestamp (yyyy-MM-dd-HH-mm-ss) */ private String getTime() { Calendar cal = Calendar.getInstance(java.util.TimeZone.getDefault()); String dateFormat = "yyyy-MM-dd-HH-mm-ss"; java.text.SimpleDateFormat sdf = new java.text.SimpleDateFormat(dateFormat); sdf.setTimeZone(java.util.TimeZone.getDefault()); return sdf.format(cal.getTime()); } /** * Get XSLT path */ private String[] getXSLTPath(String path) throws Exception { String[] xsltPath = getResourceConfigProperties("xslt"); if (xsltPath != null) return xsltPath; log.info("No XSLT Path within: " + path); return null; } }
obsolete imports and else statements removed
src/resources/testing-control/src/java/org/wyona/yanel/impl/resources/TestingControlResource.java
obsolete imports and else statements removed
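The commit in the record above is described only as "obsolete imports and else statements removed". For readers skimming the flattened before/after sources, here is a minimal, self-contained Java sketch of the early-return pattern that change applies (for example in getScreen()); the class and method names below are illustrative placeholders, not Yanel code.

// Minimal sketch (illustrative names only) of the refactoring named in the commit message:
// an if-branch that returns makes the following else redundant, so the else can be dropped.
public class EarlyReturnSketch {

    // Before: redundant else block after a returning if-branch.
    static String renderWithElse(boolean ajaxRequest, String progress) {
        if (ajaxRequest) {
            return progress;
        } else {
            return "<div>" + progress + "</div>";
        }
    }

    // After: same behaviour, one nesting level less and no obsolete else.
    static String renderEarlyReturn(boolean ajaxRequest, String progress) {
        if (ajaxRequest) {
            return progress;
        }
        return "<div>" + progress + "</div>";
    }

    public static void main(String[] args) {
        // Both variants agree for every input, so the refactoring is behaviour-preserving.
        System.out.println(renderWithElse(true, "50%").equals(renderEarlyReturn(true, "50%")));
        System.out.println(renderWithElse(false, "50%").equals(renderEarlyReturn(false, "50%")));
    }
}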
Java
apache-2.0
9ec365276d17d8f50dcbb47bd97c6ac2f3c56ebd
0
dbeaver/dbeaver,Sargul/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,dbeaver/dbeaver,Sargul/dbeaver,serge-rider/dbeaver
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2020 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.registry; import org.jkiss.dbeaver.model.navigator.DBNBrowseSettings; import java.util.LinkedHashMap; import java.util.Map; /** * Browse settings */ public class DataSourceNavigatorSettings implements DBNBrowseSettings { public static final Map<String, Preset> PRESETS = new LinkedHashMap<>(); public static final Preset PRESET_SIMPLE = new Preset("simple", "Simple", "Shows only tables"); public static final Preset PRESET_FULL = new Preset("advanced", "Advanced", "Shows all database objects"); public static final Preset PRESET_CUSTOM = new Preset("custom", "Custom", "User configuration"); public static class Preset { private final String id; private final String name; private final String description; private final DataSourceNavigatorSettings settings = new DataSourceNavigatorSettings(); public Preset(String id, String name, String description) { this.id = id; this.name = name; this.description = description; } public String getId() { return id; } public String getName() { return name; } public String getDescription() { return description; } public DataSourceNavigatorSettings getSettings() { return settings; } } static { PRESET_SIMPLE.settings.setShowOnlyEntities(true); PRESET_SIMPLE.settings.setHideFolders(true); PRESET_SIMPLE.settings.setHideVirtualModel(true); PRESET_FULL.settings.setShowSystemObjects(true); PRESETS.put(PRESET_SIMPLE.name, PRESET_SIMPLE); PRESETS.put(PRESET_FULL.name, PRESET_FULL); PRESETS.put(PRESET_CUSTOM.name, PRESET_CUSTOM); } private boolean showSystemObjects; private boolean showUtilityObjects; private boolean showOnlyEntities; private boolean mergeEntities; private boolean hideFolders; private boolean hideSchemas; private boolean hideVirtualModel; public DataSourceNavigatorSettings() { } public DataSourceNavigatorSettings(DBNBrowseSettings copyFrom) { this.showSystemObjects = copyFrom.isShowSystemObjects(); this.showUtilityObjects = copyFrom.isShowUtilityObjects(); this.showOnlyEntities = copyFrom.isShowOnlyEntities(); this.mergeEntities = copyFrom.isMergeEntities(); this.hideFolders = copyFrom.isHideFolders(); this.hideSchemas = copyFrom.isHideSchemas(); this.hideVirtualModel = copyFrom.isHideVirtualModel(); } @Override public boolean isShowSystemObjects() { return showSystemObjects; } public void setShowSystemObjects(boolean showSystemObjects) { this.showSystemObjects = showSystemObjects; } @Override public boolean isShowUtilityObjects() { return showUtilityObjects; } public void setShowUtilityObjects(boolean showUtilityObjects) { this.showUtilityObjects = showUtilityObjects; } @Override public boolean isShowOnlyEntities() { return showOnlyEntities; } public void setShowOnlyEntities(boolean showOnlyEntities) { this.showOnlyEntities = showOnlyEntities; } @Override public boolean isMergeEntities() { return mergeEntities; } public void setMergeEntities(boolean mergeEntities) { this.mergeEntities = mergeEntities; } 
@Override public boolean isHideFolders() { return hideFolders; } public void setHideFolders(boolean hideFolders) { this.hideFolders = hideFolders; } @Override public boolean isHideSchemas() { return hideSchemas; } public void setHideSchemas(boolean hideSchemas) { this.hideSchemas = hideSchemas; } @Override public boolean isHideVirtualModel() { return hideVirtualModel; } public void setHideVirtualModel(boolean hideVirtualModel) { this.hideVirtualModel = hideVirtualModel; } @Override public boolean equals(Object obj) { if (!(obj instanceof DataSourceNavigatorSettings)) { return false; } DataSourceNavigatorSettings source = (DataSourceNavigatorSettings) obj; return this.showSystemObjects == source.showSystemObjects && this.showUtilityObjects == source.showUtilityObjects && this.showOnlyEntities == source.showOnlyEntities && this.mergeEntities == source.mergeEntities && this.hideFolders == source.hideFolders && this.hideSchemas == source.hideSchemas && this.hideVirtualModel == source.hideVirtualModel; } }
plugins/org.jkiss.dbeaver.registry/src/org/jkiss/dbeaver/registry/DataSourceNavigatorSettings.java
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2020 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.registry; import org.jkiss.dbeaver.model.navigator.DBNBrowseSettings; import java.util.LinkedHashMap; import java.util.Map; /** * Browse settings */ public class DataSourceNavigatorSettings implements DBNBrowseSettings { public static final Map<String, Preset> PRESETS = new LinkedHashMap<>(); public static final Preset PRESET_SIMPLE = new Preset("simple", "Simple", "Shows only tables"); public static final Preset PRESET_FULL = new Preset("advanced", "Advanced", "Shows all database objects"); public static final Preset PRESET_CUSTOM = new Preset("custom", "Custom", "User configuration"); public static class Preset { private final String id; private final String name; private final String description; private final DataSourceNavigatorSettings settings = new DataSourceNavigatorSettings(); public Preset(String id, String name, String description) { this.id = id; this.name = name; this.description = description; } public String getId() { return id; } public String getName() { return name; } public String getDescription() { return description; } public DBNBrowseSettings getSettings() { return settings; } } static { PRESET_SIMPLE.settings.setShowOnlyEntities(true); PRESET_SIMPLE.settings.setHideFolders(true); PRESET_SIMPLE.settings.setHideVirtualModel(true); PRESET_FULL.settings.setShowSystemObjects(true); PRESETS.put(PRESET_SIMPLE.name, PRESET_SIMPLE); PRESETS.put(PRESET_FULL.name, PRESET_FULL); PRESETS.put(PRESET_CUSTOM.name, PRESET_CUSTOM); } private boolean showSystemObjects; private boolean showUtilityObjects; private boolean showOnlyEntities; private boolean mergeEntities; private boolean hideFolders; private boolean hideSchemas; private boolean hideVirtualModel; public DataSourceNavigatorSettings() { } public DataSourceNavigatorSettings(DBNBrowseSettings copyFrom) { this.showSystemObjects = copyFrom.isShowSystemObjects(); this.showUtilityObjects = copyFrom.isShowUtilityObjects(); this.showOnlyEntities = copyFrom.isShowOnlyEntities(); this.mergeEntities = copyFrom.isMergeEntities(); this.hideFolders = copyFrom.isHideFolders(); this.hideSchemas = copyFrom.isHideSchemas(); this.hideVirtualModel = copyFrom.isHideVirtualModel(); } @Override public boolean isShowSystemObjects() { return showSystemObjects; } public void setShowSystemObjects(boolean showSystemObjects) { this.showSystemObjects = showSystemObjects; } @Override public boolean isShowUtilityObjects() { return showUtilityObjects; } public void setShowUtilityObjects(boolean showUtilityObjects) { this.showUtilityObjects = showUtilityObjects; } @Override public boolean isShowOnlyEntities() { return showOnlyEntities; } public void setShowOnlyEntities(boolean showOnlyEntities) { this.showOnlyEntities = showOnlyEntities; } @Override public boolean isMergeEntities() { return mergeEntities; } public void setMergeEntities(boolean mergeEntities) { this.mergeEntities = mergeEntities; } @Override 
public boolean isHideFolders() { return hideFolders; } public void setHideFolders(boolean hideFolders) { this.hideFolders = hideFolders; } @Override public boolean isHideSchemas() { return hideSchemas; } public void setHideSchemas(boolean hideSchemas) { this.hideSchemas = hideSchemas; } @Override public boolean isHideVirtualModel() { return hideVirtualModel; } public void setHideVirtualModel(boolean hideVirtualModel) { this.hideVirtualModel = hideVirtualModel; } @Override public boolean equals(Object obj) { if (!(obj instanceof DataSourceNavigatorSettings)) { return false; } DataSourceNavigatorSettings source = (DataSourceNavigatorSettings) obj; return this.showSystemObjects == source.showSystemObjects && this.showUtilityObjects == source.showUtilityObjects && this.showOnlyEntities == source.showOnlyEntities && this.mergeEntities == source.mergeEntities && this.hideFolders == source.hideFolders && this.hideSchemas == source.hideSchemas && this.hideVirtualModel == source.hideVirtualModel; } }
Nav settings preset refactoring Former-commit-id: 29b32ce2da41245451f93a985be534f25cf0c1d9
plugins/org.jkiss.dbeaver.registry/src/org/jkiss/dbeaver/registry/DataSourceNavigatorSettings.java
Nav settings preset refactoring
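In the record above, the refactoring narrows Preset.getSettings() from the DBNBrowseSettings interface to the concrete DataSourceNavigatorSettings class. The sketch below illustrates one way a caller could use that, assuming it runs inside a project that already depends on org.jkiss.dbeaver.registry; the copy-and-tweak flow is an illustrative assumption, not code taken from the DBeaver sources.

// Illustrative sketch only: shows how the refactored Preset.getSettings()
// (now returning the concrete DataSourceNavigatorSettings) can seed a custom configuration.
import org.jkiss.dbeaver.registry.DataSourceNavigatorSettings;

public class NavigatorSettingsSketch {
    public static void main(String[] args) {
        // Look up a predefined preset; PRESETS is keyed by the preset name.
        DataSourceNavigatorSettings.Preset simple =
            DataSourceNavigatorSettings.PRESETS.get(DataSourceNavigatorSettings.PRESET_SIMPLE.getName());

        // Copy the preset's settings and adjust one flag; the copy constructor
        // accepts any DBNBrowseSettings, so the preset's settings can seed a custom instance.
        DataSourceNavigatorSettings custom =
            new DataSourceNavigatorSettings(simple.getSettings());
        custom.setShowSystemObjects(true);

        // equals() compares all navigator flags, so the tweak makes the copies differ.
        System.out.println(custom.equals(simple.getSettings())); // false
    }
}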
Java
apache-2.0
b70b62339ca000aaa95ba5c3331c27293d016d0d
0
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.config.codegen; import java.io.FileWriter; import java.io.File; import java.io.BufferedReader; import java.io.IOException; import java.io.FileReader; import java.io.StringWriter; import java.io.Writer; import java.util.Map; import java.util.HashMap; import java.util.Collections; import java.util.Arrays; import java.util.StringTokenizer; import java.util.stream.Collectors; /** * This class autogenerates C++ code for the C++ config, based on a CNode tree given. */ public class CppClassBuilder implements ClassBuilder { private final CNode root; private final NormalizedDefinition nd; private final File rootDir; private final String relativePathUnderRoot; private static final Map<String, String> vectorTypeDefs; static { Map<String, String> map = new HashMap<String, String>(); map.put("bool", "BoolVector"); map.put("int32_t", "IntVector"); map.put("int64_t", "LongVector"); map.put("double", "DoubleVector"); map.put("vespalib::string", "StringVector"); vectorTypeDefs = Collections.unmodifiableMap(map); } private static final Map<String, String> mapTypeDefs; static { Map<String, String> map = new HashMap<>(); map.put("bool", "BoolMap"); map.put("int32_t", "IntMap"); map.put("int64_t", "LongMap"); map.put("double", "DoubleMap"); map.put("vespalib::string", "StringMap"); mapTypeDefs = Collections.unmodifiableMap(map); } private static final Map<String, String> slimeTypeMap; static { Map<String, String> map = new HashMap<String, String>(); map.put("bool", "Bool"); map.put("int", "Long"); map.put("long", "Long"); map.put("double", "Double"); map.put("string", "String"); map.put("enum", "String"); map.put("file", "String"); map.put("reference", "String"); slimeTypeMap = Collections.unmodifiableMap(map); } public CppClassBuilder(CNode root, NormalizedDefinition nd, File rootDir, String relativePathUnderRoot) { this.root = root; this.nd = nd; this.rootDir = rootDir; this.relativePathUnderRoot = relativePathUnderRoot; } public void createConfigClasses() { generateConfig(root, nd); } String readFile(File f) throws IOException { if (!f.isFile()) return null; StringBuilder sb = new StringBuilder(); try (BufferedReader sr = new BufferedReader(new FileReader(f))) { while (true) { String line = sr.readLine(); if (line == null) break; sb.append(line).append("\n"); } return sb.toString(); } } void writeFile(File f, String content) throws IOException { FileWriter fw = new FileWriter(f); fw.write(content); fw.close(); } void generateConfig(CNode root, NormalizedDefinition nd) { try{ StringWriter headerWriter = new StringWriter(); StringWriter bodyWriter = new StringWriter(); writeHeaderFile(headerWriter, root); writeBodyFile(bodyWriter, root, relativePathUnderRoot, nd); String newHeader = headerWriter.toString(); String newBody = bodyWriter.toString(); File headerFile = new File(rootDir, relativePathUnderRoot + "/" + getFileName(root, "h")); File bodyFile = new File(rootDir, relativePathUnderRoot + "/" + getFileName(root, "cpp")); String oldHeader = readFile(headerFile); String oldBody = readFile(bodyFile); if (oldHeader == null || !oldHeader.equals(newHeader)) { writeFile(headerFile, newHeader); } if (oldBody == null || !oldBody.equals(newBody)) { writeFile(bodyFile, newBody); } } catch (IOException e) { e.printStackTrace(); } } String getFileName(CNode node, String extension) { return "config-" + node.getName() + "." 
+ extension; } static String removeDashesAndUpperCaseAllFirstChars(String source, boolean capitalizeFirst) { // Create upper case chars after each dash String parts[] = source.split("[-_]"); StringBuilder sb = new StringBuilder(); for (String s : parts) { sb.append(s.substring(0, 1).toUpperCase()).append(s.substring(1)); } String result = sb.toString(); if (!capitalizeFirst) { result = result.substring(0,1).toLowerCase() + result.substring(1); } return result; } /** Convert name of type to the name we want to use in macro ifdefs in file. */ String getDefineName(String name) { return name.toUpperCase().replace("-", ""); } /** Convert name of type to the name we want to use as type name in the generated code. */ static String getTypeName(String name) { return removeDashesAndUpperCaseAllFirstChars(name, true); } /** Convert name of an identifier from value in def file to name to use in C++ file. */ String getIdentifier(String name) { return removeDashesAndUpperCaseAllFirstChars(name, false); } void writeHeaderFile(Writer w, CNode root) throws IOException { writeHeaderHeader(w, root); writeHeaderPublic(w, root); writeHeaderFooter(w, root); } void writeHeaderPublic(Writer w, CNode root) throws IOException { w.write("public:\n"); writeHeaderTypeDefs(w, root, " "); writeTypeDeclarations(w, root, " "); writeHeaderFunctionDeclarations(w, getTypeName(root, false), root, " "); writeStaticMemberDeclarations(w, " "); writeMembers(w, root, " "); } String [] generateCppNameSpace(CNode root) { String namespace = root.getNamespace(); if (namespace.contains(".")) { return namespace.split("\\."); } return new String[]{namespace}; } String generateCppNameSpaceString(String[] namespaceList) { StringBuilder str = new StringBuilder(); for (int i = 0; i < namespaceList.length - 1; i++) { str.append(namespaceList[i]); str.append("::"); } str.append(namespaceList[namespaceList.length - 1]); return str.toString(); } String generateCppNameSpaceDefine(String[] namespaceList) { StringBuilder str = new StringBuilder(); for (int i = 0; i < namespaceList.length - 1; i++) { str.append(namespaceList[i].toUpperCase()); str.append("_"); } str.append(namespaceList[namespaceList.length - 1].toUpperCase()); return str.toString(); } void writeNameSpaceBegin(Writer w, String [] namespaceList) throws IOException { w.write("namespace "); w.write(getNestedNameSpace(namespaceList)); w.write(" {\n"); } String getNestedNameSpace(String [] namespaceList) { return Arrays.stream(namespaceList).map(String::toString).collect(Collectors.joining("::")); } void writeNameSpaceEnd(Writer w, String [] namespaceList) throws IOException { w.write("} // namespace "); w.write(getNestedNameSpace(namespaceList)); w.write("\n"); } void writeHeaderHeader(Writer w, CNode root) throws IOException { String [] namespaceList = generateCppNameSpace(root); String namespacePrint = generateCppNameSpaceString(namespaceList); String namespaceDefine = generateCppNameSpaceDefine(namespaceList); String className = getTypeName(root, false); String defineName = namespaceDefine + "_" + getDefineName(className); w.write("" + "/**\n" + " * @class " + namespacePrint + "::" + className + "\n" + " * @ingroup config\n" + " *\n" + " * @brief This is an autogenerated class for handling VESPA config.\n" + " *\n" + " * This class is autogenerated by vespa from a config definition file.\n" + " * To subscribe to config, you need to include the config/config.h header, \n" + " * and create a ConfigSubscriber in order to subscribe for config.\n" ); if (root.getComment().length() > 0) { 
w.write(" *\n"); StringTokenizer st = new StringTokenizer(root.getComment(), "\n"); while (st.hasMoreTokens()) { w.write(" * " + st.nextToken() + "\n"); } } w.write("" + " */\n" + "#ifndef CLOUD_CONFIG_" + defineName + "_H\n" + "#define CLOUD_CONFIG_" + defineName + "_H\n" + "\n" + "#include <vespa/config/configgen/configinstance.h>\n" + "#include <vespa/vespalib/stllike/string.h>\n" + "#include <vector>\n" + "#include <map>\n" + "\n"); w.write("namespace config {\n"); w.write(" class ConfigValue;\n"); w.write(" class ConfigPayload;\n"); w.write("}\n\n"); w.write("namespace vespalib::slime {\n"); w.write(" struct Inspector;\n"); w.write(" struct Cursor;\n"); w.write("}\n\n"); writeNameSpaceBegin(w, namespaceList); w.write("\nnamespace internal {\n\n"); w.write("" + "/**\n" + " * This class contains the config. DO NOT USE THIS CLASS DIRECTLY. Use the typedeffed\n" + " * versions after this class declaration.\n" + " */\n" + "class Internal" + className + "Type : public ::config::ConfigInstance\n" + "{\n" ); } void writeTypeDeclarations(Writer w, CNode node, String indent) throws IOException { java.util.Set<String> declaredTypes = new java.util.HashSet<String>(); for (CNode child : node.getChildren()) { boolean complexType = (child instanceof InnerCNode || child instanceof LeafCNode.EnumLeaf); if (complexType && !declaredTypes.contains(child.getName())) { String typeName = getTypeName(child, false); declaredTypes.add(child.getName()); if (child instanceof LeafCNode.EnumLeaf) { w.write(indent + "enum " + typeName + " { "); LeafCNode.EnumLeaf leaf = (LeafCNode.EnumLeaf) child; for (int i=0; i<leaf.getLegalValues().length; ++i) { if (i != 0) { w.write(", "); } w.write(leaf.getLegalValues()[i]); } w.write(" };\n" + indent + "typedef std::vector<" + typeName + "> " + typeName + "Vector;" + "\n" + indent + "typedef std::map<vespalib::string, " + typeName + "> " + typeName + "Map;" + "\n" + indent + "static " + typeName + " get" + typeName + "(const vespalib::string&);\n" + indent + "static vespalib::string get" + typeName + "Name(" + typeName + " e);\n" + "\n" ); w.write(indent + "struct Internal" + typeName + "Converter {\n"); w.write(indent + " " + typeName + " operator()(const ::vespalib::string & __fieldName, const ::vespalib::slime::Inspector & __inspector);\n"); w.write(indent + " " + typeName + " operator()(const ::vespalib::slime::Inspector & __inspector);\n"); w.write(indent + " " + typeName + " operator()(const ::vespalib::slime::Inspector & __inspector, " + typeName + " __eDefault);\n"); w.write(indent + "};\n"); } else { w.write(indent + "class " + typeName + " {\n"); w.write(indent + "public:\n"); writeTypeDeclarations(w, child, indent + " "); writeStructFunctionDeclarations(w, getTypeName(child, false), child, indent + " "); writeMembers(w, child, indent + " "); w.write(indent + "};\n"); w.write(indent + "typedef std::vector<" + typeName + "> " + typeName + "Vector;\n\n"); w.write(indent + "typedef std::map<vespalib::string, " + typeName + "> " + typeName + "Map;\n\n"); } } } } void writeHeaderFunctionDeclarations(Writer w, String className, CNode node, String indent) throws IOException { w.write("" + indent + "const vespalib::string & defName() const override { return CONFIG_DEF_NAME; }\n" + indent + "const vespalib::string & defMd5() const override { return CONFIG_DEF_MD5; }\n" + indent + "const vespalib::string & defNamespace() const override { return CONFIG_DEF_NAMESPACE; }\n" + indent + "void serialize(::config::ConfigDataBuffer & __buffer) const override;\n"); 
writeConfigClassFunctionDeclarations(w, "Internal" + className + "Type", node, indent); } void writeConfigClassFunctionDeclarations(Writer w, String className, CNode node, String indent) throws IOException { w.write(indent + className + "(const ::config::ConfigValue & __value);\n"); w.write(indent + className + "(const ::config::ConfigDataBuffer & __value);\n"); w.write(indent + className + "(const ::config::ConfigPayload & __payload);\n"); writeCommonFunctionDeclarations(w, className, node, indent); } void writeStructFunctionDeclarations(Writer w, String className, CNode node, String indent) throws IOException { w.write(indent + className + "(const std::vector<vespalib::string> & __lines);\n"); w.write(indent + className + "(const vespalib::slime::Inspector & __inspector);\n"); w.write(indent + className + "(const ::config::ConfigPayload & __payload);\n"); writeCommonFunctionDeclarations(w, className, node, indent); w.write(indent + "void serialize(vespalib::slime::Cursor & __cursor) const;\n"); } void writeClassCopyConstructorDeclaration(Writer w, String className, String indent) throws IOException { w.write(indent + className + "(const " + className + " & __rhs);\n"); } void writeClassAssignmentOperatorDeclaration(Writer w, String className, String indent) throws IOException { w.write(indent + className + " & operator = (const " + className + " & __rhs);\n"); } void writeConfigClassCopyConstructorDefinition(Writer w, String parent, String className) throws IOException { w.write(parent + "::" + className + "(const " + className + " & __rhs) = default;\n"); } void writeConfigClassAssignmentOperatorDefinition(Writer w, String parent, String className) throws IOException { w.write(parent + " & " + parent + "::" + "operator =(const " + className + " & __rhs) = default;\n"); } void writeClassCopyConstructorDefinition(Writer w, String parent, CNode node) throws IOException { String typeName = getTypeName(node, false); w.write(parent + "::" + typeName + "(const " + typeName + " & __rhs) = default;\n"); } void writeClassAssignmentOperatorDefinition(Writer w, String parent, CNode node) throws IOException { String typeName = getTypeName(node, false); // Write empty constructor w.write(parent + " & " + parent + "::" + "operator = (const " + typeName + " & __rhs) = default;\n"); } void writeDestructor(Writer w, String parent, String className) throws IOException { w.write(parent + "~" + className + "() { } \n"); } void writeCommonFunctionDeclarations(Writer w, String className, CNode node, String indent) throws IOException { w.write("" + indent + className + "();\n"); writeClassCopyConstructorDeclaration(w, className, indent); writeClassAssignmentOperatorDeclaration(w, className, indent); w.write("" + indent + "~" + className + "();\n"); w.write("\n" + indent + "bool operator==(const " + className + "& __rhs) const;\n" + indent + "bool operator!=(const " + className + "& __rhs) const;\n" + "\n" ); } static String getTypeName(CNode node, boolean includeArray) { String type = null; if (node instanceof InnerCNode) { InnerCNode innerNode = (InnerCNode) node; type = getTypeName(innerNode.getName()); } else if (node instanceof LeafCNode) { LeafCNode leaf = (LeafCNode) node; if (leaf.getType().equals("bool")) { type = "bool"; } else if (leaf.getType().equals("int")) { type = "int32_t"; } else if (leaf.getType().equals("long")) { type = "int64_t"; } else if (leaf.getType().equals("double")) { type = "double"; } else if (leaf.getType().equals("enum")) { type = getTypeName(node.getName()); } else if 
(leaf.getType().equals("string")) { type = "vespalib::string"; } else if (leaf.getType().equals("reference")) { type = "vespalib::string"; } else if (leaf.getType().equals("file")) { type = "vespalib::string"; } else { throw new IllegalArgumentException("Unknown leaf datatype " + leaf.getType()); } } if (type == null) { throw new IllegalArgumentException("Unknown node " + node); } if (node.isArray && includeArray) { if (vectorTypeDefs.containsKey(type)) { type = vectorTypeDefs.get(type); } else { type = type + "Vector"; } } else if (node.isMap && includeArray) { if (mapTypeDefs.containsKey(type)) { type = mapTypeDefs.get(type); } else { type = type + "Map"; } } return type; } void writeStaticMemberDeclarations(Writer w, String indent) throws IOException { w.write("" + indent + "static const vespalib::string CONFIG_DEF_MD5;\n" + indent + "static const vespalib::string CONFIG_DEF_VERSION;\n" + indent + "static const vespalib::string CONFIG_DEF_NAME;\n" + indent + "static const vespalib::string CONFIG_DEF_NAMESPACE;\n" + indent + "static const std::vector<vespalib::string> CONFIG_DEF_SCHEMA;\n" + indent + "static const int64_t CONFIG_DEF_SERIALIZE_VERSION;\n" + "\n" ); } void writeComment(Writer w, String indent, String comment, boolean javadoc) throws IOException { /** If simple one liner comment, write on one line. */ if (javadoc && comment.indexOf('\n') == -1 && comment.length() <= 80 - (indent.length() + 7)) { w.write(indent + "/** " + comment + " */\n"); return; } else if (!javadoc && comment.indexOf('\n') == -1 && comment.length() <= 80 - (indent.length() + 3)) { w.write(indent + "// " + comment + "\n"); return; } /** If not we need to write multi line comment. */ int maxLineLen = 80 - (indent.length() + 3); if (javadoc) w.write(indent + "/**\n"); do { String current; // Extract first line to write int newLine = comment.indexOf('\n'); if (newLine == -1) { current = comment; comment = ""; } else { current = comment.substring(0, newLine); comment = comment.substring(newLine + 1); } // If line too long, cut it in two if (current.length() > maxLineLen) { int spaceIndex = current.lastIndexOf(' ', maxLineLen); if (spaceIndex >= maxLineLen - 15) { comment = current.substring(spaceIndex + 1) + "\n" + comment; current = current.substring(0, spaceIndex); } else { comment = current.substring(maxLineLen) + "\n" + comment; current = current.substring(0, maxLineLen) + "-"; } } w.write(indent + (javadoc ? 
" * " : "// ") + current + "\n"); } while (comment.length() > 0); if (javadoc) w.write(indent + " */\n"); } void writeMembers(Writer w, CNode node, String indent) throws IOException { for (CNode child : node.getChildren()) { String typeName = getTypeName(child, true); if (child.getComment().length() > 0) { String comment = child.getComment(); int index; do { index = comment.indexOf("\n\n"); if (index == -1) break; String next = comment.substring(0, index); comment = comment.substring(index + 2); w.write("\n"); writeComment(w, indent, next, false); } while (true); w.write("\n"); writeComment(w, indent, comment, true); } w.write(indent + typeName + " " + getIdentifier(child.getName()) + ";"); if (child instanceof LeafCNode) { LeafCNode leaf = (LeafCNode) child; DefaultValue value = leaf.getDefaultValue(); if (value != null) { w.write(" // Default: " + value.getStringRepresentation()); } } w.write("\n"); } } void writeHeaderTypeDefs(Writer w, CNode root, String indent) throws IOException { w.write(indent + "typedef std::unique_ptr<const " + getInternalClassName(root) + "> UP;\n"); for (Map.Entry<String, String> entry : vectorTypeDefs.entrySet()) { String typeName = entry.getKey(); String vectorName = entry.getValue(); String typeDef = "typedef std::vector<" + typeName + "> " + vectorName; w.write(indent + typeDef + ";\n"); } for (Map.Entry<String, String> entry : mapTypeDefs.entrySet()) { String typeName = entry.getKey(); String mapName = entry.getValue(); String typeDef = "typedef std::map<vespalib::string, " + typeName + "> " + mapName; w.write(indent + typeDef + ";\n"); } } private static String getInternalClassName(CNode root) { return "Internal" + getTypeName(root, false) + "Type"; } void writeHeaderFooter(Writer w, CNode root) throws IOException { String [] namespaceList = generateCppNameSpace(root); String namespaceDefine = generateCppNameSpaceDefine(namespaceList); String className = getTypeName(root, false); String defineName = namespaceDefine + "_" + getDefineName(className); w.write("" + "};\n" + "\n" + "} // namespace internal\n\n"); w.write("typedef internal::" + getInternalClassName(root) + " " + className + "ConfigBuilder;\n"); w.write("typedef const internal::" + getInternalClassName(root) + " " + className + "Config;\n"); w.write("\n"); writeNameSpaceEnd(w, namespaceList); w.write("#endif // VESPA_config_" + defineName + "_H\n"); } void writeBodyFile(Writer w, CNode root, String subdir, NormalizedDefinition nd) throws IOException { writeBodyHeader(w, root, subdir); writeStaticMemberDefinitions(w, root, nd); writeDefinition(w, root, null); writeBodyFooter(w, root); } void writeBodyHeader(Writer w, CNode root, String subdir) throws IOException { if (subdir == null) { w.write("#include \"" + getFileName(root, "h") + "\""); } else { w.write("#include <" + subdir + "/" + getFileName(root, "h") + ">"); } w.write("\n"); w.write("#include <vespa/config/common/configvalue.h>\n"); w.write("#include <vespa/config/common/exceptions.h>\n"); w.write("#include <vespa/config/configgen/configpayload.h>\n"); w.write("#include <vespa/config/print/configdatabuffer.h>\n"); w.write("#include <vespa/config/common/configparser.h>\n"); w.write("#include <vespa/config/configgen/vector_inserter.h>\n"); w.write("#include <vespa/config/configgen/map_inserter.h>\n"); w.write("#include <vespa/vespalib/data/slime/convenience.h>\n"); w.write("#include <vespa/vespalib/data/slime/slime.h>\n"); w.write("#include <vespa/vespalib/stllike/asciistream.h>\n"); w.write("\n"); writeNameSpaceBegin(w, 
generateCppNameSpace(root)); w.write("\nnamespace internal {\n\n"); w.write("using ::config::ConfigParser;\n"); w.write("using ::config::InvalidConfigException;\n"); w.write("using ::config::ConfigInstance;\n"); w.write("using ::config::ConfigValue;\n"); w.write("using namespace vespalib::slime::convenience;\n"); w.write("\n"); } void writeStaticMemberDefinitions(Writer w, CNode root, NormalizedDefinition nd) throws IOException { String typeName = getInternalClassName(root); w.write("const vespalib::string " + typeName + "::CONFIG_DEF_MD5(\"" + root.defMd5 + "\");\n" + "const vespalib::string " + typeName + "::CONFIG_DEF_VERSION(\"" + root.defVersion + "\");\n" + "const vespalib::string " + typeName + "::CONFIG_DEF_NAME(\"" + root.defName + "\");\n" + "const vespalib::string " + typeName + "::CONFIG_DEF_NAMESPACE(\"" + root.getNamespace() + "\");\n" + "const int64_t " + typeName + "::CONFIG_DEF_SERIALIZE_VERSION(1);\n"); w.write("const static vespalib::string __internalDefSchema[] = {\n"); for (String line : nd.getNormalizedContent()) { w.write("\"" + line.replace("\"", "\\\"") + "\",\n"); } w.write("};\n"); w.write("const std::vector<vespalib::string> " + typeName + "::CONFIG_DEF_SCHEMA(__internalDefSchema,\n"); w.write(" __internalDefSchema + (sizeof(__internalDefSchema) / \n"); w.write(" sizeof(__internalDefSchema[0])));\n"); w.write("\n"); } void writeDefinition(Writer w, CNode node, String fullClassName) throws IOException { boolean root = false; if (fullClassName == null) { fullClassName = getInternalClassName(node); root = true; } final String parent = fullClassName + "::"; java.util.Set<String> declaredTypes = new java.util.HashSet<String>(); for (CNode child : node.getChildren()) { boolean complexType = (child instanceof InnerCNode || child instanceof LeafCNode.EnumLeaf); if (complexType && !declaredTypes.contains(child.getName())) { String typeName = getTypeName(child, false); declaredTypes.add(child.getName()); if (child instanceof LeafCNode.EnumLeaf) { LeafCNode.EnumLeaf leaf = (LeafCNode.EnumLeaf) child; // Definition of getType(string) w.write(parent + typeName + "\n" + parent + "get" + typeName + "(const vespalib::string& name)\n" + "{\n" ); for (int i=0; i<leaf.getLegalValues().length; ++i) { w.write(" " + (i != 0 ? 
"} else " : "")); w.write("if (name == \"" + leaf.getLegalValues()[i] + "\") {\n" + " return " + leaf.getLegalValues()[i] + ";\n"); } w.write(" } else {\n" + " throw InvalidConfigException(\"Illegal enum value '\" + name + \"'\");\n" + " }\n" + "}\n" + "\n" ); // Definition of getTypeName(enum) w.write("vespalib::string\n" + parent + "get" + typeName + "Name(" + typeName + " t)\n" + "{\n" + " switch (t) {\n" ); for (int i=0; i<leaf.getLegalValues().length; ++i) { w.write(" case " + leaf.getLegalValues()[i] + ": return \"" + leaf.getLegalValues()[i] + "\";\n"); } w.write(" default:\n" + " {\n" + " vespalib::asciistream ost;\n" + " ost << \"UNKNOWN(\" << t << \")\";\n" + " return ost.str();\n" + " }\n" + " }\n" + "}\n" + "\n" ); w.write(parent + typeName + " " + parent + "Internal" + typeName + "Converter::operator()(const ::vespalib::string & __fieldName, const ::vespalib::slime::Inspector & __inspector) {\n"); w.write(" if (__inspector.valid()) {\n"); w.write(" return " + parent + "get" + typeName + "(__inspector.asString().make_string());\n"); w.write(" }\n"); w.write(" throw InvalidConfigException(\"Value for '\" + __fieldName + \"' required but not found\");\n"); w.write("}\n"); w.write(parent + typeName + " " + parent + "Internal" + typeName + "Converter::operator()(const ::vespalib::slime::Inspector & __inspector) {\n"); w.write(" return " + parent + "get" + typeName + "(__inspector.asString().make_string());\n"); w.write("}\n"); w.write(parent + typeName + " " + parent + "Internal" + typeName + "Converter::operator()(const ::vespalib::slime::Inspector & __inspector, " + typeName + " __eDefault) {\n"); w.write(" if (__inspector.valid()) {\n"); w.write(" return " + parent + "get" + typeName + "(__inspector.asString().make_string());\n"); w.write(" }\n"); w.write(" return __eDefault;\n"); w.write("}\n\n"); } else { writeDefinition(w, child, parent + typeName); } } } String tmpName = getTypeName(node, false); String typeName = root ? getInternalClassName(node) : tmpName; // Write empty constructor w.write(parent + typeName + "()\n"); for (int i=0; i<node.getChildren().length; ++i) { CNode child = node.getChildren()[i]; String childName = getIdentifier(child.getName()); if (i == 0) { w.write(" : " + childName + "("); } else { w.write("),\n " + childName + "("); } if (child.isArray || child.isMap) { // Default array for empty constructor is empty array. } else if (child instanceof LeafCNode) { // If we have a default value, use that.. 
LeafCNode leaf = (LeafCNode) child; if (leaf.getDefaultValue() != null) { w.write(getDefaultValue(leaf)); } else { // Defines empty constructor defaults for primitives without default set if (leaf.getType().equals("bool")) { w.write("false"); } else if (leaf.getType().equals("int")) { w.write("0"); } else if (leaf.getType().equals("double")) { w.write("0"); } else if (leaf.getType().equals("string")) { } else if (leaf.getType().equals("enum")) { LeafCNode.EnumLeaf enumNode = (LeafCNode.EnumLeaf) leaf; w.write(enumNode.getLegalValues()[0]); } else if (leaf.getType().equals("reference")) { } else if (leaf.getType().equals("file")) { } } } // If we hit neither else, we're an inner node, thus special type that has its own empty constructor } if (node.getChildren().length > 0) w.write(")\n"); w.write("" + "{\n" + "}\n" + "\n" ); // Write copy constructor if (root) { writeConfigClassCopyConstructorDefinition(w, fullClassName, typeName); writeConfigClassAssignmentOperatorDefinition(w, fullClassName, typeName); } else { writeClassCopyConstructorDefinition(w, fullClassName, node); writeClassAssignmentOperatorDefinition(w, fullClassName, node); } writeDestructor(w, parent, typeName); // Write parsing constructor String indent = " "; if (root) { w.write(typeName + "::" + typeName + "(const ConfigValue & __value)\n" + "{\n" + indent + "try {\n"); indent = " "; w.write(indent + "const std::vector<vespalib::string> & __lines(__value.getLines());\n"); } else { w.write(parent + typeName + "(const std::vector<vespalib::string> & __lines)\n" + "{\n"); } w.write("" + indent + "std::set<vespalib::string> __remainingValuesToParse(" + "__lines.begin(), __lines.end());\n"); w.write(indent + "for(std::set<vespalib::string>::iterator __rVTPiter = __remainingValuesToParse.begin();\n" + indent + " __rVTPiter != __remainingValuesToParse.end();)\n" + indent + "{\n" + indent + " if (ConfigParser::stripWhitespace(*__rVTPiter).empty()) {\n" + indent + " std::set<vespalib::string>::iterator __rVTPiter2 = __rVTPiter++;\n" + indent + " __remainingValuesToParse.erase(__rVTPiter2);\n" + indent + " } else {\n" + indent + " ++__rVTPiter;\n" + indent + " }\n" + indent + "}\n"); for (CNode child : node.getChildren()) { String childType = getTypeName(child, false); String childName = getIdentifier(child.getName()); if (child instanceof LeafCNode.EnumLeaf) { if (child.isArray) { w.write(indent + "std::vector<vespalib::string> " + childName + "__ValueList(\n "); } else if (child.isMap) { w.write(indent + "std::map<vespalib::string, vespalib::string> " + childName + "__ValueMap(\n "); } else { w.write(indent + childName + " = get" + childType + "("); } childType = "vespalib::string"; } else { w.write(indent + childName + " = "); } if (child.isArray) { w.write("ConfigParser::parseArray<" + childType + ">(\"" + child.getName() + "\", __lines)"); } else if (child.isMap) { w.write("ConfigParser::parseMap<" + childType + ">(\"" + child.getName() + "\", __lines)"); } else { if (child instanceof LeafCNode) { w.write("ConfigParser::parse<" + childType + ">(\"" + child.getName() + "\", __lines"); } else { w.write("ConfigParser::parseStruct<" + childType + ">(\"" + child.getName() + "\", __lines"); } if (child instanceof LeafCNode && ((LeafCNode) child).getDefaultValue() != null) { LeafCNode leaf = (LeafCNode) child; if (leaf.getDefaultValue().getValue() != null) { String defaultVal = getDefaultValue(leaf); if (leaf instanceof LeafCNode.EnumLeaf) { defaultVal = '"' + defaultVal + '"'; } w.write(", " + defaultVal); } } w.write(")"); } if (child 
instanceof LeafCNode.EnumLeaf) { childType = getTypeName(child, false); w.write(");\n"); if (child.isArray) { w.write(indent + childName + ".reserve(" + childName + "__ValueList.size());\n" + indent + "for (std::vector<vespalib::string>::const_iterator __it\n" + indent + " = " + childName + "__ValueList.begin();\n" + indent + " __it != " + childName + "__ValueList.end(); ++__it)\n" + indent + "{\n" + indent + " " + childName + ".push_back(get" + childType + "(*__it));\n" + indent + "}\n" ); } else if (child.isMap) { w.write(indent + "typedef std::map<vespalib::string, vespalib::string> __ValueMap;\n"); w.write(indent + "for (__ValueMap::iterator __it(" + childName + "__ValueMap.begin()), __mt(" + childName + "__ValueMap.end()); __it != __mt; __it++) {\n" + " " + childName + "[__it->first] = get" + childType + "(__it->second);\n" + "}\n" ); } } else { w.write(";\n"); } w.write(indent + "ConfigParser::stripLinesForKey(\"" + child.getName() + "\", " + "__remainingValuesToParse);\n"); } if (root) { indent = " "; w.write(indent + "} catch (InvalidConfigException & __ice) {\n"); w.write(indent + " throw InvalidConfigException(\"Error parsing config '\" + CONFIG_DEF_NAME + \"' in namespace '\" + CONFIG_DEF_NAMESPACE + \"'" + ": \" + __ice.getMessage());\n" + indent + "}\n"); } w.write("}\n" + "\n" ); // Write operator== String lineBreak = (parent.length() + typeName.length() < 50 ? "" : "\n"); w.write("bool\n" + parent + lineBreak + "operator==(const " + typeName + "& __rhs) const\n" + "{\n" + " return (" ); for (int i = 0; i<node.getChildren().length; ++i) { CNode child = node.getChildren()[i]; String childName = getIdentifier(child.getName()); if (i != 0) { w.write(" &&\n "); } w.write(childName + " == __rhs." + childName); } w.write(");\n" + "}\n" + "\n" ); // Write operator!= lineBreak = (parent.length() + typeName.length() < 50 ? 
"" : "\n"); w.write("bool\n" + parent + lineBreak + "operator!=(const " + typeName + "& __rhs) const\n" + "{\n" + " return !(operator==(__rhs));\n" + "}\n" + "\n" ); writeSlimeEncoder(w, node, parent, root); writeSlimeDecoder(w, node, parent, root); writeSlimeConstructor(w, node, parent, root); } public void writeSlimeEncoder(Writer w, CNode node, String parent, boolean root) throws IOException { String indent = " "; if (root) { w.write("void\n" + parent + "serialize(::config::ConfigDataBuffer & __buffer) const\n" + "{\n"); w.write(indent + "vespalib::Slime & __slime(__buffer.slimeObject());\n"); w.write(indent + "vespalib::slime::Cursor & __croot = __slime.setObject();\n"); w.write(indent + "__croot.setDouble(\"version\", CONFIG_DEF_SERIALIZE_VERSION);\n"); w.write(indent + "vespalib::slime::Cursor & __key = __croot.setObject(\"configKey\");\n"); w.write(indent + "__key.setString(\"defName\", vespalib::Memory(CONFIG_DEF_NAME));\n"); w.write(indent + "__key.setString(\"defNamespace\", vespalib::Memory(CONFIG_DEF_NAMESPACE));\n"); w.write(indent + "__key.setString(\"defMd5\", vespalib::Memory(CONFIG_DEF_MD5));\n"); w.write(indent + "vespalib::slime::Cursor & __keySchema =__key.setArray(\"defSchema\");\n"); w.write(indent + "for (size_t i = 0; i < CONFIG_DEF_SCHEMA.size(); i++) {\n"); w.write(indent + " __keySchema.addString(vespalib::Memory(CONFIG_DEF_SCHEMA[i]));\n"); w.write(indent + "}\n"); w.write(indent + "vespalib::slime::Cursor & __cursor = __croot.setObject(\"configPayload\");\n"); } else { w.write("void\n" + parent + "serialize(vespalib::slime::Cursor & __cursor) const\n" + "{\n"); } for (CNode child : node.getChildren()) { String childName = getIdentifier(child.getName()); String childType = getTypeName(child, false); w.write(indent + "{\n"); indent = " "; w.write(indent + "vespalib::slime::Cursor & __c = __cursor.setObject(\"" + child.getName() + "\");\n"); if (child.isArray) { w.write(indent + "__c.setString(\"type\", \"array\");\n"); w.write(indent + "vespalib::slime::Cursor & __c2 = __c.setArray(\"value\");\n"); w.write(indent + "for (size_t __i = 0; __i < " + childName + ".size(); __i++) {\n"); w.write(indent + " vespalib::slime::Cursor & __c3 = __c2.addObject();\n"); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write(indent + " __c3.setString(\"type\", \"enum\");\n"); w.write(indent + " __c3.set" + repType); w.write("(\"value\", vespalib::Memory(get" + childType + "Name(" + childName + "[__i])));\n"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); w.write(indent + " __c3.setString(\"type\", \"" + type + "\");\n"); w.write(indent + " __c3.set" + repType); if ("String".equals(repType)) { w.write("(\"value\", vespalib::Memory(" + childName + "[__i]));\n"); } else { w.write("(\"value\", " + childName + "[__i]);\n"); } } else { w.write(indent + " __c3.setString(\"type\", \"struct\");\n"); w.write(indent + " Cursor & __c4 = __c3.setObject(\"value\");\n"); w.write(indent + " " + childName + "[__i].serialize(__c4);\n"); } w.write(indent + "}\n"); } else if (child.isMap) { w.write(indent + "__c.setString(\"type\", \"map\");\n"); w.write(indent + "vespalib::slime::Cursor & __c2 = __c.setArray(\"value\");\n"); String childMapType = getTypeName(child, true); w.write(indent + "for (" + childMapType + "::const_iterator it(" + childName + ".begin()), mt(" + childName + ".end()); it != mt; it++) {\n"); w.write(indent + " vespalib::slime::Cursor & __c3 = 
__c2.addObject();\n"); w.write(indent + " __c3.setString(\"key\", vespalib::Memory(it->first));\n"); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write(indent + " __c3.setString(\"type\", \"enum\");\n"); w.write(indent + " __c3.set" + repType); w.write("(\"value\", vespalib::Memory(get" + childType + "Name(it->second)));\n"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); w.write(indent + " __c3.setString(\"type\", \"" + type + "\");\n"); w.write(indent + " __c3.set" + repType); if ("String".equals(repType)) { w.write("(\"value\", vespalib::Memory(it->second));\n"); } else { w.write("(\"value\", it->second);\n"); } } else { w.write(indent + " __c3.setString(\"type\", \"struct\");\n"); w.write(indent + " Cursor & __c4 = __c3.setObject(\"value\");\n"); w.write(indent + " it->second.serialize(__c4);\n"); } w.write(indent + "}\n"); } else { if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write(indent + "__c.setString(\"type\", \"enum\");\n"); w.write(indent + "__c.set" + repType); w.write("(\"value\", vespalib::Memory(get" + childType + "Name(" + childName + ")));\n"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); w.write(indent + "__c.setString(\"type\", \"" + type + "\");\n"); w.write(indent + "__c.set" + repType); if ("String".equals(repType)) { w.write("(\"value\", vespalib::Memory(" + childName + "));\n"); } else { w.write("(\"value\", " + childName + ");\n"); } } else { w.write(indent + "__c.setString(\"type\", \"struct\");\n"); w.write(indent + "Cursor & __c2 = __c.setObject(\"value\");\n"); w.write(indent + childName + ".serialize(__c2);\n"); } } indent = " "; w.write(indent + "}\n"); } w.write("}\n\n"); } public void writeSlimeDecoder(Writer w, CNode node, String parent, boolean root) throws IOException { String tmpName = getTypeName(node, false); String typeName = root ? 
getInternalClassName(node) : tmpName; String indent = " "; if (root) { w.write("" + typeName + "::" + typeName + "(const ::config::ConfigDataBuffer & __buffer)\n" + "{\n"); w.write(indent + "const vespalib::Slime & __slime(__buffer.slimeObject());\n"); w.write(indent + "vespalib::slime::Inspector & __croot = __slime.get();\n"); w.write(indent + "vespalib::slime::Inspector & __inspector = __croot[\"configPayload\"];\n"); } else { w.write("" + parent + typeName + "(const vespalib::slime::Inspector & __inspector)\n" + "{\n"); } for (CNode child : node.getChildren()) { String childName = getIdentifier(child.getName()); String childType = getTypeName(child, false); String inspectorLine = "__inspector[\"" + child.getName() + "\"][\"value\"]"; if (child.isArray) { w.write(indent + "for (size_t __i = 0; __i < " + inspectorLine + ".children(); __i++) {\n"); w.write(indent + " " + childName + ".push_back("); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write("get" + childType + "(" + inspectorLine + "[__i][\"value\"].as" + repType + "().make_string())"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); if ("String".equals(repType)) { w.write("" + inspectorLine + "[__i][\"value\"].as" + repType + "().make_string()"); } else { w.write("" + inspectorLine + "[__i][\"value\"].as" + repType + "()"); } } else { w.write(childType + "(" + inspectorLine + "[__i][\"value\"])"); } w.write(");\n"); w.write(indent + "}\n"); } else if (child.isMap) { w.write(indent + "for (size_t __i = 0; __i < " + inspectorLine + ".children(); __i++) {\n"); w.write(indent + " " + childName + "[" + inspectorLine + "[__i][\"key\"].asString().make_string()] = "); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write("get" + childType + "(" + inspectorLine + "[__i][\"value\"].as" + repType + "().make_string())"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); if ("String".equals(repType)) { w.write("" + inspectorLine + "[__i][\"value\"].as" + repType + "().make_string()"); } else { w.write("" + inspectorLine + "[__i][\"value\"].as" + repType + "()"); } } else { w.write(childType + "(" + inspectorLine + "[__i][\"value\"])"); } w.write(";\n"); w.write(indent + "}\n"); } else { w.write(indent + childName + " = "); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write("get" + childType + "(" + inspectorLine + ".as" + repType + "().make_string())"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); if ("String".equals(repType)) { w.write("" + inspectorLine + ".as" + repType + "().make_string()"); } else { w.write("" + inspectorLine + ".as" + repType + "()"); } } else { w.write(childType + "(" + inspectorLine + ")"); } w.write(";\n"); } } w.write("}\n\n"); } public void writeSlimeConstructor(Writer w, CNode node, String parent, boolean root) throws IOException { String tmpName = getTypeName(node, false); String typeName = root ? 
getInternalClassName(node) : tmpName; String indent = " "; if (root) { w.write("" + typeName + "::" + typeName + "(const ::config::ConfigPayload & __payload)\n" + "{\n"); } else { w.write("" + parent + typeName + "(const ::config::ConfigPayload & __payload)\n" + "{\n"); } w.write(indent + "const vespalib::slime::Inspector & __inspector(__payload.get());\n"); for (CNode child : node.getChildren()) { String childName = getIdentifier(child.getName()); String childType = getTypeName(child, false); String childInspector = "__inspector[\"" + child.getName() + "\"]"; if (child.isArray) { String inserterName = "__" + childName + "Inserter"; w.write(indent + "::config::internal::VectorInserter<" + childType); if (child instanceof LeafCNode.EnumLeaf) { w.write(", Internal" + childType + "Converter"); } w.write("> " + inserterName + "(" + childName + ");\n"); w.write(indent + childInspector + ".traverse(" + inserterName + ");\n"); } else if (child.isMap) { String inserterName = "__" + childName + "Inserter"; w.write(indent + "::config::internal::MapInserter<" + childType); if (child instanceof LeafCNode.EnumLeaf) { w.write(", Internal" + childType + "Converter"); } w.write("> " + inserterName + "(" + childName + ");\n"); w.write(indent + childInspector + ".traverse(" + inserterName + ");\n"); } else { w.write(indent + childName + " = "); if (child instanceof LeafCNode.EnumLeaf) { w.write("Internal" + childType + "Converter"); } else { w.write("::config::internal::ValueConverter<" + childType + ">"); } if (child instanceof LeafCNode && ((LeafCNode) child).getDefaultValue() != null) { LeafCNode leaf = (LeafCNode) child; String defaultValue = getDefaultValue(leaf); w.write("()(" + childInspector + ", " + defaultValue + ");\n"); } else if (child instanceof InnerCNode) { w.write("()(" + childInspector + ");\n"); } else { w.write("()(\"" + child.getName() + "\", " + childInspector + ");\n"); } } } w.write("}\n\n"); } void writeBodyFooter(Writer w, CNode root) throws IOException { w.write("} // namespace internal\n\n"); writeNameSpaceEnd(w, generateCppNameSpace(root)); } String getDefaultValue(LeafCNode leaf) { String defaultVal = leaf.getDefaultValue().getStringRepresentation(); if (leaf.getType().equals("string") && defaultVal.equals("null")) throw new CodegenRuntimeException("Default value null not allowed for C++ config"); if (leaf.getType().equals("long") && "-9223372036854775808".equals(defaultVal)) { return "LONG_MIN"; } else if (leaf.getType().equals("int") && "-2147483648".equals(defaultVal)) { return "INT_MIN"; } else { return defaultVal; } } }
configgen/src/main/java/com/yahoo/config/codegen/CppClassBuilder.java
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.config.codegen; import java.io.FileWriter; import java.io.File; import java.io.BufferedReader; import java.io.IOException; import java.io.FileReader; import java.io.StringWriter; import java.io.Writer; import java.util.Map; import java.util.HashMap; import java.util.Collections; import java.util.Arrays; import java.util.StringTokenizer; import java.util.stream.Collectors; /** * This class autogenerates C++ code for the C++ config, based on a CNode tree given. */ public class CppClassBuilder implements ClassBuilder { private final CNode root; private final NormalizedDefinition nd; private final File rootDir; private final String relativePathUnderRoot; private static final Map<String, String> vectorTypeDefs; static { Map<String, String> map = new HashMap<String, String>(); map.put("bool", "BoolVector"); map.put("int32_t", "IntVector"); map.put("int64_t", "LongVector"); map.put("double", "DoubleVector"); map.put("vespalib::string", "StringVector"); vectorTypeDefs = Collections.unmodifiableMap(map); } private static final Map<String, String> mapTypeDefs; static { Map<String, String> map = new HashMap<>(); map.put("bool", "BoolMap"); map.put("int32_t", "IntMap"); map.put("int64_t", "LongMap"); map.put("double", "DoubleMap"); map.put("vespalib::string", "StringMap"); mapTypeDefs = Collections.unmodifiableMap(map); } private static final Map<String, String> slimeTypeMap; static { Map<String, String> map = new HashMap<String, String>(); map.put("bool", "Bool"); map.put("int", "Long"); map.put("long", "Long"); map.put("double", "Double"); map.put("string", "String"); map.put("enum", "String"); map.put("file", "String"); map.put("reference", "String"); slimeTypeMap = Collections.unmodifiableMap(map); } public CppClassBuilder(CNode root, NormalizedDefinition nd, File rootDir, String relativePathUnderRoot) { this.root = root; this.nd = nd; this.rootDir = rootDir; this.relativePathUnderRoot = relativePathUnderRoot; } public void createConfigClasses() { generateConfig(root, nd); } String readFile(File f) throws IOException { if (!f.isFile()) return null; StringBuilder sb = new StringBuilder(); try (BufferedReader sr = new BufferedReader(new FileReader(f))) { while (true) { String line = sr.readLine(); if (line == null) break; sb.append(line).append("\n"); } return sb.toString(); } } void writeFile(File f, String content) throws IOException { FileWriter fw = new FileWriter(f); fw.write(content); fw.close(); } void generateConfig(CNode root, NormalizedDefinition nd) { try{ StringWriter headerWriter = new StringWriter(); StringWriter bodyWriter = new StringWriter(); writeHeaderFile(headerWriter, root); writeBodyFile(bodyWriter, root, relativePathUnderRoot, nd); String newHeader = headerWriter.toString(); String newBody = bodyWriter.toString(); File headerFile = new File(rootDir, relativePathUnderRoot + "/" + getFileName(root, "h")); File bodyFile = new File(rootDir, relativePathUnderRoot + "/" + getFileName(root, "cpp")); String oldHeader = readFile(headerFile); String oldBody = readFile(bodyFile); if (oldHeader == null || !oldHeader.equals(newHeader)) { writeFile(headerFile, newHeader); } if (oldBody == null || !oldBody.equals(newBody)) { writeFile(bodyFile, newBody); } } catch (IOException e) { e.printStackTrace(); } } String getFileName(CNode node, String extension) { return "config-" + node.getName() + "." 
+ extension; } static String removeDashesAndUpperCaseAllFirstChars(String source, boolean capitalizeFirst) { // Create upper case chars after each dash String parts[] = source.split("[-_]"); StringBuilder sb = new StringBuilder(); for (String s : parts) { sb.append(s.substring(0, 1).toUpperCase()).append(s.substring(1)); } String result = sb.toString(); if (!capitalizeFirst) { result = result.substring(0,1).toLowerCase() + result.substring(1); } return result; } /** Convert name of type to the name we want to use in macro ifdefs in file. */ String getDefineName(String name) { return name.toUpperCase().replace("-", ""); } /** Convert name of type to the name we want to use as type name in the generated code. */ static String getTypeName(String name) { return removeDashesAndUpperCaseAllFirstChars(name, true); } /** Convert name of an identifier from value in def file to name to use in C++ file. */ String getIdentifier(String name) { return removeDashesAndUpperCaseAllFirstChars(name, false); } void writeHeaderFile(Writer w, CNode root) throws IOException { writeHeaderHeader(w, root); writeHeaderPublic(w, root); writeHeaderFooter(w, root); } void writeHeaderPublic(Writer w, CNode root) throws IOException { w.write("public:\n"); writeHeaderTypeDefs(w, root, " "); writeTypeDeclarations(w, root, " "); writeHeaderFunctionDeclarations(w, getTypeName(root, false), root, " "); writeStaticMemberDeclarations(w, " "); writeMembers(w, root, " "); } String [] generateCppNameSpace(CNode root) { String namespace = root.getNamespace(); if (namespace.contains(".")) { return namespace.split("\\."); } return new String[]{namespace}; } String generateCppNameSpaceString(String[] namespaceList) { StringBuilder str = new StringBuilder(); for (int i = 0; i < namespaceList.length - 1; i++) { str.append(namespaceList[i]); str.append("::"); } str.append(namespaceList[namespaceList.length - 1]); return str.toString(); } String generateCppNameSpaceDefine(String[] namespaceList) { StringBuilder str = new StringBuilder(); for (int i = 0; i < namespaceList.length - 1; i++) { str.append(namespaceList[i].toUpperCase()); str.append("_"); } str.append(namespaceList[namespaceList.length - 1].toUpperCase()); return str.toString(); } void writeNameSpaceBegin(Writer w, String [] namespaceList) throws IOException { w.write("namespace "); w.write(getNestedNameSpace(namespaceList)); w.write(" {\n"); } String getNestedNameSpace(String [] namespaceList) { return Arrays.stream(namespaceList).map(String::toString).collect(Collectors.joining("::")); } void writeNameSpaceEnd(Writer w, String [] namespaceList) throws IOException { w.write("} // namespace "); w.write(getNestedNameSpace(namespaceList)); w.write("\n"); } void writeHeaderHeader(Writer w, CNode root) throws IOException { String [] namespaceList = generateCppNameSpace(root); String namespacePrint = generateCppNameSpaceString(namespaceList); String namespaceDefine = generateCppNameSpaceDefine(namespaceList); String className = getTypeName(root, false); String defineName = namespaceDefine + "_" + getDefineName(className); w.write("" + "/**\n" + " * @class " + namespacePrint + "::" + className + "\n" + " * @ingroup config\n" + " *\n" + " * @brief This is an autogenerated class for handling VESPA config.\n" + " *\n" + " * This class is autogenerated by vespa from a config definition file.\n" + " * To subscribe to config, you need to include the config/config.h header, \n" + " * and create a ConfigSubscriber in order to subscribe for config.\n" ); if (root.getComment().length() > 0) { 
w.write(" *\n"); StringTokenizer st = new StringTokenizer(root.getComment(), "\n"); while (st.hasMoreTokens()) { w.write(" * " + st.nextToken() + "\n"); } } w.write("" + " */\n" + "#ifndef CLOUD_CONFIG_" + defineName + "_H\n" + "#define CLOUD_CONFIG_" + defineName + "_H\n" + "\n" + "#include <vespa/config/configgen/configinstance.h>\n" + "#include <vespa/vespalib/stllike/string.h>\n" + "#include <vector>\n" + "#include <map>\n" + "\n"); w.write("namespace config {\n"); w.write(" class ConfigValue;\n"); w.write(" class ConfigPayload;\n"); w.write("}\n\n"); w.write("namespace vespalib::slime {\n"); w.write(" struct Inspector;\n"); w.write(" struct Cursor;\n"); w.write("}\n\n"); writeNameSpaceBegin(w, namespaceList); w.write("\nnamespace internal {\n\n"); w.write("" + "/**\n" + " * This class contains the config. DO NOT USE THIS CLASS DIRECTLY. Use the typedeffed\n" + " * versions after this class declaration.\n" + " */\n" + "class Internal" + className + "Type : public ::config::ConfigInstance\n" + "{\n" ); } void writeTypeDeclarations(Writer w, CNode node, String indent) throws IOException { java.util.Set<String> declaredTypes = new java.util.HashSet<String>(); for (CNode child : node.getChildren()) { boolean complexType = (child instanceof InnerCNode || child instanceof LeafCNode.EnumLeaf); if (complexType && !declaredTypes.contains(child.getName())) { String typeName = getTypeName(child, false); declaredTypes.add(child.getName()); if (child instanceof LeafCNode.EnumLeaf) { w.write(indent + "enum " + typeName + " { "); LeafCNode.EnumLeaf leaf = (LeafCNode.EnumLeaf) child; for (int i=0; i<leaf.getLegalValues().length; ++i) { if (i != 0) { w.write(", "); } w.write(leaf.getLegalValues()[i]); } w.write(" };\n" + indent + "typedef std::vector<" + typeName + "> " + typeName + "Vector;" + "\n" + indent + "typedef std::map<vespalib::string, " + typeName + "> " + typeName + "Map;" + "\n" + indent + "static " + typeName + " get" + typeName + "(const vespalib::string&);\n" + indent + "static vespalib::string get" + typeName + "Name(" + typeName + " e);\n" + "\n" ); w.write(indent + "struct Internal" + typeName + "Converter {\n"); w.write(indent + " " + typeName + " operator()(const ::vespalib::string & __fieldName, const ::vespalib::slime::Inspector & __inspector);\n"); w.write(indent + " " + typeName + " operator()(const ::vespalib::slime::Inspector & __inspector);\n"); w.write(indent + " " + typeName + " operator()(const ::vespalib::slime::Inspector & __inspector, " + typeName + " __eDefault);\n"); w.write(indent + "};\n"); } else { w.write(indent + "class " + typeName + " {\n"); w.write(indent + "public:\n"); writeTypeDeclarations(w, child, indent + " "); writeStructFunctionDeclarations(w, getTypeName(child, false), child, indent + " "); writeMembers(w, child, indent + " "); w.write(indent + "};\n"); w.write(indent + "typedef std::vector<" + typeName + "> " + typeName + "Vector;\n\n"); w.write(indent + "typedef std::map<vespalib::string, " + typeName + "> " + typeName + "Map;\n\n"); } } } } void writeHeaderFunctionDeclarations(Writer w, String className, CNode node, String indent) throws IOException { w.write("" + indent + "const vespalib::string & defName() const override { return CONFIG_DEF_NAME; }\n" + indent + "const vespalib::string & defVersion() const { return CONFIG_DEF_VERSION; }\n" // TODO: Remove on Vespa 8 + indent + "const vespalib::string & defMd5() const override { return CONFIG_DEF_MD5; }\n" + indent + "const vespalib::string & defNamespace() const override { return 
CONFIG_DEF_NAMESPACE; }\n" + indent + "void serialize(::config::ConfigDataBuffer & __buffer) const override;\n"); writeConfigClassFunctionDeclarations(w, "Internal" + className + "Type", node, indent); } void writeConfigClassFunctionDeclarations(Writer w, String className, CNode node, String indent) throws IOException { w.write(indent + className + "(const ::config::ConfigValue & __value);\n"); w.write(indent + className + "(const ::config::ConfigDataBuffer & __value);\n"); w.write(indent + className + "(const ::config::ConfigPayload & __payload);\n"); writeCommonFunctionDeclarations(w, className, node, indent); } void writeStructFunctionDeclarations(Writer w, String className, CNode node, String indent) throws IOException { w.write(indent + className + "(const std::vector<vespalib::string> & __lines);\n"); w.write(indent + className + "(const vespalib::slime::Inspector & __inspector);\n"); w.write(indent + className + "(const ::config::ConfigPayload & __payload);\n"); writeCommonFunctionDeclarations(w, className, node, indent); w.write(indent + "void serialize(vespalib::slime::Cursor & __cursor) const;\n"); } void writeClassCopyConstructorDeclaration(Writer w, String className, String indent) throws IOException { w.write(indent + className + "(const " + className + " & __rhs);\n"); } void writeClassAssignmentOperatorDeclaration(Writer w, String className, String indent) throws IOException { w.write(indent + className + " & operator = (const " + className + " & __rhs);\n"); } void writeConfigClassCopyConstructorDefinition(Writer w, String parent, String className) throws IOException { w.write(parent + "::" + className + "(const " + className + " & __rhs) = default;\n"); } void writeConfigClassAssignmentOperatorDefinition(Writer w, String parent, String className) throws IOException { w.write(parent + " & " + parent + "::" + "operator =(const " + className + " & __rhs) = default;\n"); } void writeClassCopyConstructorDefinition(Writer w, String parent, CNode node) throws IOException { String typeName = getTypeName(node, false); w.write(parent + "::" + typeName + "(const " + typeName + " & __rhs) = default;\n"); } void writeClassAssignmentOperatorDefinition(Writer w, String parent, CNode node) throws IOException { String typeName = getTypeName(node, false); // Write empty constructor w.write(parent + " & " + parent + "::" + "operator = (const " + typeName + " & __rhs) = default;\n"); } void writeDestructor(Writer w, String parent, String className) throws IOException { w.write(parent + "~" + className + "() { } \n"); } void writeCommonFunctionDeclarations(Writer w, String className, CNode node, String indent) throws IOException { w.write("" + indent + className + "();\n"); writeClassCopyConstructorDeclaration(w, className, indent); writeClassAssignmentOperatorDeclaration(w, className, indent); w.write("" + indent + "~" + className + "();\n"); w.write("\n" + indent + "bool operator==(const " + className + "& __rhs) const;\n" + indent + "bool operator!=(const " + className + "& __rhs) const;\n" + "\n" ); } static String getTypeName(CNode node, boolean includeArray) { String type = null; if (node instanceof InnerCNode) { InnerCNode innerNode = (InnerCNode) node; type = getTypeName(innerNode.getName()); } else if (node instanceof LeafCNode) { LeafCNode leaf = (LeafCNode) node; if (leaf.getType().equals("bool")) { type = "bool"; } else if (leaf.getType().equals("int")) { type = "int32_t"; } else if (leaf.getType().equals("long")) { type = "int64_t"; } else if (leaf.getType().equals("double")) { 
type = "double"; } else if (leaf.getType().equals("enum")) { type = getTypeName(node.getName()); } else if (leaf.getType().equals("string")) { type = "vespalib::string"; } else if (leaf.getType().equals("reference")) { type = "vespalib::string"; } else if (leaf.getType().equals("file")) { type = "vespalib::string"; } else { throw new IllegalArgumentException("Unknown leaf datatype " + leaf.getType()); } } if (type == null) { throw new IllegalArgumentException("Unknown node " + node); } if (node.isArray && includeArray) { if (vectorTypeDefs.containsKey(type)) { type = vectorTypeDefs.get(type); } else { type = type + "Vector"; } } else if (node.isMap && includeArray) { if (mapTypeDefs.containsKey(type)) { type = mapTypeDefs.get(type); } else { type = type + "Map"; } } return type; } void writeStaticMemberDeclarations(Writer w, String indent) throws IOException { w.write("" + indent + "static const vespalib::string CONFIG_DEF_MD5;\n" + indent + "static const vespalib::string CONFIG_DEF_VERSION;\n" + indent + "static const vespalib::string CONFIG_DEF_NAME;\n" + indent + "static const vespalib::string CONFIG_DEF_NAMESPACE;\n" + indent + "static const std::vector<vespalib::string> CONFIG_DEF_SCHEMA;\n" + indent + "static const int64_t CONFIG_DEF_SERIALIZE_VERSION;\n" + "\n" ); } void writeComment(Writer w, String indent, String comment, boolean javadoc) throws IOException { /** If simple one liner comment, write on one line. */ if (javadoc && comment.indexOf('\n') == -1 && comment.length() <= 80 - (indent.length() + 7)) { w.write(indent + "/** " + comment + " */\n"); return; } else if (!javadoc && comment.indexOf('\n') == -1 && comment.length() <= 80 - (indent.length() + 3)) { w.write(indent + "// " + comment + "\n"); return; } /** If not we need to write multi line comment. */ int maxLineLen = 80 - (indent.length() + 3); if (javadoc) w.write(indent + "/**\n"); do { String current; // Extract first line to write int newLine = comment.indexOf('\n'); if (newLine == -1) { current = comment; comment = ""; } else { current = comment.substring(0, newLine); comment = comment.substring(newLine + 1); } // If line too long, cut it in two if (current.length() > maxLineLen) { int spaceIndex = current.lastIndexOf(' ', maxLineLen); if (spaceIndex >= maxLineLen - 15) { comment = current.substring(spaceIndex + 1) + "\n" + comment; current = current.substring(0, spaceIndex); } else { comment = current.substring(maxLineLen) + "\n" + comment; current = current.substring(0, maxLineLen) + "-"; } } w.write(indent + (javadoc ? 
" * " : "// ") + current + "\n"); } while (comment.length() > 0); if (javadoc) w.write(indent + " */\n"); } void writeMembers(Writer w, CNode node, String indent) throws IOException { for (CNode child : node.getChildren()) { String typeName = getTypeName(child, true); if (child.getComment().length() > 0) { String comment = child.getComment(); int index; do { index = comment.indexOf("\n\n"); if (index == -1) break; String next = comment.substring(0, index); comment = comment.substring(index + 2); w.write("\n"); writeComment(w, indent, next, false); } while (true); w.write("\n"); writeComment(w, indent, comment, true); } w.write(indent + typeName + " " + getIdentifier(child.getName()) + ";"); if (child instanceof LeafCNode) { LeafCNode leaf = (LeafCNode) child; DefaultValue value = leaf.getDefaultValue(); if (value != null) { w.write(" // Default: " + value.getStringRepresentation()); } } w.write("\n"); } } void writeHeaderTypeDefs(Writer w, CNode root, String indent) throws IOException { w.write(indent + "typedef std::unique_ptr<const " + getInternalClassName(root) + "> UP;\n"); for (Map.Entry<String, String> entry : vectorTypeDefs.entrySet()) { String typeName = entry.getKey(); String vectorName = entry.getValue(); String typeDef = "typedef std::vector<" + typeName + "> " + vectorName; w.write(indent + typeDef + ";\n"); } for (Map.Entry<String, String> entry : mapTypeDefs.entrySet()) { String typeName = entry.getKey(); String mapName = entry.getValue(); String typeDef = "typedef std::map<vespalib::string, " + typeName + "> " + mapName; w.write(indent + typeDef + ";\n"); } } private static String getInternalClassName(CNode root) { return "Internal" + getTypeName(root, false) + "Type"; } void writeHeaderFooter(Writer w, CNode root) throws IOException { String [] namespaceList = generateCppNameSpace(root); String namespaceDefine = generateCppNameSpaceDefine(namespaceList); String className = getTypeName(root, false); String defineName = namespaceDefine + "_" + getDefineName(className); w.write("" + "};\n" + "\n" + "} // namespace internal\n\n"); w.write("typedef internal::" + getInternalClassName(root) + " " + className + "ConfigBuilder;\n"); w.write("typedef const internal::" + getInternalClassName(root) + " " + className + "Config;\n"); w.write("\n"); writeNameSpaceEnd(w, namespaceList); w.write("#endif // VESPA_config_" + defineName + "_H\n"); } void writeBodyFile(Writer w, CNode root, String subdir, NormalizedDefinition nd) throws IOException { writeBodyHeader(w, root, subdir); writeStaticMemberDefinitions(w, root, nd); writeDefinition(w, root, null); writeBodyFooter(w, root); } void writeBodyHeader(Writer w, CNode root, String subdir) throws IOException { if (subdir == null) { w.write("#include \"" + getFileName(root, "h") + "\""); } else { w.write("#include <" + subdir + "/" + getFileName(root, "h") + ">"); } w.write("\n"); w.write("#include <vespa/config/common/configvalue.h>\n"); w.write("#include <vespa/config/common/exceptions.h>\n"); w.write("#include <vespa/config/configgen/configpayload.h>\n"); w.write("#include <vespa/config/print/configdatabuffer.h>\n"); w.write("#include <vespa/config/common/configparser.h>\n"); w.write("#include <vespa/config/configgen/vector_inserter.h>\n"); w.write("#include <vespa/config/configgen/map_inserter.h>\n"); w.write("#include <vespa/vespalib/data/slime/convenience.h>\n"); w.write("#include <vespa/vespalib/data/slime/slime.h>\n"); w.write("#include <vespa/vespalib/stllike/asciistream.h>\n"); w.write("\n"); writeNameSpaceBegin(w, 
generateCppNameSpace(root)); w.write("\nnamespace internal {\n\n"); w.write("using ::config::ConfigParser;\n"); w.write("using ::config::InvalidConfigException;\n"); w.write("using ::config::ConfigInstance;\n"); w.write("using ::config::ConfigValue;\n"); w.write("using namespace vespalib::slime::convenience;\n"); w.write("\n"); } void writeStaticMemberDefinitions(Writer w, CNode root, NormalizedDefinition nd) throws IOException { String typeName = getInternalClassName(root); w.write("const vespalib::string " + typeName + "::CONFIG_DEF_MD5(\"" + root.defMd5 + "\");\n" + "const vespalib::string " + typeName + "::CONFIG_DEF_VERSION(\"" + root.defVersion + "\");\n" + "const vespalib::string " + typeName + "::CONFIG_DEF_NAME(\"" + root.defName + "\");\n" + "const vespalib::string " + typeName + "::CONFIG_DEF_NAMESPACE(\"" + root.getNamespace() + "\");\n" + "const int64_t " + typeName + "::CONFIG_DEF_SERIALIZE_VERSION(1);\n"); w.write("const static vespalib::string __internalDefSchema[] = {\n"); for (String line : nd.getNormalizedContent()) { w.write("\"" + line.replace("\"", "\\\"") + "\",\n"); } w.write("};\n"); w.write("const std::vector<vespalib::string> " + typeName + "::CONFIG_DEF_SCHEMA(__internalDefSchema,\n"); w.write(" __internalDefSchema + (sizeof(__internalDefSchema) / \n"); w.write(" sizeof(__internalDefSchema[0])));\n"); w.write("\n"); } void writeDefinition(Writer w, CNode node, String fullClassName) throws IOException { boolean root = false; if (fullClassName == null) { fullClassName = getInternalClassName(node); root = true; } final String parent = fullClassName + "::"; java.util.Set<String> declaredTypes = new java.util.HashSet<String>(); for (CNode child : node.getChildren()) { boolean complexType = (child instanceof InnerCNode || child instanceof LeafCNode.EnumLeaf); if (complexType && !declaredTypes.contains(child.getName())) { String typeName = getTypeName(child, false); declaredTypes.add(child.getName()); if (child instanceof LeafCNode.EnumLeaf) { LeafCNode.EnumLeaf leaf = (LeafCNode.EnumLeaf) child; // Definition of getType(string) w.write(parent + typeName + "\n" + parent + "get" + typeName + "(const vespalib::string& name)\n" + "{\n" ); for (int i=0; i<leaf.getLegalValues().length; ++i) { w.write(" " + (i != 0 ? 
"} else " : "")); w.write("if (name == \"" + leaf.getLegalValues()[i] + "\") {\n" + " return " + leaf.getLegalValues()[i] + ";\n"); } w.write(" } else {\n" + " throw InvalidConfigException(\"Illegal enum value '\" + name + \"'\");\n" + " }\n" + "}\n" + "\n" ); // Definition of getTypeName(enum) w.write("vespalib::string\n" + parent + "get" + typeName + "Name(" + typeName + " t)\n" + "{\n" + " switch (t) {\n" ); for (int i=0; i<leaf.getLegalValues().length; ++i) { w.write(" case " + leaf.getLegalValues()[i] + ": return \"" + leaf.getLegalValues()[i] + "\";\n"); } w.write(" default:\n" + " {\n" + " vespalib::asciistream ost;\n" + " ost << \"UNKNOWN(\" << t << \")\";\n" + " return ost.str();\n" + " }\n" + " }\n" + "}\n" + "\n" ); w.write(parent + typeName + " " + parent + "Internal" + typeName + "Converter::operator()(const ::vespalib::string & __fieldName, const ::vespalib::slime::Inspector & __inspector) {\n"); w.write(" if (__inspector.valid()) {\n"); w.write(" return " + parent + "get" + typeName + "(__inspector.asString().make_string());\n"); w.write(" }\n"); w.write(" throw InvalidConfigException(\"Value for '\" + __fieldName + \"' required but not found\");\n"); w.write("}\n"); w.write(parent + typeName + " " + parent + "Internal" + typeName + "Converter::operator()(const ::vespalib::slime::Inspector & __inspector) {\n"); w.write(" return " + parent + "get" + typeName + "(__inspector.asString().make_string());\n"); w.write("}\n"); w.write(parent + typeName + " " + parent + "Internal" + typeName + "Converter::operator()(const ::vespalib::slime::Inspector & __inspector, " + typeName + " __eDefault) {\n"); w.write(" if (__inspector.valid()) {\n"); w.write(" return " + parent + "get" + typeName + "(__inspector.asString().make_string());\n"); w.write(" }\n"); w.write(" return __eDefault;\n"); w.write("}\n\n"); } else { writeDefinition(w, child, parent + typeName); } } } String tmpName = getTypeName(node, false); String typeName = root ? getInternalClassName(node) : tmpName; // Write empty constructor w.write(parent + typeName + "()\n"); for (int i=0; i<node.getChildren().length; ++i) { CNode child = node.getChildren()[i]; String childName = getIdentifier(child.getName()); if (i == 0) { w.write(" : " + childName + "("); } else { w.write("),\n " + childName + "("); } if (child.isArray || child.isMap) { // Default array for empty constructor is empty array. } else if (child instanceof LeafCNode) { // If we have a default value, use that.. 
LeafCNode leaf = (LeafCNode) child; if (leaf.getDefaultValue() != null) { w.write(getDefaultValue(leaf)); } else { // Defines empty constructor defaults for primitives without default set if (leaf.getType().equals("bool")) { w.write("false"); } else if (leaf.getType().equals("int")) { w.write("0"); } else if (leaf.getType().equals("double")) { w.write("0"); } else if (leaf.getType().equals("string")) { } else if (leaf.getType().equals("enum")) { LeafCNode.EnumLeaf enumNode = (LeafCNode.EnumLeaf) leaf; w.write(enumNode.getLegalValues()[0]); } else if (leaf.getType().equals("reference")) { } else if (leaf.getType().equals("file")) { } } } // If we hit neither else, we're an inner node, thus special type that has its own empty constructor } if (node.getChildren().length > 0) w.write(")\n"); w.write("" + "{\n" + "}\n" + "\n" ); // Write copy constructor if (root) { writeConfigClassCopyConstructorDefinition(w, fullClassName, typeName); writeConfigClassAssignmentOperatorDefinition(w, fullClassName, typeName); } else { writeClassCopyConstructorDefinition(w, fullClassName, node); writeClassAssignmentOperatorDefinition(w, fullClassName, node); } writeDestructor(w, parent, typeName); // Write parsing constructor String indent = " "; if (root) { w.write(typeName + "::" + typeName + "(const ConfigValue & __value)\n" + "{\n" + indent + "try {\n"); indent = " "; w.write(indent + "const std::vector<vespalib::string> & __lines(__value.getLines());\n"); } else { w.write(parent + typeName + "(const std::vector<vespalib::string> & __lines)\n" + "{\n"); } w.write("" + indent + "std::set<vespalib::string> __remainingValuesToParse(" + "__lines.begin(), __lines.end());\n"); w.write(indent + "for(std::set<vespalib::string>::iterator __rVTPiter = __remainingValuesToParse.begin();\n" + indent + " __rVTPiter != __remainingValuesToParse.end();)\n" + indent + "{\n" + indent + " if (ConfigParser::stripWhitespace(*__rVTPiter).empty()) {\n" + indent + " std::set<vespalib::string>::iterator __rVTPiter2 = __rVTPiter++;\n" + indent + " __remainingValuesToParse.erase(__rVTPiter2);\n" + indent + " } else {\n" + indent + " ++__rVTPiter;\n" + indent + " }\n" + indent + "}\n"); for (CNode child : node.getChildren()) { String childType = getTypeName(child, false); String childName = getIdentifier(child.getName()); if (child instanceof LeafCNode.EnumLeaf) { if (child.isArray) { w.write(indent + "std::vector<vespalib::string> " + childName + "__ValueList(\n "); } else if (child.isMap) { w.write(indent + "std::map<vespalib::string, vespalib::string> " + childName + "__ValueMap(\n "); } else { w.write(indent + childName + " = get" + childType + "("); } childType = "vespalib::string"; } else { w.write(indent + childName + " = "); } if (child.isArray) { w.write("ConfigParser::parseArray<" + childType + ">(\"" + child.getName() + "\", __lines)"); } else if (child.isMap) { w.write("ConfigParser::parseMap<" + childType + ">(\"" + child.getName() + "\", __lines)"); } else { if (child instanceof LeafCNode) { w.write("ConfigParser::parse<" + childType + ">(\"" + child.getName() + "\", __lines"); } else { w.write("ConfigParser::parseStruct<" + childType + ">(\"" + child.getName() + "\", __lines"); } if (child instanceof LeafCNode && ((LeafCNode) child).getDefaultValue() != null) { LeafCNode leaf = (LeafCNode) child; if (leaf.getDefaultValue().getValue() != null) { String defaultVal = getDefaultValue(leaf); if (leaf instanceof LeafCNode.EnumLeaf) { defaultVal = '"' + defaultVal + '"'; } w.write(", " + defaultVal); } } w.write(")"); } if (child 
instanceof LeafCNode.EnumLeaf) { childType = getTypeName(child, false); w.write(");\n"); if (child.isArray) { w.write(indent + childName + ".reserve(" + childName + "__ValueList.size());\n" + indent + "for (std::vector<vespalib::string>::const_iterator __it\n" + indent + " = " + childName + "__ValueList.begin();\n" + indent + " __it != " + childName + "__ValueList.end(); ++__it)\n" + indent + "{\n" + indent + " " + childName + ".push_back(get" + childType + "(*__it));\n" + indent + "}\n" ); } else if (child.isMap) { w.write(indent + "typedef std::map<vespalib::string, vespalib::string> __ValueMap;\n"); w.write(indent + "for (__ValueMap::iterator __it(" + childName + "__ValueMap.begin()), __mt(" + childName + "__ValueMap.end()); __it != __mt; __it++) {\n" + " " + childName + "[__it->first] = get" + childType + "(__it->second);\n" + "}\n" ); } } else { w.write(";\n"); } w.write(indent + "ConfigParser::stripLinesForKey(\"" + child.getName() + "\", " + "__remainingValuesToParse);\n"); } if (root) { indent = " "; w.write(indent + "} catch (InvalidConfigException & __ice) {\n"); w.write(indent + " throw InvalidConfigException(\"Error parsing config '\" + CONFIG_DEF_NAME + \"' in namespace '\" + CONFIG_DEF_NAMESPACE + \"'" + ": \" + __ice.getMessage());\n" + indent + "}\n"); } w.write("}\n" + "\n" ); // Write operator== String lineBreak = (parent.length() + typeName.length() < 50 ? "" : "\n"); w.write("bool\n" + parent + lineBreak + "operator==(const " + typeName + "& __rhs) const\n" + "{\n" + " return (" ); for (int i = 0; i<node.getChildren().length; ++i) { CNode child = node.getChildren()[i]; String childName = getIdentifier(child.getName()); if (i != 0) { w.write(" &&\n "); } w.write(childName + " == __rhs." + childName); } w.write(");\n" + "}\n" + "\n" ); // Write operator!= lineBreak = (parent.length() + typeName.length() < 50 ? 
"" : "\n"); w.write("bool\n" + parent + lineBreak + "operator!=(const " + typeName + "& __rhs) const\n" + "{\n" + " return !(operator==(__rhs));\n" + "}\n" + "\n" ); writeSlimeEncoder(w, node, parent, root); writeSlimeDecoder(w, node, parent, root); writeSlimeConstructor(w, node, parent, root); } public void writeSlimeEncoder(Writer w, CNode node, String parent, boolean root) throws IOException { String indent = " "; if (root) { w.write("void\n" + parent + "serialize(::config::ConfigDataBuffer & __buffer) const\n" + "{\n"); w.write(indent + "vespalib::Slime & __slime(__buffer.slimeObject());\n"); w.write(indent + "vespalib::slime::Cursor & __croot = __slime.setObject();\n"); w.write(indent + "__croot.setDouble(\"version\", CONFIG_DEF_SERIALIZE_VERSION);\n"); w.write(indent + "vespalib::slime::Cursor & __key = __croot.setObject(\"configKey\");\n"); w.write(indent + "__key.setString(\"defName\", vespalib::Memory(CONFIG_DEF_NAME));\n"); w.write(indent + "__key.setString(\"defNamespace\", vespalib::Memory(CONFIG_DEF_NAMESPACE));\n"); w.write(indent + "__key.setString(\"defMd5\", vespalib::Memory(CONFIG_DEF_MD5));\n"); w.write(indent + "vespalib::slime::Cursor & __keySchema =__key.setArray(\"defSchema\");\n"); w.write(indent + "for (size_t i = 0; i < CONFIG_DEF_SCHEMA.size(); i++) {\n"); w.write(indent + " __keySchema.addString(vespalib::Memory(CONFIG_DEF_SCHEMA[i]));\n"); w.write(indent + "}\n"); w.write(indent + "vespalib::slime::Cursor & __cursor = __croot.setObject(\"configPayload\");\n"); } else { w.write("void\n" + parent + "serialize(vespalib::slime::Cursor & __cursor) const\n" + "{\n"); } for (CNode child : node.getChildren()) { String childName = getIdentifier(child.getName()); String childType = getTypeName(child, false); w.write(indent + "{\n"); indent = " "; w.write(indent + "vespalib::slime::Cursor & __c = __cursor.setObject(\"" + child.getName() + "\");\n"); if (child.isArray) { w.write(indent + "__c.setString(\"type\", \"array\");\n"); w.write(indent + "vespalib::slime::Cursor & __c2 = __c.setArray(\"value\");\n"); w.write(indent + "for (size_t __i = 0; __i < " + childName + ".size(); __i++) {\n"); w.write(indent + " vespalib::slime::Cursor & __c3 = __c2.addObject();\n"); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write(indent + " __c3.setString(\"type\", \"enum\");\n"); w.write(indent + " __c3.set" + repType); w.write("(\"value\", vespalib::Memory(get" + childType + "Name(" + childName + "[__i])));\n"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); w.write(indent + " __c3.setString(\"type\", \"" + type + "\");\n"); w.write(indent + " __c3.set" + repType); if ("String".equals(repType)) { w.write("(\"value\", vespalib::Memory(" + childName + "[__i]));\n"); } else { w.write("(\"value\", " + childName + "[__i]);\n"); } } else { w.write(indent + " __c3.setString(\"type\", \"struct\");\n"); w.write(indent + " Cursor & __c4 = __c3.setObject(\"value\");\n"); w.write(indent + " " + childName + "[__i].serialize(__c4);\n"); } w.write(indent + "}\n"); } else if (child.isMap) { w.write(indent + "__c.setString(\"type\", \"map\");\n"); w.write(indent + "vespalib::slime::Cursor & __c2 = __c.setArray(\"value\");\n"); String childMapType = getTypeName(child, true); w.write(indent + "for (" + childMapType + "::const_iterator it(" + childName + ".begin()), mt(" + childName + ".end()); it != mt; it++) {\n"); w.write(indent + " vespalib::slime::Cursor & __c3 = 
__c2.addObject();\n"); w.write(indent + " __c3.setString(\"key\", vespalib::Memory(it->first));\n"); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write(indent + " __c3.setString(\"type\", \"enum\");\n"); w.write(indent + " __c3.set" + repType); w.write("(\"value\", vespalib::Memory(get" + childType + "Name(it->second)));\n"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); w.write(indent + " __c3.setString(\"type\", \"" + type + "\");\n"); w.write(indent + " __c3.set" + repType); if ("String".equals(repType)) { w.write("(\"value\", vespalib::Memory(it->second));\n"); } else { w.write("(\"value\", it->second);\n"); } } else { w.write(indent + " __c3.setString(\"type\", \"struct\");\n"); w.write(indent + " Cursor & __c4 = __c3.setObject(\"value\");\n"); w.write(indent + " it->second.serialize(__c4);\n"); } w.write(indent + "}\n"); } else { if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write(indent + "__c.setString(\"type\", \"enum\");\n"); w.write(indent + "__c.set" + repType); w.write("(\"value\", vespalib::Memory(get" + childType + "Name(" + childName + ")));\n"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); w.write(indent + "__c.setString(\"type\", \"" + type + "\");\n"); w.write(indent + "__c.set" + repType); if ("String".equals(repType)) { w.write("(\"value\", vespalib::Memory(" + childName + "));\n"); } else { w.write("(\"value\", " + childName + ");\n"); } } else { w.write(indent + "__c.setString(\"type\", \"struct\");\n"); w.write(indent + "Cursor & __c2 = __c.setObject(\"value\");\n"); w.write(indent + childName + ".serialize(__c2);\n"); } } indent = " "; w.write(indent + "}\n"); } w.write("}\n\n"); } public void writeSlimeDecoder(Writer w, CNode node, String parent, boolean root) throws IOException { String tmpName = getTypeName(node, false); String typeName = root ? 
getInternalClassName(node) : tmpName; String indent = " "; if (root) { w.write("" + typeName + "::" + typeName + "(const ::config::ConfigDataBuffer & __buffer)\n" + "{\n"); w.write(indent + "const vespalib::Slime & __slime(__buffer.slimeObject());\n"); w.write(indent + "vespalib::slime::Inspector & __croot = __slime.get();\n"); w.write(indent + "vespalib::slime::Inspector & __inspector = __croot[\"configPayload\"];\n"); } else { w.write("" + parent + typeName + "(const vespalib::slime::Inspector & __inspector)\n" + "{\n"); } for (CNode child : node.getChildren()) { String childName = getIdentifier(child.getName()); String childType = getTypeName(child, false); String inspectorLine = "__inspector[\"" + child.getName() + "\"][\"value\"]"; if (child.isArray) { w.write(indent + "for (size_t __i = 0; __i < " + inspectorLine + ".children(); __i++) {\n"); w.write(indent + " " + childName + ".push_back("); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write("get" + childType + "(" + inspectorLine + "[__i][\"value\"].as" + repType + "().make_string())"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); if ("String".equals(repType)) { w.write("" + inspectorLine + "[__i][\"value\"].as" + repType + "().make_string()"); } else { w.write("" + inspectorLine + "[__i][\"value\"].as" + repType + "()"); } } else { w.write(childType + "(" + inspectorLine + "[__i][\"value\"])"); } w.write(");\n"); w.write(indent + "}\n"); } else if (child.isMap) { w.write(indent + "for (size_t __i = 0; __i < " + inspectorLine + ".children(); __i++) {\n"); w.write(indent + " " + childName + "[" + inspectorLine + "[__i][\"key\"].asString().make_string()] = "); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write("get" + childType + "(" + inspectorLine + "[__i][\"value\"].as" + repType + "().make_string())"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); if ("String".equals(repType)) { w.write("" + inspectorLine + "[__i][\"value\"].as" + repType + "().make_string()"); } else { w.write("" + inspectorLine + "[__i][\"value\"].as" + repType + "()"); } } else { w.write(childType + "(" + inspectorLine + "[__i][\"value\"])"); } w.write(";\n"); w.write(indent + "}\n"); } else { w.write(indent + childName + " = "); if (child instanceof LeafCNode.EnumLeaf) { String repType = slimeTypeMap.get("enum"); w.write("get" + childType + "(" + inspectorLine + ".as" + repType + "().make_string())"); } else if (child instanceof LeafCNode) { String type = ((LeafCNode) child).getType(); String repType = slimeTypeMap.get(type); if ("String".equals(repType)) { w.write("" + inspectorLine + ".as" + repType + "().make_string()"); } else { w.write("" + inspectorLine + ".as" + repType + "()"); } } else { w.write(childType + "(" + inspectorLine + ")"); } w.write(";\n"); } } w.write("}\n\n"); } public void writeSlimeConstructor(Writer w, CNode node, String parent, boolean root) throws IOException { String tmpName = getTypeName(node, false); String typeName = root ? 
getInternalClassName(node) : tmpName; String indent = " "; if (root) { w.write("" + typeName + "::" + typeName + "(const ::config::ConfigPayload & __payload)\n" + "{\n"); } else { w.write("" + parent + typeName + "(const ::config::ConfigPayload & __payload)\n" + "{\n"); } w.write(indent + "const vespalib::slime::Inspector & __inspector(__payload.get());\n"); for (CNode child : node.getChildren()) { String childName = getIdentifier(child.getName()); String childType = getTypeName(child, false); String childInspector = "__inspector[\"" + child.getName() + "\"]"; if (child.isArray) { String inserterName = "__" + childName + "Inserter"; w.write(indent + "::config::internal::VectorInserter<" + childType); if (child instanceof LeafCNode.EnumLeaf) { w.write(", Internal" + childType + "Converter"); } w.write("> " + inserterName + "(" + childName + ");\n"); w.write(indent + childInspector + ".traverse(" + inserterName + ");\n"); } else if (child.isMap) { String inserterName = "__" + childName + "Inserter"; w.write(indent + "::config::internal::MapInserter<" + childType); if (child instanceof LeafCNode.EnumLeaf) { w.write(", Internal" + childType + "Converter"); } w.write("> " + inserterName + "(" + childName + ");\n"); w.write(indent + childInspector + ".traverse(" + inserterName + ");\n"); } else { w.write(indent + childName + " = "); if (child instanceof LeafCNode.EnumLeaf) { w.write("Internal" + childType + "Converter"); } else { w.write("::config::internal::ValueConverter<" + childType + ">"); } if (child instanceof LeafCNode && ((LeafCNode) child).getDefaultValue() != null) { LeafCNode leaf = (LeafCNode) child; String defaultValue = getDefaultValue(leaf); w.write("()(" + childInspector + ", " + defaultValue + ");\n"); } else if (child instanceof InnerCNode) { w.write("()(" + childInspector + ");\n"); } else { w.write("()(\"" + child.getName() + "\", " + childInspector + ");\n"); } } } w.write("}\n\n"); } void writeBodyFooter(Writer w, CNode root) throws IOException { w.write("} // namespace internal\n\n"); writeNameSpaceEnd(w, generateCppNameSpace(root)); } String getDefaultValue(LeafCNode leaf) { String defaultVal = leaf.getDefaultValue().getStringRepresentation(); if (leaf.getType().equals("string") && defaultVal.equals("null")) throw new CodegenRuntimeException("Default value null not allowed for C++ config"); if (leaf.getType().equals("long") && "-9223372036854775808".equals(defaultVal)) { return "LONG_MIN"; } else if (leaf.getType().equals("int") && "-2147483648".equals(defaultVal)) { return "INT_MIN"; } else { return defaultVal; } } }
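[Editor's aside — illustration only, not part of the CppClassBuilder.java record above.] The generated C++ constructor emitted by writeSlimeConstructor passes each leaf's default value through getDefaultValue, which substitutes LONG_MIN / INT_MIN when the sentinel values cannot be written as plain C++ decimal literals. The standalone Java sketch below (class and method names are hypothetical; the string-"null" check from getDefaultValue is omitted) mirrors only that numeric clamping so the special cases can be exercised in isolation.

// Minimal sketch, assuming only the numeric clamping behaviour seen in getDefaultValue().
public class DefaultValueSketch {
    // Certain numeric defaults cannot appear as plain literals in generated C++,
    // so they are replaced by the corresponding limit macros.
    static String cppDefault(String type, String value) {
        if ("long".equals(type) && "-9223372036854775808".equals(value)) {
            return "LONG_MIN";   // -2^63 is not a valid C++ decimal literal token
        }
        if ("int".equals(type) && "-2147483648".equals(value)) {
            return "INT_MIN";    // same reasoning for -2^31
        }
        return value;            // ordinary values pass through unchanged
    }

    public static void main(String[] args) {
        System.out.println(cppDefault("long", "-9223372036854775808")); // prints LONG_MIN
        System.out.println(cppDefault("int", "42"));                    // prints 42
    }
}

Running main prints LONG_MIN for the long sentinel and echoes ordinary values unchanged, which is the behaviour the generated constructor relies on.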
Remove unused constant
configgen/src/main/java/com/yahoo/config/codegen/CppClassBuilder.java
Remove unused constant
Java
apache-2.0
4f860a4f9e8b889b7ae4afb3a85870a7d334fce1
0
ShortMap/ShortMap,aseldawy/spatialhadoop,aseldawy/spatialhadoop,aseldawy/spatialhadoop,ShortMap/ShortMap,ShortMap/ShortMap,aseldawy/spatialhadoop,aseldawy/spatialhadoop,ShortMap/ShortMap,aseldawy/spatialhadoop,aseldawy/spatialhadoop,ShortMap/ShortMap,aseldawy/spatialhadoop,ShortMap/ShortMap,ShortMap/ShortMap,ShortMap/ShortMap,ShortMap/ShortMap,aseldawy/spatialhadoop
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import static org.apache.hadoop.mapred.Task.Counter.COMBINE_INPUT_RECORDS; import static org.apache.hadoop.mapred.Task.Counter.COMBINE_OUTPUT_RECORDS; import static org.apache.hadoop.mapred.Task.Counter.MAP_INPUT_BYTES; import static org.apache.hadoop.mapred.Task.Counter.MAP_INPUT_RECORDS; import static org.apache.hadoop.mapred.Task.Counter.MAP_OUTPUT_BYTES; import static org.apache.hadoop.mapred.Task.Counter.MAP_OUTPUT_MATERIALIZED_BYTES; import static org.apache.hadoop.mapred.Task.Counter.MAP_OUTPUT_RECORDS; import java.io.DataInput; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem.Statistics; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.io.serializer.Deserializer; import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.Serializer; import org.apache.hadoop.mapred.IFile.Writer; import org.apache.hadoop.mapred.Merger.Segment; import org.apache.hadoop.mapred.SortedRanges.SkipRangeIterator; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapreduce.split.JobSplit; import org.apache.hadoop.mapreduce.split.JobSplit.SplitMetaInfo; import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitIndex; import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.IndexedSortable; import org.apache.hadoop.util.IndexedSorter; import org.apache.hadoop.util.Progress; import org.apache.hadoop.util.QuickSort; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; /** A Map task. 
*/ class MapTask extends Task { /** * The size of each record in the index file for the map-outputs. */ public static final int MAP_OUTPUT_INDEX_RECORD_LENGTH = 24; private TaskSplitIndex splitMetaInfo = new TaskSplitIndex(); private final static int APPROX_HEADER_LENGTH = 150; private static final Log LOG = LogFactory.getLog(MapTask.class.getName()); { // set phase for this task setPhase(TaskStatus.Phase.MAP); } public MapTask() { super(); } public MapTask(String jobFile, TaskAttemptID taskId, int partition, TaskSplitIndex splitIndex, int numSlotsRequired) { super(jobFile, taskId, partition, numSlotsRequired); this.splitMetaInfo = splitIndex; } @Override public boolean isMapTask() { return true; } @Override public void localizeConfiguration(JobConf conf) throws IOException { super.localizeConfiguration(conf); // split.info file is used only by IsolationRunner. // Write the split file to the local disk if it is a normal map task (not a // job-setup or a job-cleanup task) and if the user wishes to run // IsolationRunner either by setting keep.failed.tasks.files to true or by // using keep.tasks.files.pattern if (supportIsolationRunner(conf) && isMapOrReduce()) { // localize the split meta-information Path localSplitMeta = new LocalDirAllocator("mapred.local.dir").getLocalPathForWrite( TaskTracker.getLocalSplitFile(conf.getUser(), getJobID() .toString(), getTaskID().toString()), conf); LOG.debug("Writing local split to " + localSplitMeta); DataOutputStream out = FileSystem.getLocal(conf).create(localSplitMeta); splitMetaInfo.write(out); out.close(); } } @Override public TaskRunner createRunner(TaskTracker tracker, TaskTracker.TaskInProgress tip, TaskTracker.RunningJob rjob ) throws IOException { return new MapTaskRunner(tip, tracker, this.conf, rjob); } @Override public void write(DataOutput out) throws IOException { super.write(out); if (isMapOrReduce()) { if (splitMetaInfo != null) { splitMetaInfo.write(out); } else { new TaskSplitIndex().write(out); } //TODO do we really need to set this to null? splitMetaInfo = null; } } @Override public void readFields(DataInput in) throws IOException { super.readFields(in); if (isMapOrReduce()) { splitMetaInfo.readFields(in); } } /** * This class wraps the user's record reader to update the counters and * progress as records are read. 
* @param <K> * @param <V> */ class TrackedRecordReader<K, V> implements RecordReader<K,V> { private RecordReader<K,V> rawIn; private Counters.Counter inputByteCounter; private Counters.Counter inputRecordCounter; private Counters.Counter fileInputByteCounter; private InputSplit split; private TaskReporter reporter; private long beforePos = -1; private long afterPos = -1; private long bytesInPrev = -1; private long bytesInCurr = -1; private final Statistics fsStats; TrackedRecordReader(InputSplit split, JobConf job, TaskReporter reporter) throws IOException { inputRecordCounter = reporter.getCounter(MAP_INPUT_RECORDS); inputByteCounter = reporter.getCounter(MAP_INPUT_BYTES); fileInputByteCounter = reporter .getCounter(FileInputFormat.Counter.BYTES_READ); Statistics matchedStats = null; if (split instanceof FileSplit) { matchedStats = getFsStatistics(((FileSplit) split).getPath(), job); } fsStats = matchedStats; bytesInPrev = getInputBytes(fsStats); rawIn = job.getInputFormat().getRecordReader(split, job, reporter); bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); this.reporter = reporter; this.split = split; conf = job; } public K createKey() { return rawIn.createKey(); } public V createValue() { return rawIn.createValue(); } public synchronized boolean next(K key, V value) throws IOException { boolean ret = moveToNext(key, value); if (ret) { incrCounters(); } return ret; } protected void incrCounters() { inputRecordCounter.increment(1); inputByteCounter.increment(afterPos - beforePos); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } protected synchronized boolean moveToNext(K key, V value) throws IOException { boolean ret = false; try { reporter.setProgress(getProgress()); beforePos = getPos(); bytesInPrev = getInputBytes(fsStats); ret = rawIn.next(key, value); afterPos = getPos(); bytesInCurr = getInputBytes(fsStats); } catch (IOException ioe) { if (split instanceof FileSplit) { LOG.error("IO error in map input file " + conf.get("map.input.file")); throw new IOException("IO error in map input file " + conf.get("map.input.file"), ioe); } throw ioe; } return ret; } public long getPos() throws IOException { return rawIn.getPos(); } public void close() throws IOException { bytesInPrev = getInputBytes(fsStats); rawIn.close(); bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } public float getProgress() throws IOException { return rawIn.getProgress(); } TaskReporter getTaskReporter() { return reporter; } private long getInputBytes(Statistics stats) { return stats == null ? 0 : stats.getBytesRead(); } } /** * This class skips the records based on the failed ranges from previous * attempts. 
*/ class SkippingRecordReader<K, V> extends TrackedRecordReader<K,V> { private SkipRangeIterator skipIt; private SequenceFile.Writer skipWriter; private boolean toWriteSkipRecs; private TaskUmbilicalProtocol umbilical; private Counters.Counter skipRecCounter; private long recIndex = -1; SkippingRecordReader(InputSplit split, TaskUmbilicalProtocol umbilical, TaskReporter reporter) throws IOException { super(split, conf, reporter); this.umbilical = umbilical; this.skipRecCounter = reporter.getCounter(Counter.MAP_SKIPPED_RECORDS); this.toWriteSkipRecs = toWriteSkipRecs() && SkipBadRecords.getSkipOutputPath(conf)!=null; skipIt = getSkipRanges().skipRangeIterator(); } public synchronized boolean next(K key, V value) throws IOException { if(!skipIt.hasNext()) { LOG.warn("Further records got skipped."); return false; } boolean ret = moveToNext(key, value); long nextRecIndex = skipIt.next(); long skip = 0; while(recIndex<nextRecIndex && ret) { if(toWriteSkipRecs) { writeSkippedRec(key, value); } ret = moveToNext(key, value); skip++; } //close the skip writer once all the ranges are skipped if(skip>0 && skipIt.skippedAllRanges() && skipWriter!=null) { skipWriter.close(); } skipRecCounter.increment(skip); reportNextRecordRange(umbilical, recIndex); if (ret) { incrCounters(); } return ret; } protected synchronized boolean moveToNext(K key, V value) throws IOException { recIndex++; return super.moveToNext(key, value); } @SuppressWarnings("unchecked") private void writeSkippedRec(K key, V value) throws IOException{ if(skipWriter==null) { Path skipDir = SkipBadRecords.getSkipOutputPath(conf); Path skipFile = new Path(skipDir, getTaskID().toString()); skipWriter = SequenceFile.createWriter( skipFile.getFileSystem(conf), conf, skipFile, (Class<K>) createKey().getClass(), (Class<V>) createValue().getClass(), CompressionType.BLOCK, getTaskReporter()); } skipWriter.append(key, value); } } @Override public void run(final JobConf job, final TaskUmbilicalProtocol umbilical) throws IOException, ClassNotFoundException, InterruptedException { this.umbilical = umbilical; // start thread that will handle communication with parent TaskReporter reporter = new TaskReporter(getProgress(), umbilical); reporter.startCommunicationThread(); boolean useNewApi = job.getUseNewMapper(); initialize(job, getJobID(), reporter, useNewApi); // check if it is a cleanupJobTask if (jobCleanup) { runJobCleanupTask(umbilical, reporter); return; } if (jobSetup) { runJobSetupTask(umbilical, reporter); return; } if (taskCleanup) { runTaskCleanupTask(umbilical, reporter); return; } if (useNewApi) { runNewMapper(job, splitMetaInfo, umbilical, reporter); } else { runOldMapper(job, splitMetaInfo, umbilical, reporter); } done(umbilical, reporter); } @SuppressWarnings("unchecked") private <T> T getSplitDetails(Path file, long offset) throws IOException { FileSystem fs = file.getFileSystem(conf); FSDataInputStream inFile = fs.open(file); inFile.seek(offset); String className = Text.readString(inFile); Class<T> cls; try { cls = (Class<T>) conf.getClassByName(className); } catch (ClassNotFoundException ce) { IOException wrap = new IOException("Split class " + className + " not found"); wrap.initCause(ce); throw wrap; } SerializationFactory factory = new SerializationFactory(conf); Deserializer<T> deserializer = (Deserializer<T>) factory.getDeserializer(cls); deserializer.open(inFile); T split = deserializer.deserialize(null); long pos = inFile.getPos(); getCounters().findCounter( Task.Counter.SPLIT_RAW_BYTES).increment(pos - offset); inFile.close(); 
return split; } @SuppressWarnings("unchecked") private <INKEY,INVALUE,OUTKEY,OUTVALUE> void runOldMapper(final JobConf job, final TaskSplitIndex splitIndex, final TaskUmbilicalProtocol umbilical, TaskReporter reporter ) throws IOException, InterruptedException, ClassNotFoundException { InputSplit inputSplit = getSplitDetails(new Path(splitIndex.getSplitLocation()), splitIndex.getStartOffset()); updateJobWithSplit(job, inputSplit); reporter.setInputSplit(inputSplit); RecordReader<INKEY,INVALUE> in = isSkipping() ? new SkippingRecordReader<INKEY,INVALUE>(inputSplit, umbilical, reporter) : new TrackedRecordReader<INKEY,INVALUE>(inputSplit, job, reporter); job.setBoolean("mapred.skip.on", isSkipping()); int numReduceTasks = conf.getNumReduceTasks(); LOG.info("numReduceTasks: " + numReduceTasks); MapOutputCollector collector = null; if (numReduceTasks > 0) { collector = new MapOutputBuffer(umbilical, job, reporter); } else { collector = new DirectMapOutputCollector(umbilical, job, reporter); } MapRunnable<INKEY,INVALUE,OUTKEY,OUTVALUE> runner = ReflectionUtils.newInstance(job.getMapRunnerClass(), job); try { runner.run(in, new OldOutputCollector(collector, conf), reporter); collector.flush(); } finally { //close in.close(); // close input collector.close(); } } /** * Update the job with details about the file split * @param job the job configuration to update * @param inputSplit the file split */ private void updateJobWithSplit(final JobConf job, InputSplit inputSplit) { if (inputSplit instanceof FileSplit) { FileSplit fileSplit = (FileSplit) inputSplit; job.set("map.input.file", fileSplit.getPath().toString()); job.setLong("map.input.start", fileSplit.getStart()); job.setLong("map.input.length", fileSplit.getLength()); } } static class NewTrackingRecordReader<K,V> extends org.apache.hadoop.mapreduce.RecordReader<K,V> { private final org.apache.hadoop.mapreduce.RecordReader<K,V> real; private final org.apache.hadoop.mapreduce.Counter inputRecordCounter; private final org.apache.hadoop.mapreduce.Counter fileInputByteCounter; private final TaskReporter reporter; private org.apache.hadoop.mapreduce.InputSplit inputSplit; private final JobConf job; private final Statistics fsStats; NewTrackingRecordReader(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.mapreduce.InputFormat inputFormat, TaskReporter reporter, JobConf job, org.apache.hadoop.mapreduce.TaskAttemptContext taskContext) throws IOException, InterruptedException { this.reporter = reporter; this.inputSplit = split; this.job = job; this.inputRecordCounter = reporter.getCounter(MAP_INPUT_RECORDS); this.fileInputByteCounter = reporter .getCounter(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.Counter.BYTES_READ); Statistics matchedStats = null; if (split instanceof org.apache.hadoop.mapreduce.lib.input.FileSplit) { matchedStats = getFsStatistics(((org.apache.hadoop.mapreduce.lib.input.FileSplit) split) .getPath(), job); } fsStats = matchedStats; long bytesInPrev = getInputBytes(fsStats); this.real = inputFormat.createRecordReader(split, taskContext); long bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } @Override public void close() throws IOException { long bytesInPrev = getInputBytes(fsStats); real.close(); long bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } @Override public K getCurrentKey() throws IOException, InterruptedException { return real.getCurrentKey(); } @Override public V getCurrentValue() throws 
IOException, InterruptedException { return real.getCurrentValue(); } @Override public float getProgress() throws IOException, InterruptedException { return real.getProgress(); } @Override public void initialize(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.mapreduce.TaskAttemptContext context ) throws IOException, InterruptedException { long bytesInPrev = getInputBytes(fsStats); real.initialize(split, context); long bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } @Override public boolean nextKeyValue() throws IOException, InterruptedException { boolean result = false; try { long bytesInPrev = getInputBytes(fsStats); result = real.nextKeyValue(); long bytesInCurr = getInputBytes(fsStats); if (result) { inputRecordCounter.increment(1); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } reporter.setProgress(getProgress()); } catch (IOException ioe) { if (inputSplit instanceof FileSplit) { FileSplit fileSplit = (FileSplit) inputSplit; LOG.error("IO error in map input file " + fileSplit.getPath().toString()); throw new IOException("IO error in map input file " + fileSplit.getPath().toString(), ioe); } throw ioe; } return result; } private long getInputBytes(Statistics stats) { return stats == null ? 0 : stats.getBytesRead(); } } /** * Since the mapred and mapreduce Partitioners don't share a common interface * (JobConfigurable is deprecated and a subtype of mapred.Partitioner), the * partitioner lives in Old/NewOutputCollector. Note that, for map-only jobs, * the configured partitioner should not be called. It's common for * partitioners to compute a result mod numReduces, which causes a div0 error */ private static class OldOutputCollector<K,V> implements OutputCollector<K,V> { private final Partitioner<K,V> partitioner; private final MapOutputCollector<K,V> collector; private final int numPartitions; @SuppressWarnings("unchecked") OldOutputCollector(MapOutputCollector<K,V> collector, JobConf conf) { numPartitions = conf.getNumReduceTasks(); if (numPartitions > 0) { partitioner = (Partitioner<K,V>) ReflectionUtils.newInstance(conf.getPartitionerClass(), conf); } else { partitioner = new Partitioner<K,V>() { @Override public void configure(JobConf job) { } @Override public int getPartition(K key, V value, int numPartitions) { return -1; } }; } this.collector = collector; } @Override public void collect(K key, V value) throws IOException { try { collector.collect(key, value, partitioner.getPartition(key, value, numPartitions)); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); throw new IOException("interrupt exception", ie); } } } private class NewDirectOutputCollector<K,V> extends org.apache.hadoop.mapreduce.RecordWriter<K,V> { private final org.apache.hadoop.mapreduce.RecordWriter out; private final TaskReporter reporter; private final Counters.Counter mapOutputRecordCounter; private final Counters.Counter fileOutputByteCounter; private final Statistics fsStats; @SuppressWarnings("unchecked") NewDirectOutputCollector(org.apache.hadoop.mapreduce.JobContext jobContext, JobConf job, TaskUmbilicalProtocol umbilical, TaskReporter reporter) throws IOException, ClassNotFoundException, InterruptedException { this.reporter = reporter; Statistics matchedStats = null; if (outputFormat instanceof org.apache.hadoop.mapreduce.lib.output.FileOutputFormat) { matchedStats = getFsStatistics(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat .getOutputPath(jobContext), job); } fsStats = matchedStats; 
mapOutputRecordCounter = reporter.getCounter(MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.Counter.BYTES_WRITTEN); long bytesOutPrev = getOutputBytes(fsStats); out = outputFormat.getRecordWriter(taskContext); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); } @Override @SuppressWarnings("unchecked") public void write(K key, V value) throws IOException, InterruptedException { reporter.progress(); long bytesOutPrev = getOutputBytes(fsStats); out.write(key, value); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); mapOutputRecordCounter.increment(1); } @Override public void close(TaskAttemptContext context) throws IOException,InterruptedException { reporter.progress(); if (out != null) { long bytesOutPrev = getOutputBytes(fsStats); out.close(context); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); } } private long getOutputBytes(Statistics stats) { return stats == null ? 0 : stats.getBytesWritten(); } } private class NewOutputCollector<K,V> extends org.apache.hadoop.mapreduce.RecordWriter<K,V> { private final MapOutputCollector<K,V> collector; private final org.apache.hadoop.mapreduce.Partitioner<K,V> partitioner; private final int partitions; @SuppressWarnings("unchecked") NewOutputCollector(org.apache.hadoop.mapreduce.JobContext jobContext, JobConf job, TaskUmbilicalProtocol umbilical, TaskReporter reporter ) throws IOException, ClassNotFoundException { collector = new MapOutputBuffer<K,V>(umbilical, job, reporter); partitions = jobContext.getNumReduceTasks(); if (partitions > 0) { partitioner = (org.apache.hadoop.mapreduce.Partitioner<K,V>) ReflectionUtils.newInstance(jobContext.getPartitionerClass(), job); } else { partitioner = new org.apache.hadoop.mapreduce.Partitioner<K,V>() { @Override public int getPartition(K key, V value, int numPartitions) { return -1; } }; } } @Override public void write(K key, V value) throws IOException, InterruptedException { collector.collect(key, value, partitioner.getPartition(key, value, partitions)); } @Override public void close(TaskAttemptContext context ) throws IOException,InterruptedException { try { collector.flush(); } catch (ClassNotFoundException cnf) { throw new IOException("can't find class ", cnf); } collector.close(); } } @SuppressWarnings("unchecked") private <INKEY,INVALUE,OUTKEY,OUTVALUE> void runNewMapper(final JobConf job, final TaskSplitIndex splitIndex, final TaskUmbilicalProtocol umbilical, TaskReporter reporter ) throws IOException, ClassNotFoundException, InterruptedException { // make a task context so we can get the classes org.apache.hadoop.mapreduce.TaskAttemptContext taskContext = new org.apache.hadoop.mapreduce.TaskAttemptContext(job, getTaskID()); // make a mapper org.apache.hadoop.mapreduce.Mapper<INKEY,INVALUE,OUTKEY,OUTVALUE> mapper = (org.apache.hadoop.mapreduce.Mapper<INKEY,INVALUE,OUTKEY,OUTVALUE>) ReflectionUtils.newInstance(taskContext.getMapperClass(), job); // make the input format org.apache.hadoop.mapreduce.InputFormat<INKEY,INVALUE> inputFormat = (org.apache.hadoop.mapreduce.InputFormat<INKEY,INVALUE>) ReflectionUtils.newInstance(taskContext.getInputFormatClass(), job); // rebuild the input split org.apache.hadoop.mapreduce.InputSplit split = null; split = getSplitDetails(new Path(splitIndex.getSplitLocation()), splitIndex.getStartOffset()); 
org.apache.hadoop.mapreduce.RecordReader<INKEY,INVALUE> input = new NewTrackingRecordReader<INKEY,INVALUE> (split, inputFormat, reporter, job, taskContext); job.setBoolean("mapred.skip.on", isSkipping()); org.apache.hadoop.mapreduce.RecordWriter output = null; org.apache.hadoop.mapreduce.Mapper<INKEY,INVALUE,OUTKEY,OUTVALUE>.Context mapperContext = null; try { Constructor<org.apache.hadoop.mapreduce.Mapper.Context> contextConstructor = org.apache.hadoop.mapreduce.Mapper.Context.class.getConstructor (new Class[]{org.apache.hadoop.mapreduce.Mapper.class, Configuration.class, org.apache.hadoop.mapreduce.TaskAttemptID.class, org.apache.hadoop.mapreduce.RecordReader.class, org.apache.hadoop.mapreduce.RecordWriter.class, org.apache.hadoop.mapreduce.OutputCommitter.class, org.apache.hadoop.mapreduce.StatusReporter.class, org.apache.hadoop.mapreduce.InputSplit.class}); // get an output object if (job.getNumReduceTasks() == 0) { output = new NewDirectOutputCollector(taskContext, job, umbilical, reporter); } else { output = new NewOutputCollector(taskContext, job, umbilical, reporter); } mapperContext = contextConstructor.newInstance(mapper, job, getTaskID(), input, output, committer, reporter, split); input.initialize(split, mapperContext); mapper.run(mapperContext); input.close(); output.close(mapperContext); } catch (NoSuchMethodException e) { throw new IOException("Can't find Context constructor", e); } catch (InstantiationException e) { throw new IOException("Can't create Context", e); } catch (InvocationTargetException e) { throw new IOException("Can't invoke Context constructor", e); } catch (IllegalAccessException e) { throw new IOException("Can't invoke Context constructor", e); } } interface MapOutputCollector<K, V> { public void collect(K key, V value, int partition ) throws IOException, InterruptedException; public void close() throws IOException, InterruptedException; public void flush() throws IOException, InterruptedException, ClassNotFoundException; } class DirectMapOutputCollector<K, V> implements MapOutputCollector<K, V> { private RecordWriter<K, V> out = null; private TaskReporter reporter = null; private final Counters.Counter mapOutputRecordCounter; private final Counters.Counter fileOutputByteCounter; private final Statistics fsStats; @SuppressWarnings("unchecked") public DirectMapOutputCollector(TaskUmbilicalProtocol umbilical, JobConf job, TaskReporter reporter) throws IOException { this.reporter = reporter; String finalName = getOutputName(getPartition()); FileSystem fs = FileSystem.get(job); OutputFormat<K, V> outputFormat = job.getOutputFormat(); Statistics matchedStats = null; if (outputFormat instanceof FileOutputFormat) { matchedStats = getFsStatistics(FileOutputFormat.getOutputPath(job), job); } fsStats = matchedStats; mapOutputRecordCounter = reporter.getCounter(MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(FileOutputFormat.Counter.BYTES_WRITTEN); long bytesOutPrev = getOutputBytes(fsStats); out = job.getOutputFormat().getRecordWriter(fs, job, finalName, reporter); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); } public void close() throws IOException { if (this.out != null) { long bytesOutPrev = getOutputBytes(fsStats); out.close(this.reporter); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); } } public void flush() throws IOException, InterruptedException, ClassNotFoundException { } public void collect(K key, V value, int 
partition) throws IOException { reporter.progress(); long bytesOutPrev = getOutputBytes(fsStats); out.write(key, value); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); mapOutputRecordCounter.increment(1); } private long getOutputBytes(Statistics stats) { return stats == null ? 0 : stats.getBytesWritten(); } } class MapOutputBuffer<K extends Object, V extends Object> implements MapOutputCollector<K, V>, IndexedSortable { private final int partitions; private final JobConf job; private final TaskReporter reporter; private final Class<K> keyClass; private final Class<V> valClass; private final RawComparator<K> comparator; private final SerializationFactory serializationFactory; private final Serializer<K> keySerializer; private final Serializer<V> valSerializer; private final CombinerRunner<K,V> combinerRunner; private final CombineOutputCollector<K, V> combineCollector; // Compression for map-outputs private CompressionCodec codec = null; // k/v accounting private volatile int kvstart = 0; // marks beginning of spill private volatile int kvend = 0; // marks beginning of collectable private int kvindex = 0; // marks end of collected private final int[] kvoffsets; // indices into kvindices private final int[] kvindices; // partition, k/v offsets into kvbuffer private volatile int bufstart = 0; // marks beginning of spill private volatile int bufend = 0; // marks beginning of collectable private volatile int bufvoid = 0; // marks the point where we should stop // reading at the end of the buffer private int bufindex = 0; // marks end of collected private int bufmark = 0; // marks end of record private byte[] kvbuffer; // main output buffer private static final int PARTITION = 0; // partition offset in acct private static final int KEYSTART = 1; // key offset in acct private static final int VALSTART = 2; // val offset in acct private static final int ACCTSIZE = 3; // total #fields in acct private static final int RECSIZE = (ACCTSIZE + 1) * 4; // acct bytes per record // spill accounting private volatile int numSpills = 0; private volatile Throwable sortSpillException = null; private final int softRecordLimit; private final int softBufferLimit; private final int minSpillsForCombine; private final IndexedSorter sorter; private final ReentrantLock spillLock = new ReentrantLock(); private final Condition spillDone = spillLock.newCondition(); private final Condition spillReady = spillLock.newCondition(); private final BlockingBuffer bb = new BlockingBuffer(); private volatile boolean spillThreadRunning = false; private final SpillThread spillThread = new SpillThread(); private final FileSystem localFs; private final FileSystem rfs; private final Counters.Counter mapOutputByteCounter; private final Counters.Counter mapOutputRecordCounter; private final Counters.Counter combineOutputCounter; private final Counters.Counter fileOutputByteCounter; private ArrayList<SpillRecord> indexCacheList; private int totalIndexCacheMemory; private static final int INDEX_CACHE_MEMORY_LIMIT = 1024 * 1024; @SuppressWarnings("unchecked") public MapOutputBuffer(TaskUmbilicalProtocol umbilical, JobConf job, TaskReporter reporter ) throws IOException, ClassNotFoundException { this.job = job; this.reporter = reporter; localFs = FileSystem.getLocal(job); partitions = job.getNumReduceTasks(); rfs = ((LocalFileSystem)localFs).getRaw(); indexCacheList = new ArrayList<SpillRecord>(); //sanity checks final float spillper = 
job.getFloat("io.sort.spill.percent",(float)0.8); final float recper = job.getFloat("io.sort.record.percent",(float)0.05); final int sortmb = job.getInt("io.sort.mb", 100); if (spillper > (float)1.0 || spillper < (float)0.0) { throw new IOException("Invalid \"io.sort.spill.percent\": " + spillper); } if (recper > (float)1.0 || recper < (float)0.01) { throw new IOException("Invalid \"io.sort.record.percent\": " + recper); } if ((sortmb & 0x7FF) != sortmb) { throw new IOException("Invalid \"io.sort.mb\": " + sortmb); } sorter = ReflectionUtils.newInstance( job.getClass("map.sort.class", QuickSort.class, IndexedSorter.class), job); LOG.info("io.sort.mb = " + sortmb); // buffers and accounting int maxMemUsage = sortmb << 20; int recordCapacity = (int)(maxMemUsage * recper); recordCapacity -= recordCapacity % RECSIZE; kvbuffer = new byte[maxMemUsage - recordCapacity]; bufvoid = kvbuffer.length; recordCapacity /= RECSIZE; kvoffsets = new int[recordCapacity]; kvindices = new int[recordCapacity * ACCTSIZE]; softBufferLimit = (int)(kvbuffer.length * spillper); softRecordLimit = (int)(kvoffsets.length * spillper); LOG.info("data buffer = " + softBufferLimit + "/" + kvbuffer.length); LOG.info("record buffer = " + softRecordLimit + "/" + kvoffsets.length); // k/v serialization comparator = job.getOutputKeyComparator(); keyClass = (Class<K>)job.getMapOutputKeyClass(); valClass = (Class<V>)job.getMapOutputValueClass(); serializationFactory = new SerializationFactory(job); keySerializer = serializationFactory.getSerializer(keyClass); keySerializer.open(bb); valSerializer = serializationFactory.getSerializer(valClass); valSerializer.open(bb); // counters mapOutputByteCounter = reporter.getCounter(MAP_OUTPUT_BYTES); mapOutputRecordCounter = reporter.getCounter(MAP_OUTPUT_RECORDS); Counters.Counter combineInputCounter = reporter.getCounter(COMBINE_INPUT_RECORDS); combineOutputCounter = reporter.getCounter(COMBINE_OUTPUT_RECORDS); fileOutputByteCounter = reporter.getCounter(MAP_OUTPUT_MATERIALIZED_BYTES); // compression if (job.getCompressMapOutput()) { Class<? 
extends CompressionCodec> codecClass = job.getMapOutputCompressorClass(DefaultCodec.class); codec = ReflectionUtils.newInstance(codecClass, job); } // combiner combinerRunner = CombinerRunner.create(job, getTaskID(), combineInputCounter, reporter, null); if (combinerRunner != null) { combineCollector= new CombineOutputCollector<K,V>(combineOutputCounter); } else { combineCollector = null; } minSpillsForCombine = job.getInt("min.num.spills.for.combine", 3); spillThread.setDaemon(true); spillThread.setName("SpillThread"); spillLock.lock(); try { spillThread.start(); while (!spillThreadRunning) { spillDone.await(); } } catch (InterruptedException e) { throw (IOException)new IOException("Spill thread failed to initialize" ).initCause(sortSpillException); } finally { spillLock.unlock(); } if (sortSpillException != null) { throw (IOException)new IOException("Spill thread failed to initialize" ).initCause(sortSpillException); } } public synchronized void collect(K key, V value, int partition ) throws IOException { reporter.progress(); if (key.getClass() != keyClass) { throw new IOException("Type mismatch in key from map: expected " + keyClass.getName() + ", recieved " + key.getClass().getName()); } if (value.getClass() != valClass) { throw new IOException("Type mismatch in value from map: expected " + valClass.getName() + ", recieved " + value.getClass().getName()); } final int kvnext = (kvindex + 1) % kvoffsets.length; spillLock.lock(); try { boolean kvfull; do { if (sortSpillException != null) { throw (IOException)new IOException("Spill failed" ).initCause(sortSpillException); } // sufficient acct space kvfull = kvnext == kvstart; final boolean kvsoftlimit = ((kvnext > kvend) ? kvnext - kvend > softRecordLimit : kvend - kvnext <= kvoffsets.length - softRecordLimit); if (kvstart == kvend && kvsoftlimit) { LOG.info("Spilling map output: record full = " + kvsoftlimit); startSpill(); } if (kvfull) { try { while (kvstart != kvend) { reporter.progress(); spillDone.await(); } } catch (InterruptedException e) { throw (IOException)new IOException( "Collector interrupted while waiting for the writer" ).initCause(e); } } } while (kvfull); } finally { spillLock.unlock(); } try { // serialize key bytes into buffer int keystart = bufindex; keySerializer.serialize(key); if (bufindex < keystart) { // wrapped the key; reset required bb.reset(); keystart = 0; } // serialize value bytes into buffer final int valstart = bufindex; valSerializer.serialize(value); int valend = bb.markRecord(); if (partition < 0 || partition >= partitions) { throw new IOException("Illegal partition for " + key + " (" + partition + ")"); } mapOutputRecordCounter.increment(1); mapOutputByteCounter.increment(valend >= keystart ? valend - keystart : (bufvoid - keystart) + valend); // update accounting info int ind = kvindex * ACCTSIZE; kvoffsets[kvindex] = ind; kvindices[ind + PARTITION] = partition; kvindices[ind + KEYSTART] = keystart; kvindices[ind + VALSTART] = valstart; kvindex = kvnext; } catch (MapBufferTooSmallException e) { LOG.info("Record too large for in-memory buffer: " + e.getMessage()); spillSingleRecord(key, value, partition); mapOutputRecordCounter.increment(1); return; } } /** * Compare logical range, st i, j MOD offset capacity. * Compare by partition, then by key. 
* @see IndexedSortable#compare */ public int compare(int i, int j) { final int ii = kvoffsets[i % kvoffsets.length]; final int ij = kvoffsets[j % kvoffsets.length]; // sort by partition if (kvindices[ii + PARTITION] != kvindices[ij + PARTITION]) { return kvindices[ii + PARTITION] - kvindices[ij + PARTITION]; } // sort by key return comparator.compare(kvbuffer, kvindices[ii + KEYSTART], kvindices[ii + VALSTART] - kvindices[ii + KEYSTART], kvbuffer, kvindices[ij + KEYSTART], kvindices[ij + VALSTART] - kvindices[ij + KEYSTART]); } /** * Swap logical indices st i, j MOD offset capacity. * @see IndexedSortable#swap */ public void swap(int i, int j) { i %= kvoffsets.length; j %= kvoffsets.length; int tmp = kvoffsets[i]; kvoffsets[i] = kvoffsets[j]; kvoffsets[j] = tmp; } /** * Inner class managing the spill of serialized records to disk. */ protected class BlockingBuffer extends DataOutputStream { public BlockingBuffer() { this(new Buffer()); } private BlockingBuffer(OutputStream out) { super(out); } /** * Mark end of record. Note that this is required if the buffer is to * cut the spill in the proper place. */ public int markRecord() { bufmark = bufindex; return bufindex; } /** * Set position from last mark to end of writable buffer, then rewrite * the data between last mark and kvindex. * This handles a special case where the key wraps around the buffer. * If the key is to be passed to a RawComparator, then it must be * contiguous in the buffer. This recopies the data in the buffer back * into itself, but starting at the beginning of the buffer. Note that * reset() should <b>only</b> be called immediately after detecting * this condition. To call it at any other time is undefined and would * likely result in data loss or corruption. * @see #markRecord() */ protected synchronized void reset() throws IOException { // spillLock unnecessary; If spill wraps, then // bufindex < bufstart < bufend so contention is impossible // a stale value for bufstart does not affect correctness, since // we can only get false negatives that force the more // conservative path int headbytelen = bufvoid - bufmark; bufvoid = bufmark; if (bufindex + headbytelen < bufstart) { System.arraycopy(kvbuffer, 0, kvbuffer, headbytelen, bufindex); System.arraycopy(kvbuffer, bufvoid, kvbuffer, 0, headbytelen); bufindex += headbytelen; } else { byte[] keytmp = new byte[bufindex]; System.arraycopy(kvbuffer, 0, keytmp, 0, bufindex); bufindex = 0; out.write(kvbuffer, bufmark, headbytelen); out.write(keytmp); } } } public class Buffer extends OutputStream { private final byte[] scratch = new byte[1]; @Override public synchronized void write(int v) throws IOException { scratch[0] = (byte)v; write(scratch, 0, 1); } /** * Attempt to write a sequence of bytes to the collection buffer. * This method will block if the spill thread is running and it * cannot write. * @throws MapBufferTooSmallException if record is too large to * deserialize into the collection buffer. */ @Override public synchronized void write(byte b[], int off, int len) throws IOException { boolean buffull = false; boolean wrap = false; spillLock.lock(); try { do { if (sortSpillException != null) { throw (IOException)new IOException("Spill failed" ).initCause(sortSpillException); } // sufficient buffer space? 
if (bufstart <= bufend && bufend <= bufindex) { buffull = bufindex + len > bufvoid; wrap = (bufvoid - bufindex) + bufstart > len; } else { // bufindex <= bufstart <= bufend // bufend <= bufindex <= bufstart wrap = false; buffull = bufindex + len > bufstart; } if (kvstart == kvend) { // spill thread not running if (kvend != kvindex) { // we have records we can spill final boolean bufsoftlimit = (bufindex > bufend) ? bufindex - bufend > softBufferLimit : bufend - bufindex < bufvoid - softBufferLimit; if (bufsoftlimit || (buffull && !wrap)) { LOG.info("Spilling map output: buffer full= " + bufsoftlimit); startSpill(); } } else if (buffull && !wrap) { // We have no buffered records, and this record is too large // to write into kvbuffer. We must spill it directly from // collect final int size = ((bufend <= bufindex) ? bufindex - bufend : (bufvoid - bufend) + bufindex) + len; bufstart = bufend = bufindex = bufmark = 0; kvstart = kvend = kvindex = 0; bufvoid = kvbuffer.length; throw new MapBufferTooSmallException(size + " bytes"); } } if (buffull && !wrap) { try { while (kvstart != kvend) { reporter.progress(); spillDone.await(); } } catch (InterruptedException e) { throw (IOException)new IOException( "Buffer interrupted while waiting for the writer" ).initCause(e); } } } while (buffull && !wrap); } finally { spillLock.unlock(); } // here, we know that we have sufficient space to write if (buffull) { final int gaplen = bufvoid - bufindex; System.arraycopy(b, off, kvbuffer, bufindex, gaplen); len -= gaplen; off += gaplen; bufindex = 0; } System.arraycopy(b, off, kvbuffer, bufindex, len); bufindex += len; } } public synchronized void flush() throws IOException, ClassNotFoundException, InterruptedException { LOG.info("Starting flush of map output"); spillLock.lock(); try { while (kvstart != kvend) { reporter.progress(); spillDone.await(); } if (sortSpillException != null) { throw (IOException)new IOException("Spill failed" ).initCause(sortSpillException); } if (kvend != kvindex) { kvend = kvindex; bufend = bufmark; sortAndSpill(); } } catch (InterruptedException e) { throw (IOException)new IOException( "Buffer interrupted while waiting for the writer" ).initCause(e); } finally { spillLock.unlock(); } assert !spillLock.isHeldByCurrentThread(); // shut down spill thread and wait for it to exit. Since the preceding // ensures that it is finished with its work (and sortAndSpill did not // throw), we elect to use an interrupt instead of setting a flag. // Spilling simultaneously from this thread while the spill thread // finishes its work might be both a useful way to extend this and also // sufficient motivation for the latter approach. 
try { spillThread.interrupt(); spillThread.join(); } catch (InterruptedException e) { throw (IOException)new IOException("Spill failed" ).initCause(e); } // release sort buffer before the merge kvbuffer = null; mergeParts(); Path outputPath = mapOutputFile.getOutputFile(); fileOutputByteCounter.increment(rfs.getFileStatus(outputPath).getLen()); } public void close() { } protected class SpillThread extends Thread { @Override public void run() { spillLock.lock(); spillThreadRunning = true; try { while (true) { spillDone.signal(); while (kvstart == kvend) { spillReady.await(); } try { spillLock.unlock(); sortAndSpill(); } catch (Exception e) { sortSpillException = e; } catch (Throwable t) { sortSpillException = t; String logMsg = "Task " + getTaskID() + " failed : " + StringUtils.stringifyException(t); reportFatalError(getTaskID(), t, logMsg); } finally { spillLock.lock(); if (bufend < bufindex && bufindex < bufstart) { bufvoid = kvbuffer.length; } kvstart = kvend; bufstart = bufend; } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); } finally { spillLock.unlock(); spillThreadRunning = false; } } } private synchronized void startSpill() { LOG.info("bufstart = " + bufstart + "; bufend = " + bufmark + "; bufvoid = " + bufvoid); LOG.info("kvstart = " + kvstart + "; kvend = " + kvindex + "; length = " + kvoffsets.length); kvend = kvindex; bufend = bufmark; spillReady.signal(); } private void sortAndSpill() throws IOException, ClassNotFoundException, InterruptedException { //approximate the length of the output file to be the length of the //buffer + header lengths for the partitions long size = (bufend >= bufstart ? bufend - bufstart : (bufvoid - bufend) + bufstart) + partitions * APPROX_HEADER_LENGTH; FSDataOutputStream out = null; try { // create spill file final SpillRecord spillRec = new SpillRecord(partitions); final Path filename = mapOutputFile.getSpillFileForWrite(numSpills, size); out = rfs.create(filename); final int endPosition = (kvend > kvstart) ? 
kvend : kvoffsets.length + kvend; sorter.sort(MapOutputBuffer.this, kvstart, endPosition, reporter); int spindex = kvstart; IndexRecord rec = new IndexRecord(); InMemValBytes value = new InMemValBytes(); for (int i = 0; i < partitions; ++i) { IFile.Writer<K, V> writer = null; try { long segmentStart = out.getPos(); writer = new Writer<K, V>(job, out, keyClass, valClass, codec, spilledRecordsCounter); if (combinerRunner == null) { // spill directly DataInputBuffer key = new DataInputBuffer(); while (spindex < endPosition && kvindices[kvoffsets[spindex % kvoffsets.length] + PARTITION] == i) { final int kvoff = kvoffsets[spindex % kvoffsets.length]; getVBytesForOffset(kvoff, value); key.reset(kvbuffer, kvindices[kvoff + KEYSTART], (kvindices[kvoff + VALSTART] - kvindices[kvoff + KEYSTART])); writer.append(key, value); ++spindex; } } else { int spstart = spindex; while (spindex < endPosition && kvindices[kvoffsets[spindex % kvoffsets.length] + PARTITION] == i) { ++spindex; } // Note: we would like to avoid the combiner if we've fewer // than some threshold of records for a partition if (spstart != spindex) { combineCollector.setWriter(writer); RawKeyValueIterator kvIter = new MRResultIterator(spstart, spindex); combinerRunner.combine(kvIter, combineCollector); } } // close the writer writer.close(); // record offsets rec.startOffset = segmentStart; rec.rawLength = writer.getRawLength(); rec.partLength = writer.getCompressedLength(); spillRec.putIndex(rec, i); writer = null; } finally { if (null != writer) writer.close(); } } if (totalIndexCacheMemory >= INDEX_CACHE_MEMORY_LIMIT) { // create spill index file Path indexFilename = mapOutputFile.getSpillIndexFileForWrite(numSpills, partitions * MAP_OUTPUT_INDEX_RECORD_LENGTH); spillRec.writeToFile(indexFilename, job); } else { indexCacheList.add(spillRec); totalIndexCacheMemory += spillRec.size() * MAP_OUTPUT_INDEX_RECORD_LENGTH; } LOG.info("Finished spill " + numSpills); ++numSpills; } finally { if (out != null) out.close(); } } /** * Handles the degenerate case where serialization fails to fit in * the in-memory buffer, so we must spill the record from collect * directly to a spill file. Consider this "losing". */ private void spillSingleRecord(final K key, final V value, int partition) throws IOException { long size = kvbuffer.length + partitions * APPROX_HEADER_LENGTH; FSDataOutputStream out = null; try { // create spill file final SpillRecord spillRec = new SpillRecord(partitions); final Path filename = mapOutputFile.getSpillFileForWrite(numSpills, size); out = rfs.create(filename); // we don't run the combiner for a single record IndexRecord rec = new IndexRecord(); for (int i = 0; i < partitions; ++i) { IFile.Writer<K, V> writer = null; try { long segmentStart = out.getPos(); // Create a new codec, don't care! 
writer = new IFile.Writer<K,V>(job, out, keyClass, valClass, codec, spilledRecordsCounter); if (i == partition) { final long recordStart = out.getPos(); writer.append(key, value); // Note that our map byte count will not be accurate with // compression mapOutputByteCounter.increment(out.getPos() - recordStart); } writer.close(); // record offsets rec.startOffset = segmentStart; rec.rawLength = writer.getRawLength(); rec.partLength = writer.getCompressedLength(); spillRec.putIndex(rec, i); writer = null; } catch (IOException e) { if (null != writer) writer.close(); throw e; } } if (totalIndexCacheMemory >= INDEX_CACHE_MEMORY_LIMIT) { // create spill index file Path indexFilename = mapOutputFile.getSpillIndexFileForWrite(numSpills, partitions * MAP_OUTPUT_INDEX_RECORD_LENGTH); spillRec.writeToFile(indexFilename, job); } else { indexCacheList.add(spillRec); totalIndexCacheMemory += spillRec.size() * MAP_OUTPUT_INDEX_RECORD_LENGTH; } ++numSpills; } finally { if (out != null) out.close(); } } /** * Given an offset, populate vbytes with the associated set of * deserialized value bytes. Should only be called during a spill. */ private void getVBytesForOffset(int kvoff, InMemValBytes vbytes) { final int nextindex = (kvoff / ACCTSIZE == (kvend - 1 + kvoffsets.length) % kvoffsets.length) ? bufend : kvindices[(kvoff + ACCTSIZE + KEYSTART) % kvindices.length]; int vallen = (nextindex >= kvindices[kvoff + VALSTART]) ? nextindex - kvindices[kvoff + VALSTART] : (bufvoid - kvindices[kvoff + VALSTART]) + nextindex; vbytes.reset(kvbuffer, kvindices[kvoff + VALSTART], vallen); } /** * Inner class wrapping valuebytes, used for appendRaw. */ protected class InMemValBytes extends DataInputBuffer { private byte[] buffer; private int start; private int length; public void reset(byte[] buffer, int start, int length) { this.buffer = buffer; this.start = start; this.length = length; if (start + length > bufvoid) { this.buffer = new byte[this.length]; final int taillen = bufvoid - start; System.arraycopy(buffer, start, this.buffer, 0, taillen); System.arraycopy(buffer, 0, this.buffer, taillen, length-taillen); this.start = 0; } super.reset(this.buffer, this.start, this.length); } } protected class MRResultIterator implements RawKeyValueIterator { private final DataInputBuffer keybuf = new DataInputBuffer(); private final InMemValBytes vbytes = new InMemValBytes(); private final int end; private int current; public MRResultIterator(int start, int end) { this.end = end; current = start - 1; } public boolean next() throws IOException { return ++current < end; } public DataInputBuffer getKey() throws IOException { final int kvoff = kvoffsets[current % kvoffsets.length]; keybuf.reset(kvbuffer, kvindices[kvoff + KEYSTART], kvindices[kvoff + VALSTART] - kvindices[kvoff + KEYSTART]); return keybuf; } public DataInputBuffer getValue() throws IOException { getVBytesForOffset(kvoffsets[current % kvoffsets.length], vbytes); return vbytes; } public Progress getProgress() { return null; } public void close() { } } private void mergeParts() throws IOException, InterruptedException, ClassNotFoundException { // get the approximate size of the final output/index files long finalOutFileSize = 0; long finalIndexFileSize = 0; final Path[] filename = new Path[numSpills]; final TaskAttemptID mapId = getTaskID(); for(int i = 0; i < numSpills; i++) { filename[i] = mapOutputFile.getSpillFile(i); finalOutFileSize += rfs.getFileStatus(filename[i]).getLen(); } if (numSpills == 1) { //the spill is the final output rfs.rename(filename[0], new 
Path(filename[0].getParent(), "file.out")); if (indexCacheList.size() == 0) { rfs.rename(mapOutputFile.getSpillIndexFile(0), new Path(filename[0].getParent(),"file.out.index")); } else { indexCacheList.get(0).writeToFile( new Path(filename[0].getParent(),"file.out.index"), job); } return; } // read in paged indices for (int i = indexCacheList.size(); i < numSpills; ++i) { Path indexFileName = mapOutputFile.getSpillIndexFile(i); indexCacheList.add(new SpillRecord(indexFileName, job, null)); } //make correction in the length to include the sequence file header //lengths for each partition finalOutFileSize += partitions * APPROX_HEADER_LENGTH; finalIndexFileSize = partitions * MAP_OUTPUT_INDEX_RECORD_LENGTH; Path finalOutputFile = mapOutputFile.getOutputFileForWrite(finalOutFileSize); Path finalIndexFile = mapOutputFile.getOutputIndexFileForWrite(finalIndexFileSize); //The output stream for the final single output file FSDataOutputStream finalOut = rfs.create(finalOutputFile, true, 4096); if (numSpills == 0) { //create dummy files IndexRecord rec = new IndexRecord(); SpillRecord sr = new SpillRecord(partitions); try { for (int i = 0; i < partitions; i++) { long segmentStart = finalOut.getPos(); Writer<K, V> writer = new Writer<K, V>(job, finalOut, keyClass, valClass, codec, null); writer.close(); rec.startOffset = segmentStart; rec.rawLength = writer.getRawLength(); rec.partLength = writer.getCompressedLength(); sr.putIndex(rec, i); } sr.writeToFile(finalIndexFile, job); } finally { finalOut.close(); } return; } { IndexRecord rec = new IndexRecord(); final SpillRecord spillRec = new SpillRecord(partitions); for (int parts = 0; parts < partitions; parts++) { //create the segments to be merged List<Segment<K,V>> segmentList = new ArrayList<Segment<K, V>>(numSpills); for(int i = 0; i < numSpills; i++) { IndexRecord indexRecord = indexCacheList.get(i).getIndex(parts); Segment<K,V> s = new Segment<K,V>(job, rfs, filename[i], indexRecord.startOffset, indexRecord.partLength, codec, true); segmentList.add(i, s); if (LOG.isDebugEnabled()) { LOG.debug("MapId=" + mapId + " Reducer=" + parts + "Spill =" + i + "(" + indexRecord.startOffset + "," + indexRecord.rawLength + ", " + indexRecord.partLength + ")"); } } //merge @SuppressWarnings("unchecked") RawKeyValueIterator kvIter = Merger.merge(job, rfs, keyClass, valClass, codec, segmentList, job.getInt("io.sort.factor", 100), new Path(mapId.toString()), job.getOutputKeyComparator(), reporter, null, spilledRecordsCounter); //write merged output to disk long segmentStart = finalOut.getPos(); Writer<K, V> writer = new Writer<K, V>(job, finalOut, keyClass, valClass, codec, spilledRecordsCounter); if (combinerRunner == null || numSpills < minSpillsForCombine) { Merger.writeFile(kvIter, writer, reporter, job); } else { combineCollector.setWriter(writer); combinerRunner.combine(kvIter, combineCollector); } //close writer.close(); // record offsets rec.startOffset = segmentStart; rec.rawLength = writer.getRawLength(); rec.partLength = writer.getCompressedLength(); spillRec.putIndex(rec, parts); } spillRec.writeToFile(finalIndexFile, job); finalOut.close(); for(int i = 0; i < numSpills; i++) { rfs.delete(filename[i],true); } } } } // MapOutputBuffer /** * Exception indicating that the allocated sort buffer is insufficient * to hold the current record. */ @SuppressWarnings("serial") private static class MapBufferTooSmallException extends IOException { public MapBufferTooSmallException(String s) { super(s); } } }
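[Editor's aside — illustration only, not part of the MapTask.java record above.] MapOutputBuffer's constructor splits the io.sort.mb collection buffer between the serialized-data array (kvbuffer) and the per-record accounting arrays (kvoffsets/kvindices), then derives the soft spill thresholds from io.sort.spill.percent. The standalone Java sketch below (class name SortBufferSizing is hypothetical) reproduces that arithmetic with the default settings so the resulting sizes can be inspected directly; the constants mirror ACCTSIZE and RECSIZE as defined in MapOutputBuffer.

// Minimal sketch of the buffer-sizing arithmetic from MapOutputBuffer's constructor.
public class SortBufferSizing {
    private static final int ACCTSIZE = 3;                   // partition, key offset, value offset
    private static final int RECSIZE = (ACCTSIZE + 1) * 4;   // accounting bytes per record

    public static void main(String[] args) {
        int sortmb = 100;       // io.sort.mb default
        float recper = 0.05f;   // io.sort.record.percent default
        float spillper = 0.8f;  // io.sort.spill.percent default

        int maxMemUsage = sortmb << 20;                   // total collection buffer, in bytes
        int recordCapacity = (int) (maxMemUsage * recper);
        recordCapacity -= recordCapacity % RECSIZE;       // align to whole accounting records
        int kvbufferLen = maxMemUsage - recordCapacity;   // bytes left for serialized k/v data
        int records = recordCapacity / RECSIZE;           // entries in kvoffsets (and kvindices / ACCTSIZE)

        System.out.println("kvbuffer bytes    = " + kvbufferLen);
        System.out.println("record capacity   = " + records);
        System.out.println("soft buffer limit = " + (int) (kvbufferLen * spillper));
        System.out.println("soft record limit = " + (int) (records * spillper));
    }
}

With the defaults shown, roughly 95 MB goes to kvbuffer and about 327,680 record slots to the accounting arrays, with spills triggered once roughly 80% of either resource is in use.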
src/mapred/org/apache/hadoop/mapred/MapTask.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import static org.apache.hadoop.mapred.Task.Counter.COMBINE_INPUT_RECORDS; import static org.apache.hadoop.mapred.Task.Counter.COMBINE_OUTPUT_RECORDS; import static org.apache.hadoop.mapred.Task.Counter.MAP_INPUT_BYTES; import static org.apache.hadoop.mapred.Task.Counter.MAP_INPUT_RECORDS; import static org.apache.hadoop.mapred.Task.Counter.MAP_OUTPUT_BYTES; import static org.apache.hadoop.mapred.Task.Counter.MAP_OUTPUT_MATERIALIZED_BYTES; import static org.apache.hadoop.mapred.Task.Counter.MAP_OUTPUT_RECORDS; import java.io.DataInput; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem.Statistics; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.io.serializer.Deserializer; import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.Serializer; import org.apache.hadoop.mapred.IFile.Writer; import org.apache.hadoop.mapred.Merger.Segment; import org.apache.hadoop.mapred.SortedRanges.SkipRangeIterator; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapreduce.split.JobSplit; import org.apache.hadoop.mapreduce.split.JobSplit.SplitMetaInfo; import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitIndex; import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.IndexedSortable; import org.apache.hadoop.util.IndexedSorter; import org.apache.hadoop.util.Progress; import org.apache.hadoop.util.QuickSort; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; /** A Map task. 
*/ class MapTask extends Task { /** * The size of each record in the index file for the map-outputs. */ public static final int MAP_OUTPUT_INDEX_RECORD_LENGTH = 24; private TaskSplitIndex splitMetaInfo = new TaskSplitIndex(); private final static int APPROX_HEADER_LENGTH = 150; private static final Log LOG = LogFactory.getLog(MapTask.class.getName()); { // set phase for this task setPhase(TaskStatus.Phase.MAP); } public MapTask() { super(); } public MapTask(String jobFile, TaskAttemptID taskId, int partition, TaskSplitIndex splitIndex, int numSlotsRequired) { super(jobFile, taskId, partition, numSlotsRequired); this.splitMetaInfo = splitIndex; } @Override public boolean isMapTask() { return true; } @Override public void localizeConfiguration(JobConf conf) throws IOException { super.localizeConfiguration(conf); // split.info file is used only by IsolationRunner. // Write the split file to the local disk if it is a normal map task (not a // job-setup or a job-cleanup task) and if the user wishes to run // IsolationRunner either by setting keep.failed.tasks.files to true or by // using keep.tasks.files.pattern if (supportIsolationRunner(conf) && isMapOrReduce()) { // localize the split meta-information Path localSplitMeta = new LocalDirAllocator("mapred.local.dir").getLocalPathForWrite( TaskTracker.getLocalSplitFile(conf.getUser(), getJobID() .toString(), getTaskID().toString()), conf); LOG.debug("Writing local split to " + localSplitMeta); DataOutputStream out = FileSystem.getLocal(conf).create(localSplitMeta); splitMetaInfo.write(out); out.close(); } } @Override public TaskRunner createRunner(TaskTracker tracker, TaskTracker.TaskInProgress tip, TaskTracker.RunningJob rjob ) throws IOException { return new MapTaskRunner(tip, tracker, this.conf, rjob); } @Override public void write(DataOutput out) throws IOException { super.write(out); if (isMapOrReduce()) { if (splitMetaInfo != null) { splitMetaInfo.write(out); } else { new TaskSplitIndex().write(out); } //TODO do we really need to set this to null? splitMetaInfo = null; } } @Override public void readFields(DataInput in) throws IOException { super.readFields(in); if (isMapOrReduce()) { splitMetaInfo.readFields(in); } } /** * This class wraps the user's record reader to update the counters and progress * as records are read. 
* @param <K> * @param <V> */ class TrackedRecordReader<K, V> implements RecordReader<K,V> { private RecordReader<K,V> rawIn; private Counters.Counter inputByteCounter; private Counters.Counter inputRecordCounter; private Counters.Counter fileInputByteCounter; private InputSplit split; private TaskReporter reporter; private long beforePos = -1; private long afterPos = -1; private long bytesInPrev = -1; private long bytesInCurr = -1; private final Statistics fsStats; TrackedRecordReader(InputSplit split, JobConf job, TaskReporter reporter) throws IOException { inputRecordCounter = reporter.getCounter(MAP_INPUT_RECORDS); inputByteCounter = reporter.getCounter(MAP_INPUT_BYTES); fileInputByteCounter = reporter .getCounter(FileInputFormat.Counter.BYTES_READ); Statistics matchedStats = null; if (split instanceof FileSplit) { matchedStats = getFsStatistics(((FileSplit) split).getPath(), job); } fsStats = matchedStats; bytesInPrev = getInputBytes(fsStats); rawIn = job.getInputFormat().getRecordReader(split, job, reporter); bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); this.reporter = reporter; this.split = split; conf = job; } public K createKey() { return rawIn.createKey(); } public V createValue() { return rawIn.createValue(); } public synchronized boolean next(K key, V value) throws IOException { boolean ret = moveToNext(key, value); if (ret) { incrCounters(); } return ret; } protected void incrCounters() { inputRecordCounter.increment(1); inputByteCounter.increment(afterPos - beforePos); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } protected synchronized boolean moveToNext(K key, V value) throws IOException { boolean ret = false; try { reporter.setProgress(getProgress()); beforePos = getPos(); bytesInPrev = getInputBytes(fsStats); ret = rawIn.next(key, value); afterPos = getPos(); bytesInCurr = getInputBytes(fsStats); } catch (IOException ioe) { if (split instanceof FileSplit) { LOG.error("IO error in map input file " + conf.get("map.input.file")); throw new IOException("IO error in map input file " + conf.get("map.input.file"), ioe); } throw ioe; } return ret; } public long getPos() throws IOException { return rawIn.getPos(); } public void close() throws IOException { bytesInPrev = getInputBytes(fsStats); rawIn.close(); bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } public float getProgress() throws IOException { return rawIn.getProgress(); } TaskReporter getTaskReporter() { return reporter; } private long getInputBytes(Statistics stats) { return stats == null ? 0 : stats.getBytesRead(); } } /** * This class skips the records based on the failed ranges from previous * attempts. 
*/ class SkippingRecordReader<K, V> extends TrackedRecordReader<K,V> { private SkipRangeIterator skipIt; private SequenceFile.Writer skipWriter; private boolean toWriteSkipRecs; private TaskUmbilicalProtocol umbilical; private Counters.Counter skipRecCounter; private long recIndex = -1; SkippingRecordReader(InputSplit split, TaskUmbilicalProtocol umbilical, TaskReporter reporter) throws IOException { super(split, conf, reporter); this.umbilical = umbilical; this.skipRecCounter = reporter.getCounter(Counter.MAP_SKIPPED_RECORDS); this.toWriteSkipRecs = toWriteSkipRecs() && SkipBadRecords.getSkipOutputPath(conf)!=null; skipIt = getSkipRanges().skipRangeIterator(); } public synchronized boolean next(K key, V value) throws IOException { if(!skipIt.hasNext()) { LOG.warn("Further records got skipped."); return false; } boolean ret = moveToNext(key, value); long nextRecIndex = skipIt.next(); long skip = 0; while(recIndex<nextRecIndex && ret) { if(toWriteSkipRecs) { writeSkippedRec(key, value); } ret = moveToNext(key, value); skip++; } //close the skip writer once all the ranges are skipped if(skip>0 && skipIt.skippedAllRanges() && skipWriter!=null) { skipWriter.close(); } skipRecCounter.increment(skip); reportNextRecordRange(umbilical, recIndex); if (ret) { incrCounters(); } return ret; } protected synchronized boolean moveToNext(K key, V value) throws IOException { recIndex++; return super.moveToNext(key, value); } @SuppressWarnings("unchecked") private void writeSkippedRec(K key, V value) throws IOException{ if(skipWriter==null) { Path skipDir = SkipBadRecords.getSkipOutputPath(conf); Path skipFile = new Path(skipDir, getTaskID().toString()); skipWriter = SequenceFile.createWriter( skipFile.getFileSystem(conf), conf, skipFile, (Class<K>) createKey().getClass(), (Class<V>) createValue().getClass(), CompressionType.BLOCK, getTaskReporter()); } skipWriter.append(key, value); } } @Override public void run(final JobConf job, final TaskUmbilicalProtocol umbilical) throws IOException, ClassNotFoundException, InterruptedException { this.umbilical = umbilical; // start thread that will handle communication with parent TaskReporter reporter = new TaskReporter(getProgress(), umbilical); reporter.startCommunicationThread(); boolean useNewApi = job.getUseNewMapper(); initialize(job, getJobID(), reporter, useNewApi); // check if it is a cleanupJobTask if (jobCleanup) { runJobCleanupTask(umbilical, reporter); return; } if (jobSetup) { runJobSetupTask(umbilical, reporter); return; } if (taskCleanup) { runTaskCleanupTask(umbilical, reporter); return; } if (useNewApi) { runNewMapper(job, splitMetaInfo, umbilical, reporter); } else { runOldMapper(job, splitMetaInfo, umbilical, reporter); } done(umbilical, reporter); } @SuppressWarnings("unchecked") private <T> T getSplitDetails(Path file, long offset) throws IOException { FileSystem fs = file.getFileSystem(conf); FSDataInputStream inFile = fs.open(file); inFile.seek(offset); String className = Text.readString(inFile); Class<T> cls; try { cls = (Class<T>) conf.getClassByName(className); } catch (ClassNotFoundException ce) { IOException wrap = new IOException("Split class " + className + " not found"); wrap.initCause(ce); throw wrap; } SerializationFactory factory = new SerializationFactory(conf); Deserializer<T> deserializer = (Deserializer<T>) factory.getDeserializer(cls); deserializer.open(inFile); T split = deserializer.deserialize(null); long pos = inFile.getPos(); getCounters().findCounter( Task.Counter.SPLIT_RAW_BYTES).increment(pos - offset); inFile.close(); 
return split; } @SuppressWarnings("unchecked") private <INKEY,INVALUE,OUTKEY,OUTVALUE> void runOldMapper(final JobConf job, final TaskSplitIndex splitIndex, final TaskUmbilicalProtocol umbilical, TaskReporter reporter ) throws IOException, InterruptedException, ClassNotFoundException { InputSplit inputSplit = getSplitDetails(new Path(splitIndex.getSplitLocation()), splitIndex.getStartOffset()); updateJobWithSplit(job, inputSplit); reporter.setInputSplit(inputSplit); RecordReader<INKEY,INVALUE> in = isSkipping() ? new SkippingRecordReader<INKEY,INVALUE>(inputSplit, umbilical, reporter) : new TrackedRecordReader<INKEY,INVALUE>(inputSplit, job, reporter); job.setBoolean("mapred.skip.on", isSkipping()); int numReduceTasks = conf.getNumReduceTasks(); LOG.info("numReduceTasks: " + numReduceTasks); MapOutputCollector collector = null; if (numReduceTasks > 0) { collector = new MapOutputBuffer(umbilical, job, reporter); } else { collector = new DirectMapOutputCollector(umbilical, job, reporter); } MapRunnable<INKEY,INVALUE,OUTKEY,OUTVALUE> runner = ReflectionUtils.newInstance(job.getMapRunnerClass(), job); try { runner.run(in, new OldOutputCollector(collector, conf), reporter); collector.flush(); } finally { //close in.close(); // close input collector.close(); } } /** * Update the job with details about the file split * @param job the job configuration to update * @param inputSplit the file split */ private void updateJobWithSplit(final JobConf job, InputSplit inputSplit) { if (inputSplit instanceof FileSplit) { FileSplit fileSplit = (FileSplit) inputSplit; job.set("map.input.file", fileSplit.getPath().toString()); job.setLong("map.input.start", fileSplit.getStart()); job.setLong("map.input.length", fileSplit.getLength()); } } static class NewTrackingRecordReader<K,V> extends org.apache.hadoop.mapreduce.RecordReader<K,V> { private final org.apache.hadoop.mapreduce.RecordReader<K,V> real; private final org.apache.hadoop.mapreduce.Counter inputRecordCounter; private final org.apache.hadoop.mapreduce.Counter fileInputByteCounter; private final TaskReporter reporter; private org.apache.hadoop.mapreduce.InputSplit inputSplit; private final JobConf job; private final Statistics fsStats; NewTrackingRecordReader(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.mapreduce.InputFormat inputFormat, TaskReporter reporter, JobConf job, org.apache.hadoop.mapreduce.TaskAttemptContext taskContext) throws IOException, InterruptedException { this.reporter = reporter; this.inputSplit = split; this.job = job; this.inputRecordCounter = reporter.getCounter(MAP_INPUT_RECORDS); this.fileInputByteCounter = reporter .getCounter(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.Counter.BYTES_READ); Statistics matchedStats = null; if (split instanceof org.apache.hadoop.mapreduce.lib.input.FileSplit) { matchedStats = getFsStatistics(((org.apache.hadoop.mapreduce.lib.input.FileSplit) split) .getPath(), job); } fsStats = matchedStats; long bytesInPrev = getInputBytes(fsStats); this.real = inputFormat.createRecordReader(split, taskContext); long bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } @Override public void close() throws IOException { long bytesInPrev = getInputBytes(fsStats); real.close(); long bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } @Override public K getCurrentKey() throws IOException, InterruptedException { return real.getCurrentKey(); } @Override public V getCurrentValue() throws 
IOException, InterruptedException { return real.getCurrentValue(); } @Override public float getProgress() throws IOException, InterruptedException { return real.getProgress(); } @Override public void initialize(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.mapreduce.TaskAttemptContext context ) throws IOException, InterruptedException { long bytesInPrev = getInputBytes(fsStats); real.initialize(split, context); long bytesInCurr = getInputBytes(fsStats); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } @Override public boolean nextKeyValue() throws IOException, InterruptedException { boolean result = false; try { long bytesInPrev = getInputBytes(fsStats); result = real.nextKeyValue(); long bytesInCurr = getInputBytes(fsStats); if (result) { inputRecordCounter.increment(1); fileInputByteCounter.increment(bytesInCurr - bytesInPrev); } reporter.setProgress(getProgress()); } catch (IOException ioe) { if (inputSplit instanceof FileSplit) { FileSplit fileSplit = (FileSplit) inputSplit; LOG.error("IO error in map input file " + fileSplit.getPath().toString()); throw new IOException("IO error in map input file " + fileSplit.getPath().toString(), ioe); } throw ioe; } return result; } private long getInputBytes(Statistics stats) { return stats == null ? 0 : stats.getBytesRead(); } } /** * Since the mapred and mapreduce Partitioners don't share a common interface * (JobConfigurable is deprecated and a subtype of mapred.Partitioner), the * partitioner lives in Old/NewOutputCollector. Note that, for map-only jobs, * the configured partitioner should not be called. It's common for * partitioners to compute a result mod numReduces, which causes a div0 error */ private static class OldOutputCollector<K,V> implements OutputCollector<K,V> { private final Partitioner<K,V> partitioner; private final MapOutputCollector<K,V> collector; private final int numPartitions; @SuppressWarnings("unchecked") OldOutputCollector(MapOutputCollector<K,V> collector, JobConf conf) { numPartitions = conf.getNumReduceTasks(); if (numPartitions > 0) { partitioner = (Partitioner<K,V>) ReflectionUtils.newInstance(conf.getPartitionerClass(), conf); } else { partitioner = new Partitioner<K,V>() { @Override public void configure(JobConf job) { } @Override public int getPartition(K key, V value, int numPartitions) { return -1; } }; } this.collector = collector; } @Override public void collect(K key, V value) throws IOException { try { collector.collect(key, value, partitioner.getPartition(key, value, numPartitions)); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); throw new IOException("interrupt exception", ie); } } } private class NewDirectOutputCollector<K,V> extends org.apache.hadoop.mapreduce.RecordWriter<K,V> { private final org.apache.hadoop.mapreduce.RecordWriter out; private final TaskReporter reporter; private final Counters.Counter mapOutputRecordCounter; private final Counters.Counter fileOutputByteCounter; private final Statistics fsStats; @SuppressWarnings("unchecked") NewDirectOutputCollector(org.apache.hadoop.mapreduce.JobContext jobContext, JobConf job, TaskUmbilicalProtocol umbilical, TaskReporter reporter) throws IOException, ClassNotFoundException, InterruptedException { this.reporter = reporter; Statistics matchedStats = null; if (outputFormat instanceof org.apache.hadoop.mapreduce.lib.output.FileOutputFormat) { matchedStats = getFsStatistics(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat .getOutputPath(jobContext), job); } fsStats = matchedStats; 
mapOutputRecordCounter = reporter.getCounter(MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.Counter.BYTES_WRITTEN); long bytesOutPrev = getOutputBytes(fsStats); out = outputFormat.getRecordWriter(taskContext); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); } @Override @SuppressWarnings("unchecked") public void write(K key, V value) throws IOException, InterruptedException { reporter.progress(); long bytesOutPrev = getOutputBytes(fsStats); out.write(key, value); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); mapOutputRecordCounter.increment(1); } @Override public void close(TaskAttemptContext context) throws IOException,InterruptedException { reporter.progress(); if (out != null) { long bytesOutPrev = getOutputBytes(fsStats); out.close(context); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); } } private long getOutputBytes(Statistics stats) { return stats == null ? 0 : stats.getBytesWritten(); } } private class NewOutputCollector<K,V> extends org.apache.hadoop.mapreduce.RecordWriter<K,V> { private final MapOutputCollector<K,V> collector; private final org.apache.hadoop.mapreduce.Partitioner<K,V> partitioner; private final int partitions; @SuppressWarnings("unchecked") NewOutputCollector(org.apache.hadoop.mapreduce.JobContext jobContext, JobConf job, TaskUmbilicalProtocol umbilical, TaskReporter reporter ) throws IOException, ClassNotFoundException { collector = new MapOutputBuffer<K,V>(umbilical, job, reporter); partitions = jobContext.getNumReduceTasks(); if (partitions > 0) { partitioner = (org.apache.hadoop.mapreduce.Partitioner<K,V>) ReflectionUtils.newInstance(jobContext.getPartitionerClass(), job); } else { partitioner = new org.apache.hadoop.mapreduce.Partitioner<K,V>() { @Override public int getPartition(K key, V value, int numPartitions) { return -1; } }; } } @Override public void write(K key, V value) throws IOException, InterruptedException { collector.collect(key, value, partitioner.getPartition(key, value, partitions)); } @Override public void close(TaskAttemptContext context ) throws IOException,InterruptedException { try { collector.flush(); } catch (ClassNotFoundException cnf) { throw new IOException("can't find class ", cnf); } collector.close(); } } @SuppressWarnings("unchecked") private <INKEY,INVALUE,OUTKEY,OUTVALUE> void runNewMapper(final JobConf job, final TaskSplitIndex splitIndex, final TaskUmbilicalProtocol umbilical, TaskReporter reporter ) throws IOException, ClassNotFoundException, InterruptedException { // make a task context so we can get the classes org.apache.hadoop.mapreduce.TaskAttemptContext taskContext = new org.apache.hadoop.mapreduce.TaskAttemptContext(job, getTaskID()); // make a mapper org.apache.hadoop.mapreduce.Mapper<INKEY,INVALUE,OUTKEY,OUTVALUE> mapper = (org.apache.hadoop.mapreduce.Mapper<INKEY,INVALUE,OUTKEY,OUTVALUE>) ReflectionUtils.newInstance(taskContext.getMapperClass(), job); // make the input format org.apache.hadoop.mapreduce.InputFormat<INKEY,INVALUE> inputFormat = (org.apache.hadoop.mapreduce.InputFormat<INKEY,INVALUE>) ReflectionUtils.newInstance(taskContext.getInputFormatClass(), job); // rebuild the input split org.apache.hadoop.mapreduce.InputSplit split = null; split = getSplitDetails(new Path(splitIndex.getSplitLocation()), splitIndex.getStartOffset()); 
org.apache.hadoop.mapreduce.RecordReader<INKEY,INVALUE> input = new NewTrackingRecordReader<INKEY,INVALUE> (split, inputFormat, reporter, job, taskContext); job.setBoolean("mapred.skip.on", isSkipping()); org.apache.hadoop.mapreduce.RecordWriter output = null; org.apache.hadoop.mapreduce.Mapper<INKEY,INVALUE,OUTKEY,OUTVALUE>.Context mapperContext = null; try { Constructor<org.apache.hadoop.mapreduce.Mapper.Context> contextConstructor = org.apache.hadoop.mapreduce.Mapper.Context.class.getConstructor (new Class[]{org.apache.hadoop.mapreduce.Mapper.class, Configuration.class, org.apache.hadoop.mapreduce.TaskAttemptID.class, org.apache.hadoop.mapreduce.RecordReader.class, org.apache.hadoop.mapreduce.RecordWriter.class, org.apache.hadoop.mapreduce.OutputCommitter.class, org.apache.hadoop.mapreduce.StatusReporter.class, org.apache.hadoop.mapreduce.InputSplit.class}); // get an output object if (job.getNumReduceTasks() == 0) { output = new NewDirectOutputCollector(taskContext, job, umbilical, reporter); } else { output = new NewOutputCollector(taskContext, job, umbilical, reporter); } mapperContext = contextConstructor.newInstance(mapper, job, getTaskID(), input, output, committer, reporter, split); input.initialize(split, mapperContext); mapper.run(mapperContext); input.close(); output.close(mapperContext); } catch (NoSuchMethodException e) { throw new IOException("Can't find Context constructor", e); } catch (InstantiationException e) { throw new IOException("Can't create Context", e); } catch (InvocationTargetException e) { throw new IOException("Can't invoke Context constructor", e); } catch (IllegalAccessException e) { throw new IOException("Can't invoke Context constructor", e); } } interface MapOutputCollector<K, V> { public void collect(K key, V value, int partition ) throws IOException, InterruptedException; public void close() throws IOException, InterruptedException; public void flush() throws IOException, InterruptedException, ClassNotFoundException; } class DirectMapOutputCollector<K, V> implements MapOutputCollector<K, V> { private RecordWriter<K, V> out = null; private TaskReporter reporter = null; private final Counters.Counter mapOutputRecordCounter; private final Counters.Counter fileOutputByteCounter; private final Statistics fsStats; @SuppressWarnings("unchecked") public DirectMapOutputCollector(TaskUmbilicalProtocol umbilical, JobConf job, TaskReporter reporter) throws IOException { this.reporter = reporter; String finalName = getOutputName(getPartition()); FileSystem fs = FileSystem.get(job); OutputFormat<K, V> outputFormat = job.getOutputFormat(); Statistics matchedStats = null; if (outputFormat instanceof FileOutputFormat) { matchedStats = getFsStatistics(FileOutputFormat.getOutputPath(job), job); } fsStats = matchedStats; mapOutputRecordCounter = reporter.getCounter(MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(FileOutputFormat.Counter.BYTES_WRITTEN); long bytesOutPrev = getOutputBytes(fsStats); out = job.getOutputFormat().getRecordWriter(fs, job, finalName, reporter); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); } public void close() throws IOException { if (this.out != null) { long bytesOutPrev = getOutputBytes(fsStats); out.close(this.reporter); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); } } public void flush() throws IOException, InterruptedException, ClassNotFoundException { } public void collect(K key, V value, int 
partition) throws IOException { reporter.progress(); long bytesOutPrev = getOutputBytes(fsStats); out.write(key, value); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); mapOutputRecordCounter.increment(1); } private long getOutputBytes(Statistics stats) { return stats == null ? 0 : stats.getBytesWritten(); } } class MapOutputBuffer<K extends Object, V extends Object> implements MapOutputCollector<K, V>, IndexedSortable { private final int partitions; private final JobConf job; private final TaskReporter reporter; private final Class<K> keyClass; private final Class<V> valClass; private final RawComparator<K> comparator; private final SerializationFactory serializationFactory; private final Serializer<K> keySerializer; private final Serializer<V> valSerializer; private final CombinerRunner<K,V> combinerRunner; private final CombineOutputCollector<K, V> combineCollector; // Compression for map-outputs private CompressionCodec codec = null; // k/v accounting private volatile int kvstart = 0; // marks beginning of spill private volatile int kvend = 0; // marks beginning of collectable private int kvindex = 0; // marks end of collected private final int[] kvoffsets; // indices into kvindices private final int[] kvindices; // partition, k/v offsets into kvbuffer private volatile int bufstart = 0; // marks beginning of spill private volatile int bufend = 0; // marks beginning of collectable private volatile int bufvoid = 0; // marks the point where we should stop // reading at the end of the buffer private int bufindex = 0; // marks end of collected private int bufmark = 0; // marks end of record private byte[] kvbuffer; // main output buffer private static final int PARTITION = 0; // partition offset in acct private static final int KEYSTART = 1; // key offset in acct private static final int VALSTART = 2; // val offset in acct private static final int ACCTSIZE = 3; // total #fields in acct private static final int RECSIZE = (ACCTSIZE + 1) * 4; // acct bytes per record // spill accounting private volatile int numSpills = 0; private volatile Throwable sortSpillException = null; private final int softRecordLimit; private final int softBufferLimit; private final int minSpillsForCombine; private final IndexedSorter sorter; private final ReentrantLock spillLock = new ReentrantLock(); private final Condition spillDone = spillLock.newCondition(); private final Condition spillReady = spillLock.newCondition(); private final BlockingBuffer bb = new BlockingBuffer(); private volatile boolean spillThreadRunning = false; private final SpillThread spillThread = new SpillThread(); private final FileSystem localFs; private final FileSystem rfs; private final Counters.Counter mapOutputByteCounter; private final Counters.Counter mapOutputRecordCounter; private final Counters.Counter combineOutputCounter; private final Counters.Counter fileOutputByteCounter; private ArrayList<SpillRecord> indexCacheList; private int totalIndexCacheMemory; private static final int INDEX_CACHE_MEMORY_LIMIT = 1024 * 1024; @SuppressWarnings("unchecked") public MapOutputBuffer(TaskUmbilicalProtocol umbilical, JobConf job, TaskReporter reporter ) throws IOException, ClassNotFoundException { this.job = job; this.reporter = reporter; localFs = FileSystem.getLocal(job); partitions = job.getNumReduceTasks(); rfs = ((LocalFileSystem)localFs).getRaw(); indexCacheList = new ArrayList<SpillRecord>(); //sanity checks final float spillper = 
job.getFloat("io.sort.spill.percent",(float)0.8); final float recper = job.getFloat("io.sort.record.percent",(float)0.05); final int sortmb = job.getInt("io.sort.mb", 100); if (spillper > (float)1.0 || spillper < (float)0.0) { throw new IOException("Invalid \"io.sort.spill.percent\": " + spillper); } if (recper > (float)1.0 || recper < (float)0.01) { throw new IOException("Invalid \"io.sort.record.percent\": " + recper); } if ((sortmb & 0x7FF) != sortmb) { throw new IOException("Invalid \"io.sort.mb\": " + sortmb); } sorter = ReflectionUtils.newInstance( job.getClass("map.sort.class", QuickSort.class, IndexedSorter.class), job); LOG.info("io.sort.mb = " + sortmb); // buffers and accounting int maxMemUsage = sortmb << 20; int recordCapacity = (int)(maxMemUsage * recper); recordCapacity -= recordCapacity % RECSIZE; kvbuffer = new byte[maxMemUsage - recordCapacity]; bufvoid = kvbuffer.length; recordCapacity /= RECSIZE; kvoffsets = new int[recordCapacity]; kvindices = new int[recordCapacity * ACCTSIZE]; softBufferLimit = (int)(kvbuffer.length * spillper); softRecordLimit = (int)(kvoffsets.length * spillper); LOG.info("data buffer = " + softBufferLimit + "/" + kvbuffer.length); LOG.info("record buffer = " + softRecordLimit + "/" + kvoffsets.length); // k/v serialization comparator = job.getOutputKeyComparator(); keyClass = (Class<K>)job.getMapOutputKeyClass(); valClass = (Class<V>)job.getMapOutputValueClass(); serializationFactory = new SerializationFactory(job); keySerializer = serializationFactory.getSerializer(keyClass); keySerializer.open(bb); valSerializer = serializationFactory.getSerializer(valClass); valSerializer.open(bb); // counters mapOutputByteCounter = reporter.getCounter(MAP_OUTPUT_BYTES); mapOutputRecordCounter = reporter.getCounter(MAP_OUTPUT_RECORDS); Counters.Counter combineInputCounter = reporter.getCounter(COMBINE_INPUT_RECORDS); combineOutputCounter = reporter.getCounter(COMBINE_OUTPUT_RECORDS); fileOutputByteCounter = reporter.getCounter(MAP_OUTPUT_MATERIALIZED_BYTES); // compression if (job.getCompressMapOutput()) { Class<? 
extends CompressionCodec> codecClass = job.getMapOutputCompressorClass(DefaultCodec.class); codec = ReflectionUtils.newInstance(codecClass, job); } // combiner combinerRunner = CombinerRunner.create(job, getTaskID(), combineInputCounter, reporter, null); if (combinerRunner != null) { combineCollector= new CombineOutputCollector<K,V>(combineOutputCounter); } else { combineCollector = null; } minSpillsForCombine = job.getInt("min.num.spills.for.combine", 3); spillThread.setDaemon(true); spillThread.setName("SpillThread"); spillLock.lock(); try { spillThread.start(); while (!spillThreadRunning) { spillDone.await(); } } catch (InterruptedException e) { throw (IOException)new IOException("Spill thread failed to initialize" ).initCause(sortSpillException); } finally { spillLock.unlock(); } if (sortSpillException != null) { throw (IOException)new IOException("Spill thread failed to initialize" ).initCause(sortSpillException); } } public synchronized void collect(K key, V value, int partition ) throws IOException { reporter.progress(); if (key.getClass() != keyClass) { throw new IOException("Type mismatch in key from map: expected " + keyClass.getName() + ", recieved " + key.getClass().getName()); } if (value.getClass() != valClass) { throw new IOException("Type mismatch in value from map: expected " + valClass.getName() + ", recieved " + value.getClass().getName()); } final int kvnext = (kvindex + 1) % kvoffsets.length; spillLock.lock(); try { boolean kvfull; do { if (sortSpillException != null) { throw (IOException)new IOException("Spill failed" ).initCause(sortSpillException); } // sufficient acct space kvfull = kvnext == kvstart; final boolean kvsoftlimit = ((kvnext > kvend) ? kvnext - kvend > softRecordLimit : kvend - kvnext <= kvoffsets.length - softRecordLimit); if (kvstart == kvend && kvsoftlimit) { LOG.info("Spilling map output: record full = " + kvsoftlimit); startSpill(); } if (kvfull) { try { while (kvstart != kvend) { reporter.progress(); spillDone.await(); } } catch (InterruptedException e) { throw (IOException)new IOException( "Collector interrupted while waiting for the writer" ).initCause(e); } } } while (kvfull); } finally { spillLock.unlock(); } try { // serialize key bytes into buffer int keystart = bufindex; keySerializer.serialize(key); if (bufindex < keystart) { // wrapped the key; reset required bb.reset(); keystart = 0; } // serialize value bytes into buffer final int valstart = bufindex; valSerializer.serialize(value); int valend = bb.markRecord(); if (partition < 0 || partition >= partitions) { throw new IOException("Illegal partition for " + key + " (" + partition + ")"); } mapOutputRecordCounter.increment(1); mapOutputByteCounter.increment(valend >= keystart ? valend - keystart : (bufvoid - keystart) + valend); // update accounting info int ind = kvindex * ACCTSIZE; kvoffsets[kvindex] = ind; kvindices[ind + PARTITION] = partition; kvindices[ind + KEYSTART] = keystart; kvindices[ind + VALSTART] = valstart; kvindex = kvnext; } catch (MapBufferTooSmallException e) { LOG.info("Record too large for in-memory buffer: " + e.getMessage()); spillSingleRecord(key, value, partition); mapOutputRecordCounter.increment(1); return; } } /** * Compare logical range, st i, j MOD offset capacity. * Compare by partition, then by key. 
* @see IndexedSortable#compare */ public int compare(int i, int j) { final int ii = kvoffsets[i % kvoffsets.length]; final int ij = kvoffsets[j % kvoffsets.length]; // sort by partition if (kvindices[ii + PARTITION] != kvindices[ij + PARTITION]) { return kvindices[ii + PARTITION] - kvindices[ij + PARTITION]; } // sort by key return comparator.compare(kvbuffer, kvindices[ii + KEYSTART], kvindices[ii + VALSTART] - kvindices[ii + KEYSTART], kvbuffer, kvindices[ij + KEYSTART], kvindices[ij + VALSTART] - kvindices[ij + KEYSTART]); } /** * Swap logical indices st i, j MOD offset capacity. * @see IndexedSortable#swap */ public void swap(int i, int j) { i %= kvoffsets.length; j %= kvoffsets.length; int tmp = kvoffsets[i]; kvoffsets[i] = kvoffsets[j]; kvoffsets[j] = tmp; } /** * Inner class managing the spill of serialized records to disk. */ protected class BlockingBuffer extends DataOutputStream { public BlockingBuffer() { this(new Buffer()); } private BlockingBuffer(OutputStream out) { super(out); } /** * Mark end of record. Note that this is required if the buffer is to * cut the spill in the proper place. */ public int markRecord() { bufmark = bufindex; return bufindex; } /** * Set position from last mark to end of writable buffer, then rewrite * the data between last mark and kvindex. * This handles a special case where the key wraps around the buffer. * If the key is to be passed to a RawComparator, then it must be * contiguous in the buffer. This recopies the data in the buffer back * into itself, but starting at the beginning of the buffer. Note that * reset() should <b>only</b> be called immediately after detecting * this condition. To call it at any other time is undefined and would * likely result in data loss or corruption. * @see #markRecord() */ protected synchronized void reset() throws IOException { // spillLock unnecessary; If spill wraps, then // bufindex < bufstart < bufend so contention is impossible // a stale value for bufstart does not affect correctness, since // we can only get false negatives that force the more // conservative path int headbytelen = bufvoid - bufmark; bufvoid = bufmark; if (bufindex + headbytelen < bufstart) { System.arraycopy(kvbuffer, 0, kvbuffer, headbytelen, bufindex); System.arraycopy(kvbuffer, bufvoid, kvbuffer, 0, headbytelen); bufindex += headbytelen; } else { byte[] keytmp = new byte[bufindex]; System.arraycopy(kvbuffer, 0, keytmp, 0, bufindex); bufindex = 0; out.write(kvbuffer, bufmark, headbytelen); out.write(keytmp); } } } public class Buffer extends OutputStream { private final byte[] scratch = new byte[1]; @Override public synchronized void write(int v) throws IOException { scratch[0] = (byte)v; write(scratch, 0, 1); } /** * Attempt to write a sequence of bytes to the collection buffer. * This method will block if the spill thread is running and it * cannot write. * @throws MapBufferTooSmallException if record is too large to * deserialize into the collection buffer. */ @Override public synchronized void write(byte b[], int off, int len) throws IOException { boolean buffull = false; boolean wrap = false; spillLock.lock(); try { do { if (sortSpillException != null) { throw (IOException)new IOException("Spill failed" ).initCause(sortSpillException); } // sufficient buffer space? 
if (bufstart <= bufend && bufend <= bufindex) { buffull = bufindex + len > bufvoid; wrap = (bufvoid - bufindex) + bufstart > len; } else { // bufindex <= bufstart <= bufend // bufend <= bufindex <= bufstart wrap = false; buffull = bufindex + len > bufstart; } if (kvstart == kvend) { // spill thread not running if (kvend != kvindex) { // we have records we can spill final boolean bufsoftlimit = (bufindex > bufend) ? bufindex - bufend > softBufferLimit : bufend - bufindex < bufvoid - softBufferLimit; if (bufsoftlimit || (buffull && !wrap)) { LOG.info("Spilling map output: buffer full= " + bufsoftlimit); startSpill(); } } else if (buffull && !wrap) { // We have no buffered records, and this record is too large // to write into kvbuffer. We must spill it directly from // collect final int size = ((bufend <= bufindex) ? bufindex - bufend : (bufvoid - bufend) + bufindex) + len; bufstart = bufend = bufindex = bufmark = 0; kvstart = kvend = kvindex = 0; bufvoid = kvbuffer.length; throw new MapBufferTooSmallException(size + " bytes"); } } if (buffull && !wrap) { try { while (kvstart != kvend) { reporter.progress(); spillDone.await(); } } catch (InterruptedException e) { throw (IOException)new IOException( "Buffer interrupted while waiting for the writer" ).initCause(e); } } } while (buffull && !wrap); } finally { spillLock.unlock(); } // here, we know that we have sufficient space to write if (buffull) { final int gaplen = bufvoid - bufindex; System.arraycopy(b, off, kvbuffer, bufindex, gaplen); len -= gaplen; off += gaplen; bufindex = 0; } System.arraycopy(b, off, kvbuffer, bufindex, len); bufindex += len; } } public synchronized void flush() throws IOException, ClassNotFoundException, InterruptedException { LOG.info("Starting flush of map output"); spillLock.lock(); try { while (kvstart != kvend) { reporter.progress(); spillDone.await(); } if (sortSpillException != null) { throw (IOException)new IOException("Spill failed" ).initCause(sortSpillException); } if (kvend != kvindex) { kvend = kvindex; bufend = bufmark; sortAndSpill(); } } catch (InterruptedException e) { throw (IOException)new IOException( "Buffer interrupted while waiting for the writer" ).initCause(e); } finally { spillLock.unlock(); } assert !spillLock.isHeldByCurrentThread(); // shut down spill thread and wait for it to exit. Since the preceding // ensures that it is finished with its work (and sortAndSpill did not // throw), we elect to use an interrupt instead of setting a flag. // Spilling simultaneously from this thread while the spill thread // finishes its work might be both a useful way to extend this and also // sufficient motivation for the latter approach. 
try { spillThread.interrupt(); spillThread.join(); } catch (InterruptedException e) { throw (IOException)new IOException("Spill failed" ).initCause(e); } // release sort buffer before the merge kvbuffer = null; mergeParts(); Path outputPath = mapOutputFile.getOutputFile(); fileOutputByteCounter.increment(rfs.getFileStatus(outputPath).getLen()); } public void close() { } protected class SpillThread extends Thread { @Override public void run() { spillLock.lock(); spillThreadRunning = true; try { while (true) { spillDone.signal(); while (kvstart == kvend) { spillReady.await(); } try { spillLock.unlock(); sortAndSpill(); } catch (Exception e) { sortSpillException = e; } catch (Throwable t) { sortSpillException = t; String logMsg = "Task " + getTaskID() + " failed : " + StringUtils.stringifyException(t); reportFatalError(getTaskID(), t, logMsg); } finally { spillLock.lock(); if (bufend < bufindex && bufindex < bufstart) { bufvoid = kvbuffer.length; } kvstart = kvend; bufstart = bufend; } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); } finally { spillLock.unlock(); spillThreadRunning = false; } } } private synchronized void startSpill() { LOG.info("bufstart = " + bufstart + "; bufend = " + bufmark + "; bufvoid = " + bufvoid); LOG.info("kvstart = " + kvstart + "; kvend = " + kvindex + "; length = " + kvoffsets.length); kvend = kvindex; bufend = bufmark; spillReady.signal(); } private void sortAndSpill() throws IOException, ClassNotFoundException, InterruptedException { //approximate the length of the output file to be the length of the //buffer + header lengths for the partitions long size = (bufend >= bufstart ? bufend - bufstart : (bufvoid - bufend) + bufstart) + partitions * APPROX_HEADER_LENGTH; FSDataOutputStream out = null; try { // create spill file final SpillRecord spillRec = new SpillRecord(partitions); final Path filename = mapOutputFile.getSpillFileForWrite(numSpills, size); out = rfs.create(filename); final int endPosition = (kvend > kvstart) ? 
kvend : kvoffsets.length + kvend; sorter.sort(MapOutputBuffer.this, kvstart, endPosition, reporter); int spindex = kvstart; IndexRecord rec = new IndexRecord(); InMemValBytes value = new InMemValBytes(); for (int i = 0; i < partitions; ++i) { IFile.Writer<K, V> writer = null; try { long segmentStart = out.getPos(); writer = new Writer<K, V>(job, out, keyClass, valClass, codec, spilledRecordsCounter); if (combinerRunner == null) { // spill directly DataInputBuffer key = new DataInputBuffer(); while (spindex < endPosition && kvindices[kvoffsets[spindex % kvoffsets.length] + PARTITION] == i) { final int kvoff = kvoffsets[spindex % kvoffsets.length]; getVBytesForOffset(kvoff, value); key.reset(kvbuffer, kvindices[kvoff + KEYSTART], (kvindices[kvoff + VALSTART] - kvindices[kvoff + KEYSTART])); writer.append(key, value); ++spindex; } } else { int spstart = spindex; while (spindex < endPosition && kvindices[kvoffsets[spindex % kvoffsets.length] + PARTITION] == i) { ++spindex; } // Note: we would like to avoid the combiner if we've fewer // than some threshold of records for a partition if (spstart != spindex) { combineCollector.setWriter(writer); RawKeyValueIterator kvIter = new MRResultIterator(spstart, spindex); combinerRunner.combine(kvIter, combineCollector); } } // close the writer writer.close(); // record offsets rec.startOffset = segmentStart; rec.rawLength = writer.getRawLength(); rec.partLength = writer.getCompressedLength(); spillRec.putIndex(rec, i); writer = null; } finally { if (null != writer) writer.close(); } } if (totalIndexCacheMemory >= INDEX_CACHE_MEMORY_LIMIT) { // create spill index file Path indexFilename = mapOutputFile.getSpillIndexFileForWrite(numSpills, partitions * MAP_OUTPUT_INDEX_RECORD_LENGTH); spillRec.writeToFile(indexFilename, job); } else { indexCacheList.add(spillRec); totalIndexCacheMemory += spillRec.size() * MAP_OUTPUT_INDEX_RECORD_LENGTH; } LOG.info("Finished spill " + numSpills); ++numSpills; } finally { if (out != null) out.close(); } } /** * Handles the degenerate case where serialization fails to fit in * the in-memory buffer, so we must spill the record from collect * directly to a spill file. Consider this "losing". */ private void spillSingleRecord(final K key, final V value, int partition) throws IOException { long size = kvbuffer.length + partitions * APPROX_HEADER_LENGTH; FSDataOutputStream out = null; try { // create spill file final SpillRecord spillRec = new SpillRecord(partitions); final Path filename = mapOutputFile.getSpillFileForWrite(numSpills, size); out = rfs.create(filename); // we don't run the combiner for a single record IndexRecord rec = new IndexRecord(); for (int i = 0; i < partitions; ++i) { IFile.Writer<K, V> writer = null; try { long segmentStart = out.getPos(); // Create a new codec, don't care! 
writer = new IFile.Writer<K,V>(job, out, keyClass, valClass, codec, spilledRecordsCounter); if (i == partition) { final long recordStart = out.getPos(); writer.append(key, value); // Note that our map byte count will not be accurate with // compression mapOutputByteCounter.increment(out.getPos() - recordStart); } writer.close(); // record offsets rec.startOffset = segmentStart; rec.rawLength = writer.getRawLength(); rec.partLength = writer.getCompressedLength(); spillRec.putIndex(rec, i); writer = null; } catch (IOException e) { if (null != writer) writer.close(); throw e; } } if (totalIndexCacheMemory >= INDEX_CACHE_MEMORY_LIMIT) { // create spill index file Path indexFilename = mapOutputFile.getSpillIndexFileForWrite(numSpills, partitions * MAP_OUTPUT_INDEX_RECORD_LENGTH); spillRec.writeToFile(indexFilename, job); } else { indexCacheList.add(spillRec); totalIndexCacheMemory += spillRec.size() * MAP_OUTPUT_INDEX_RECORD_LENGTH; } ++numSpills; } finally { if (out != null) out.close(); } } /** * Given an offset, populate vbytes with the associated set of * deserialized value bytes. Should only be called during a spill. */ private void getVBytesForOffset(int kvoff, InMemValBytes vbytes) { final int nextindex = (kvoff / ACCTSIZE == (kvend - 1 + kvoffsets.length) % kvoffsets.length) ? bufend : kvindices[(kvoff + ACCTSIZE + KEYSTART) % kvindices.length]; int vallen = (nextindex >= kvindices[kvoff + VALSTART]) ? nextindex - kvindices[kvoff + VALSTART] : (bufvoid - kvindices[kvoff + VALSTART]) + nextindex; vbytes.reset(kvbuffer, kvindices[kvoff + VALSTART], vallen); } /** * Inner class wrapping valuebytes, used for appendRaw. */ protected class InMemValBytes extends DataInputBuffer { private byte[] buffer; private int start; private int length; public void reset(byte[] buffer, int start, int length) { this.buffer = buffer; this.start = start; this.length = length; if (start + length > bufvoid) { this.buffer = new byte[this.length]; final int taillen = bufvoid - start; System.arraycopy(buffer, start, this.buffer, 0, taillen); System.arraycopy(buffer, 0, this.buffer, taillen, length-taillen); this.start = 0; } super.reset(this.buffer, this.start, this.length); } } protected class MRResultIterator implements RawKeyValueIterator { private final DataInputBuffer keybuf = new DataInputBuffer(); private final InMemValBytes vbytes = new InMemValBytes(); private final int end; private int current; public MRResultIterator(int start, int end) { this.end = end; current = start - 1; } public boolean next() throws IOException { return ++current < end; } public DataInputBuffer getKey() throws IOException { final int kvoff = kvoffsets[current % kvoffsets.length]; keybuf.reset(kvbuffer, kvindices[kvoff + KEYSTART], kvindices[kvoff + VALSTART] - kvindices[kvoff + KEYSTART]); return keybuf; } public DataInputBuffer getValue() throws IOException { getVBytesForOffset(kvoffsets[current % kvoffsets.length], vbytes); return vbytes; } public Progress getProgress() { return null; } public void close() { } } private void mergeParts() throws IOException, InterruptedException, ClassNotFoundException { // get the approximate size of the final output/index files long finalOutFileSize = 0; long finalIndexFileSize = 0; final Path[] filename = new Path[numSpills]; final TaskAttemptID mapId = getTaskID(); for(int i = 0; i < numSpills; i++) { filename[i] = mapOutputFile.getSpillFile(i); finalOutFileSize += rfs.getFileStatus(filename[i]).getLen(); } if (numSpills == 1) { //the spill is the final output rfs.rename(filename[0], new 
Path(filename[0].getParent(), "file.out")); if (indexCacheList.size() == 0) { rfs.rename(mapOutputFile.getSpillIndexFile(0), new Path(filename[0].getParent(),"file.out.index")); } else { indexCacheList.get(0).writeToFile( new Path(filename[0].getParent(),"file.out.index"), job); } return; } // read in paged indices for (int i = indexCacheList.size(); i < numSpills; ++i) { Path indexFileName = mapOutputFile.getSpillIndexFile(i); indexCacheList.add(new SpillRecord(indexFileName, job, null)); } //make correction in the length to include the sequence file header //lengths for each partition finalOutFileSize += partitions * APPROX_HEADER_LENGTH; finalIndexFileSize = partitions * MAP_OUTPUT_INDEX_RECORD_LENGTH; Path finalOutputFile = mapOutputFile.getOutputFileForWrite(finalOutFileSize); Path finalIndexFile = mapOutputFile.getOutputIndexFileForWrite(finalIndexFileSize); //The output stream for the final single output file FSDataOutputStream finalOut = rfs.create(finalOutputFile, true, 4096); if (numSpills == 0) { //create dummy files IndexRecord rec = new IndexRecord(); SpillRecord sr = new SpillRecord(partitions); try { for (int i = 0; i < partitions; i++) { long segmentStart = finalOut.getPos(); Writer<K, V> writer = new Writer<K, V>(job, finalOut, keyClass, valClass, codec, null); writer.close(); rec.startOffset = segmentStart; rec.rawLength = writer.getRawLength(); rec.partLength = writer.getCompressedLength(); sr.putIndex(rec, i); } sr.writeToFile(finalIndexFile, job); } finally { finalOut.close(); } return; } { IndexRecord rec = new IndexRecord(); final SpillRecord spillRec = new SpillRecord(partitions); for (int parts = 0; parts < partitions; parts++) { //create the segments to be merged List<Segment<K,V>> segmentList = new ArrayList<Segment<K, V>>(numSpills); for(int i = 0; i < numSpills; i++) { IndexRecord indexRecord = indexCacheList.get(i).getIndex(parts); Segment<K,V> s = new Segment<K,V>(job, rfs, filename[i], indexRecord.startOffset, indexRecord.partLength, codec, true); segmentList.add(i, s); if (LOG.isDebugEnabled()) { LOG.debug("MapId=" + mapId + " Reducer=" + parts + "Spill =" + i + "(" + indexRecord.startOffset + "," + indexRecord.rawLength + ", " + indexRecord.partLength + ")"); } } //merge @SuppressWarnings("unchecked") RawKeyValueIterator kvIter = Merger.merge(job, rfs, keyClass, valClass, codec, segmentList, job.getInt("io.sort.factor", 100), new Path(mapId.toString()), job.getOutputKeyComparator(), reporter, null, spilledRecordsCounter); //write merged output to disk long segmentStart = finalOut.getPos(); Writer<K, V> writer = new Writer<K, V>(job, finalOut, keyClass, valClass, codec, spilledRecordsCounter); if (combinerRunner == null || numSpills < minSpillsForCombine) { Merger.writeFile(kvIter, writer, reporter, job); } else { combineCollector.setWriter(writer); combinerRunner.combine(kvIter, combineCollector); } //close writer.close(); // record offsets rec.startOffset = segmentStart; rec.rawLength = writer.getRawLength(); rec.partLength = writer.getCompressedLength(); spillRec.putIndex(rec, parts); } spillRec.writeToFile(finalIndexFile, job); finalOut.close(); for(int i = 0; i < numSpills; i++) { rfs.delete(filename[i],true); } } } } // MapOutputBuffer /** * Exception indicating that the allocated sort buffer is insufficient * to hold the current record. */ @SuppressWarnings("serial") private static class MapBufferTooSmallException extends IOException { public MapBufferTooSmallException(String s) { super(s); } } }
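The buffer-sizing arithmetic in MapOutputBuffer's constructor (io.sort.mb, io.sort.record.percent, io.sort.spill.percent) is easy to lose in the flattened listing above, so here is a minimal standalone sketch of it using the default values the constructor reads. The class and variable names are illustrative; only the formulas are taken from the code above.

// Standalone sketch (illustrative names) of the MapOutputBuffer sizing math,
// using the defaults read above: io.sort.mb = 100, io.sort.record.percent = 0.05,
// io.sort.spill.percent = 0.8.
public class SortBufferSizingSketch {

  // Mirrors the accounting constants: partition, key offset and value offset in
  // kvindices, plus the one kvoffsets int per record => 16 accounting bytes/record.
  static final int ACCTSIZE = 3;
  static final int RECSIZE = (ACCTSIZE + 1) * 4;

  public static void main(String[] args) {
    int sortmb = 100;        // io.sort.mb
    float recper = 0.05f;    // io.sort.record.percent
    float spillper = 0.8f;   // io.sort.spill.percent

    int maxMemUsage = sortmb << 20;                    // 100 MB in bytes
    int recordCapacity = (int) (maxMemUsage * recper); // bytes reserved for record accounting
    recordCapacity -= recordCapacity % RECSIZE;        // round down to whole accounting records
    int kvbufferLength = maxMemUsage - recordCapacity; // bytes left for serialized key/value data
    recordCapacity /= RECSIZE;                         // number of records the accounting arrays hold

    int softBufferLimit = (int) (kvbufferLength * spillper); // byte threshold that triggers a spill
    int softRecordLimit = (int) (recordCapacity * spillper); // record threshold that triggers a spill

    System.out.println("data buffer = " + softBufferLimit + "/" + kvbufferLength);
    System.out.println("record buffer = " + softRecordLimit + "/" + recordCapacity);
  }
}

With the defaults this prints the same split the task logs above ("data buffer = 79691776/99614720", "record buffer = 262144/327680").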
Fixed comment.
src/mapred/org/apache/hadoop/mapred/MapTask.java
Fixed comment.
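The OldOutputCollector comment in the MapTask.java listing above notes that the configured partitioner must not be called for map-only jobs, because partitioners commonly compute a result modulo the number of reduces and therefore divide by zero when that number is 0. The sketch below illustrates that failure mode and the "-1" workaround the collector installs; HashingPartitioner is a common illustrative pattern, not code taken from Hadoop.

// Sketch of the divide-by-zero hazard described in the OldOutputCollector comment,
// and of the dummy partitioner that sidesteps it for map-only jobs.
interface SimplePartitioner<K> {
  int getPartition(K key, int numPartitions);
}

public class PartitionerSketch {

  static class HashingPartitioner<K> implements SimplePartitioner<K> {
    public int getPartition(K key, int numPartitions) {
      // Fails with ArithmeticException when numPartitions == 0 (a map-only job).
      return (key.hashCode() & Integer.MAX_VALUE) % numPartitions;
    }
  }

  public static void main(String[] args) {
    int numReduceTasks = 0; // map-only job
    SimplePartitioner<String> partitioner;
    if (numReduceTasks > 0) {
      partitioner = new HashingPartitioner<String>();
    } else {
      // Mirrors OldOutputCollector: install a partitioner that always answers -1
      // so the user's partitioner is never consulted.
      partitioner = new SimplePartitioner<String>() {
        public int getPartition(String key, int numPartitions) {
          return -1;
        }
      };
    }
    System.out.println(partitioner.getPartition("some-key", numReduceTasks)); // prints -1
  }
}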
Java
apache-2.0
32eb8851e2c6cb4fc6125ed0f04edf4344b01fe6
0
freedesktop-unofficial-mirror/wayland__wayland-java
package examples; import org.freedesktop.wayland.client.*; import org.freedesktop.wayland.shared.WlShmFormat; import org.freedesktop.wayland.util.Fixed; import javax.annotation.Nonnull; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.IntBuffer; public class Window { public class Buffer { private final ShmPool shmPool; private WlBufferProxy bufferProxy; private final ByteBuffer byteBuffer; public Buffer() { try { this.shmPool = new ShmPool(Window.this.width * Window.this.height * 4); WlShmPoolProxy pool = Window.this.display.getShmProxy() .createPool(new WlShmPoolEvents() { }, this.shmPool.getFileDescriptor(), Window.this.width * Window.this.height * 4); this.bufferProxy = pool.createBuffer(new WlBufferEvents() { @Override public void release(final WlBufferProxy emitter) { } }, 0, Window.this.width, Window.this.height, Window.this.width * 4, WlShmFormat.XRGB8888.getValue()); pool.destroy(); this.byteBuffer = this.shmPool.asByteBuffer(); } catch (IOException e) { throw new RuntimeException(e); } } public ByteBuffer getByteBuffer() { return this.byteBuffer; } public WlBufferProxy getProxy() { return this.bufferProxy; } } private final Display display; private final int width; private final int height; private WlSurfaceProxy surfaceProxy; private WlCallbackProxy callbackProxy; private Buffer buffer; public Window(Display display, int width, int height) { this.display = display; this.width = width; this.height = height; this.buffer = new Buffer(); this.surfaceProxy = display.getCompositorProxy() .createSurface(new WlSurfaceEvents() { @Override public void enter(final WlSurfaceProxy emitter, @Nonnull final WlOutputProxy output) { } @Override public void leave(final WlSurfaceProxy emitter, @Nonnull final WlOutputProxy output) { } }); this.surfaceProxy.damage(0, 0, width, height); final WlRegionProxy inputRegion = display.getCompositorProxy() .createRegion(new WlRegionEvents() { }); inputRegion.add(0, 0, width, height); this.surfaceProxy.setInputRegion(inputRegion); } public void destroy() { if (this.callbackProxy != null) { this.callbackProxy.destroy(); } this.surfaceProxy.destroy(); } private int abs(int i) { return i < 0 ? -i : i; } private void paintPixels(ByteBuffer buffer, int padding, int time) { final int halfh = padding + (this.height - padding * 2) / 2; final int halfw = padding + (this.width - padding * 2) / 2; int ir; int or; IntBuffer image = buffer.asIntBuffer(); image.clear(); for (int i = 0; i < this.width * this.height; ++i) { image.put(0xffffffff); } image.clear(); /* squared radii thresholds */ or = (halfw < halfh ? 
halfw : halfh) - 8; ir = or - 32; or = or * or; ir = ir * ir; image.position(padding * this.width); for (int y = padding; y < this.height - padding; y++) { int y2 = (y - halfh) * (y - halfh); image.position(image.position() + padding); for (int x = padding; x < this.width - padding; x++) { int v; int r2 = (x - halfw) * (x - halfw) + y2; if (r2 < ir) { v = (r2 / 32 + time / 64) * 0x0080401; } else if (r2 < or) { v = (y + time / 32) * 0x0080401; } else { v = (x + time / 16) * 0x0080401; } v &= 0x00ffffff; if (abs(x - y) > 6 && abs(x + y - this.height) > 6) { v |= 0xff000000; } image.put(v); } image.position(image.position() + padding); } } public void redraw(final int time) { paintPixels(this.buffer.getByteBuffer(), 20, time); this.display.getSeatProxy().getPointer(new WlPointerEventsV3() { @Override public void enter(final WlPointerProxy emitter, @Nonnull final int serial, @Nonnull final WlSurfaceProxy surface, @Nonnull final Fixed surfaceX, @Nonnull final Fixed surfaceY) { } @Override public void leave(final WlPointerProxy emitter, @Nonnull final int serial, @Nonnull final WlSurfaceProxy surface) { } @Override public void motion(final WlPointerProxy emitter, @Nonnull final int time, @Nonnull final Fixed surfaceX, @Nonnull final Fixed surfaceY) { } @Override public void button(final WlPointerProxy emitter, @Nonnull final int serial, @Nonnull final int time, @Nonnull final int button, @Nonnull final int state) { } @Override public void axis(final WlPointerProxy emitter, @Nonnull final int time, @Nonnull final int axis, @Nonnull final Fixed value) { } }); this.surfaceProxy.attach(this.buffer.getProxy(), 0, 0); this.surfaceProxy.damage(20, 20, this.height - 40, this.height - 40); final WlCallbackEvents wlCallbackEvents = new WlCallbackEvents() { @Override public void done(final WlCallbackProxy emitter, final int callbackData) { Window.this.callbackProxy.destroy(); redraw(callbackData); } }; this.callbackProxy = this.surfaceProxy.frame(wlCallbackEvents); this.surfaceProxy.commit(); } }
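The paintPixels method in the Window example above packs the whole per-pixel color decision into one dense loop body. Purely for readability, the same arithmetic is pulled out below into a standalone helper; the method and parameter names are mine, the formulas are copied from the loop.

// The per-pixel computation from paintPixels, extracted as a helper.
// Names are illustrative; the arithmetic matches the loop body above.
public class PixelSketch {

  static int pixel(int x, int y, int time, int height,
                   int halfw, int halfh, int irSquared, int orSquared) {
    int y2 = (y - halfh) * (y - halfh);
    int r2 = (x - halfw) * (x - halfw) + y2;
    int v;
    if (r2 < irSquared) {
      v = (r2 / 32 + time / 64) * 0x0080401;  // inner disc
    } else if (r2 < orSquared) {
      v = (y + time / 32) * 0x0080401;        // ring between the two radii
    } else {
      v = (x + time / 16) * 0x0080401;        // outside the ring
    }
    v &= 0x00ffffff;                          // keep the RGB channels
    if (Math.abs(x - y) > 6 && Math.abs(x + y - height) > 6) {
      v |= 0xff000000;                        // fully opaque away from the two transparent diagonals
    }
    return v;
  }

  public static void main(String[] args) {
    int width = 250, height = 250, padding = 20;
    int halfh = padding + (height - padding * 2) / 2;
    int halfw = padding + (width - padding * 2) / 2;
    int or = (halfw < halfh ? halfw : halfh) - 8;
    int ir = or - 32;
    System.out.printf("pixel at (40, 40), t=0: 0x%08x%n",
        pixel(40, 40, 0, height, halfw, halfh, ir * ir, or * or));
  }
}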
examples/src/main/java/examples/Window.java
package examples; import org.freedesktop.wayland.client.*; import org.freedesktop.wayland.shared.WlShmFormat; import javax.annotation.Nonnull; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.IntBuffer; public class Window { public class Buffer { private final ShmPool shmPool; private WlBufferProxy bufferProxy; private final ByteBuffer byteBuffer; public Buffer() { try { shmPool = new ShmPool(width * height * 4); WlShmPoolProxy pool = display.getShmProxy() .createPool(new WlShmPoolEvents() { }, shmPool.getFileDescriptor(), width * height * 4); bufferProxy = pool.createBuffer(new WlBufferEvents() { @Override public void release(final WlBufferProxy emitter) { } }, 0, width, height, width * 4, WlShmFormat.XRGB8888.getValue()); pool.destroy(); byteBuffer = shmPool.asByteBuffer(); } catch (IOException e) { throw new RuntimeException(e); } } public ByteBuffer getByteBuffer() { return byteBuffer; } public WlBufferProxy getProxy() { return bufferProxy; } } private final Display display; private final int width; private final int height; private WlSurfaceProxy surfaceProxy; private WlCallbackProxy callbackProxy; private Buffer buffer; public Window(Display display, int width, int height) { this.display = display; this.width = width; this.height = height; buffer = new Buffer(); surfaceProxy = display.getCompositorProxy() .createSurface(new WlSurfaceEvents() { @Override public void enter(final WlSurfaceProxy emitter, @Nonnull final WlOutputProxy output) { } @Override public void leave(final WlSurfaceProxy emitter, @Nonnull final WlOutputProxy output) { } }); surfaceProxy.damage(0, 0, width, height); } public void destroy() { if (callbackProxy != null) { callbackProxy.destroy(); } surfaceProxy.destroy(); } private int abs(int i) { return i < 0 ? -i : i; } private void paintPixels(ByteBuffer buffer, int padding, int time) { final int halfh = padding + (height - padding * 2) / 2; final int halfw = padding + (width - padding * 2) / 2; int ir; int or; IntBuffer image = buffer.asIntBuffer(); image.clear(); for (int i = 0; i < width * height; ++i) { image.put(0xffffffff); } image.clear(); /* squared radii thresholds */ or = (halfw < halfh ? halfw : halfh) - 8; ir = or - 32; or = or * or; ir = ir * ir; image.position(padding * width); for (int y = padding; y < height - padding; y++) { int y2 = (y - halfh) * (y - halfh); image.position(image.position() + padding); for (int x = padding; x < width - padding; x++) { int v; int r2 = (x - halfw) * (x - halfw) + y2; if (r2 < ir) { v = (r2 / 32 + time / 64) * 0x0080401; } else if (r2 < or) { v = (y + time / 32) * 0x0080401; } else { v = (x + time / 16) * 0x0080401; } v &= 0x00ffffff; if (abs(x - y) > 6 && abs(x + y - height) > 6) { v |= 0xff000000; } image.put(v); } image.position(image.position() + padding); } } public void redraw(final int time) { paintPixels(buffer.getByteBuffer(), 20, time); surfaceProxy.attach(buffer.getProxy(), 0, 0); surfaceProxy.damage(20, 20, height - 40, height - 40); final WlCallbackEvents wlCallbackEvents = new WlCallbackEvents() { @Override public void done(final WlCallbackProxy emitter, final int callbackData) { callbackProxy.destroy(); redraw(callbackData); } }; callbackProxy = surfaceProxy.frame(wlCallbackEvents); surfaceProxy.commit(); } }
let window listen for pointer events
examples/src/main/java/examples/Window.java
let window listen for pointer events
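Because the new file contents above are flattened onto a single line, the pointer registration that this commit adds is easy to miss. The sketch below isolates it; it is not a drop-in class but a fragment of Window.redraw(int), with the Display/Wl*Proxy types and the WlPointerEventsV3 callback signatures taken from the diff above, and the listener bodies left empty exactly as committed.

    // Register for pointer events on the seat (names taken from the diff above).
    this.display.getSeatProxy().getPointer(new WlPointerEventsV3() {
        @Override
        public void enter(WlPointerProxy emitter, int serial, WlSurfaceProxy surface, Fixed surfaceX, Fixed surfaceY) { }

        @Override
        public void leave(WlPointerProxy emitter, int serial, WlSurfaceProxy surface) { }

        @Override
        public void motion(WlPointerProxy emitter, int time, Fixed surfaceX, Fixed surfaceY) { }

        @Override
        public void button(WlPointerProxy emitter, int serial, int time, int button, int state) { }

        @Override
        public void axis(WlPointerProxy emitter, int time, int axis, Fixed value) { }
    });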
Java
apache-2.0
807d17642b0923f524207afba2c8152886ef99fd
0
apache/jmeter,apache/jmeter,etnetera/jmeter,ham1/jmeter,ham1/jmeter,benbenw/jmeter,etnetera/jmeter,etnetera/jmeter,benbenw/jmeter,ham1/jmeter,etnetera/jmeter,etnetera/jmeter,apache/jmeter,ham1/jmeter,apache/jmeter,ham1/jmeter,benbenw/jmeter,apache/jmeter,benbenw/jmeter
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * */ package org.apache.jmeter.visualizers; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Vector; import javax.swing.BoxLayout; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComponent; import javax.swing.JFileChooser; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.JTable; import javax.swing.border.Border; import javax.swing.border.EmptyBorder; import javax.swing.table.TableCellRenderer; import org.apache.jmeter.gui.action.ActionNames; import org.apache.jmeter.gui.action.ActionRouter; import org.apache.jmeter.gui.action.SaveGraphics; import org.apache.jmeter.gui.util.FileDialoger; import org.apache.jmeter.gui.util.HorizontalPanel; import org.apache.jmeter.gui.util.VerticalPanel; import org.apache.jmeter.samplers.Clearable; import org.apache.jmeter.samplers.SampleResult; import org.apache.jmeter.save.CSVSaveService; import org.apache.jmeter.util.JMeterUtils; import org.apache.jmeter.visualizers.gui.AbstractVisualizer; import org.apache.jorphan.gui.JLabeledChoice; import org.apache.jorphan.gui.JLabeledTextField; import org.apache.jorphan.gui.NumberRenderer; import org.apache.jorphan.gui.ObjectTableModel; import org.apache.jorphan.gui.RateRenderer; import org.apache.jorphan.gui.RendererUtils; import org.apache.jorphan.logging.LoggingManager; import org.apache.jorphan.reflect.Functor; import org.apache.jorphan.util.JOrphanUtils; import org.apache.log.Logger; /** * Aggregrate Table-Based Reporting Visualizer for JMeter. Props to the people * who've done the other visualizers ahead of me (Stefano Mazzocchi), who I * borrowed code from to start me off (and much code may still exist). Thank * you! 
* */ public class StatGraphVisualizer extends AbstractVisualizer implements Clearable, ActionListener { private static final Logger log = LoggingManager.getLoggerForClass(); private final String[] COLUMNS = { JMeterUtils.getResString("sampler_label"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_count"), //$NON-NLS-1$ JMeterUtils.getResString("average"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_median"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_90%_line"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_min"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_max"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_error%"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_rate"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_bandwidth") }; //$NON-NLS-1$ private final String[] GRAPH_COLUMNS = {JMeterUtils.getResString("average"),//$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_median"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_90%_line"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_min"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_max")}; //$NON-NLS-1$ private final String TOTAL_ROW_LABEL = JMeterUtils.getResString("aggregate_report_total_label"); //$NON-NLS-1$ protected JTable myJTable; protected JScrollPane myScrollPane; private transient ObjectTableModel model; Map tableRows = Collections.synchronizedMap(new HashMap()); protected AxisGraph graphPanel = null; protected VerticalPanel graph = null; protected JScrollPane graphScroll = null; protected JSplitPane spane = null; protected JLabeledChoice columns = new JLabeledChoice(JMeterUtils.getResString("aggregate_graph_column"),GRAPH_COLUMNS);//$NON-NLS-1$ //NOT USED protected double[][] data = null; protected JButton displayButton = new JButton(JMeterUtils.getResString("aggregate_graph_display")); //$NON-NLS-1$ protected JButton saveGraph = new JButton(JMeterUtils.getResString("aggregate_graph_save")); //$NON-NLS-1$ protected JButton saveTable = new JButton(JMeterUtils.getResString("aggregate_graph_save_table")); //$NON-NLS-1$ private JCheckBox saveHeaders = // should header be saved with the data? 
new JCheckBox(JMeterUtils.getResString("aggregate_graph_save_table_header")); //$NON-NLS-1$ JLabeledTextField graphTitle = new JLabeledTextField(JMeterUtils.getResString("aggregate_graph_user_title")); //$NON-NLS-1$ JLabeledTextField maxLengthXAxisLabel = new JLabeledTextField(JMeterUtils.getResString("aggregate_graph_max_length_xaxis_label"));//$NON-NLS-1$ JLabeledTextField graphWidth = new JLabeledTextField(JMeterUtils.getResString("aggregate_graph_width")); //$NON-NLS-1$ JLabeledTextField graphHeight = new JLabeledTextField(JMeterUtils.getResString("aggregate_graph_height")); //$NON-NLS-1$ protected String yAxisLabel = JMeterUtils.getResString("aggregate_graph_response_time");//$NON-NLS-1$ protected String yAxisTitle = JMeterUtils.getResString("aggregate_graph_ms"); //$NON-NLS-1$ protected boolean saveGraphToFile = false; protected int defaultWidth = 400; protected int defaultHeight = 300; public StatGraphVisualizer() { super(); model = new ObjectTableModel(COLUMNS, SamplingStatCalculator.class, new Functor[] { new Functor("getLabel"), //$NON-NLS-1$ new Functor("getCount"), //$NON-NLS-1$ new Functor("getMeanAsNumber"), //$NON-NLS-1$ new Functor("getMedian"), //$NON-NLS-1$ new Functor("getPercentPoint", //$NON-NLS-1$ new Object[] { new Float(.900) }), new Functor("getMin"), //$NON-NLS-1$ new Functor("getMax"), //$NON-NLS-1$ new Functor("getErrorPercentage"), //$NON-NLS-1$ new Functor("getRate"), //$NON-NLS-1$ new Functor("getKBPerSecond") }, //$NON-NLS-1$ new Functor[] { null, null, null, null, null, null, null, null, null, null }, new Class[] { String.class, Long.class, Long.class, Long.class, Long.class, Long.class, Long.class, String.class, String.class, String.class }); clearData(); init(); } // Column renderers private static final TableCellRenderer[] RENDERERS = new TableCellRenderer[]{ null, // Label null, // count null, // Mean null, // median null, // 90% null, // Min null, // Max new NumberRenderer("#0.00%"), // Error %age new RateRenderer("#.0"), // Throughpur new NumberRenderer("#.0"), // pageSize }; public static boolean testFunctors(){ StatGraphVisualizer instance = new StatGraphVisualizer(); return instance.model.checkFunctors(null,instance.getClass()); } public String getLabelResource() { return "aggregate_graph_title"; //$NON-NLS-1$ } public void add(SampleResult res) { SamplingStatCalculator row = null; final String sampleLabel = res.getSampleLabel(); synchronized (tableRows) { row = (SamplingStatCalculator) tableRows.get(sampleLabel); if (row == null) { row = new SamplingStatCalculator(sampleLabel); tableRows.put(row.getLabel(), row); model.insertRow(row, model.getRowCount() - 1); } } row.addSample(res); ((SamplingStatCalculator) tableRows.get(TOTAL_ROW_LABEL)).addSample(res); model.fireTableDataChanged(); } /** * Clears this visualizer and its model, and forces a repaint of the table. */ public void clearData() { model.clearData(); tableRows.clear(); tableRows.put(TOTAL_ROW_LABEL, new SamplingStatCalculator(TOTAL_ROW_LABEL)); model.addRow(tableRows.get(TOTAL_ROW_LABEL)); } /** * Main visualizer setup. 
*/ private void init() { this.setLayout(new BorderLayout()); // MAIN PANEL JPanel mainPanel = new JPanel(); Border margin = new EmptyBorder(10, 10, 5, 10); Border margin2 = new EmptyBorder(10, 10, 5, 10); mainPanel.setBorder(margin); mainPanel.setLayout(new BoxLayout(mainPanel, BoxLayout.Y_AXIS)); mainPanel.add(makeTitlePanel()); myJTable = new JTable(model); myJTable.setPreferredScrollableViewportSize(new Dimension(500, 80)); RendererUtils.applyRenderers(myJTable, RENDERERS); myScrollPane = new JScrollPane(myJTable); graph = new VerticalPanel(); graph.setBorder(margin2); JLabel graphLabel = new JLabel(JMeterUtils.getResString("aggregate_graph")); //$NON-NLS-1$ graphPanel = new AxisGraph(); graphPanel.setPreferredSize(new Dimension(defaultWidth,defaultHeight)); // horizontal panel for the buttons HorizontalPanel buttonpanel = new HorizontalPanel(); buttonpanel.add(columns); buttonpanel.add(displayButton); buttonpanel.add(saveGraph); buttonpanel.add(saveTable); buttonpanel.add(saveHeaders); graph.add(graphLabel); graph.add(graphTitle); graph.add(maxLengthXAxisLabel); graph.add(graphWidth); graph.add(graphHeight); graph.add(buttonpanel); graph.add(graphPanel); displayButton.addActionListener(this); saveGraph.addActionListener(this); saveTable.addActionListener(this); graphScroll = new JScrollPane(graph); graphScroll.setAutoscrolls(true); spane = new JSplitPane(JSplitPane.VERTICAL_SPLIT); spane.setLeftComponent(myScrollPane); spane.setRightComponent(graphScroll); spane.setResizeWeight(.2); spane.setContinuousLayout(true); this.add(mainPanel, BorderLayout.NORTH); this.add(spane,BorderLayout.CENTER); } public void makeGraph() { String wstr = graphWidth.getText(); String hstr = graphHeight.getText(); String lstr = maxLengthXAxisLabel.getText(); if (wstr.length() == 0) { wstr = "450";//$NON-NLS-1$ } if (hstr.length() == 0) { hstr = "250";//$NON-NLS-1$ } if (lstr.length() == 0) { lstr = "20";//$NON-NLS-1$ } int width = Integer.parseInt(wstr); int height = Integer.parseInt(hstr); int maxLength = Integer.parseInt(lstr); graphPanel.setData(this.getData()); graphPanel.setHeight(height); graphPanel.setWidth(width); graphPanel.setTitle(graphTitle.getText()); graphPanel.setMaxLength(maxLength); graphPanel.setXAxisLabels(getAxisLabels()); graphPanel.setXAxisTitle(columns.getText()); graphPanel.setYAxisLabels(this.yAxisLabel); graphPanel.setYAxisTitle(this.yAxisTitle); graphPanel.setPreferredSize(new Dimension(width,height)); graph.setSize(new Dimension(graph.getWidth(), height + 120)); spane.repaint(); } public double[][] getData() { if (model.getRowCount() > 1) { int count = model.getRowCount() -1; int col = model.findColumn(columns.getText()); double[][] data = new double[1][count]; for (int idx=0; idx < count; idx++) { data[0][idx] = ((Number)model.getValueAt(idx,col)).doubleValue(); } return data; } return new double[][]{ { 250, 45, 36, 66, 145, 80, 55 } }; } public String[] getAxisLabels() { if (model.getRowCount() > 1) { int count = model.getRowCount() -1; String[] labels = new String[count]; for (int idx=0; idx < count; idx++) { labels[idx] = (String)model.getValueAt(idx,0); } return labels; } return new String[]{ "/", "/samples", "/jsp-samples", "/manager", "/manager/status", "/hello", "/world" }; } /** * We use this method to get the data, since we are using * ObjectTableModel, so the calling getDataVector doesn't * work as expected. 
* @return the data from the model */ public Vector getAllTableData() { Vector data = new Vector(); if (model.getRowCount() > 0) { for (int rw=0; rw < model.getRowCount(); rw++) { int cols = model.getColumnCount(); Vector column = new Vector(); data.add(column); for (int idx=0; idx < cols; idx++) { Object val = model.getValueAt(rw,idx); column.add(val); } } } return data; } public void actionPerformed(ActionEvent event) { if (event.getSource() == displayButton) { makeGraph(); } else if (event.getSource() == saveGraph) { saveGraphToFile = true; try { ActionRouter.getInstance().getAction( ActionNames.SAVE_GRAPHICS,SaveGraphics.class.getName()).doAction( new ActionEvent(this,1,ActionNames.SAVE_GRAPHICS)); } catch (Exception e) { log.error(e.getMessage()); } } else if (event.getSource() == saveTable) { JFileChooser chooser = FileDialoger.promptToSaveFile("statistics.csv"); //$NON-NLS-1$ if (chooser == null) { return; } FileWriter writer = null; try { writer = new FileWriter(chooser.getSelectedFile()); Vector data = this.getAllTableData(); CSVSaveService.saveCSVStats(data,writer,saveHeaders.isSelected() ? COLUMNS : null); } catch (FileNotFoundException e) { log.warn(e.getMessage()); } catch (IOException e) { log.warn(e.getMessage()); } finally { JOrphanUtils.closeQuietly(writer); } } } public JComponent getPrintableComponent() { if (saveGraphToFile == true) { saveGraphToFile = false; graphPanel.setBounds(graphPanel.getLocation().x,graphPanel.getLocation().y, graphPanel.width,graphPanel.height); return graphPanel; } return this; } }
src/components/org/apache/jmeter/visualizers/StatGraphVisualizer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * */ package org.apache.jmeter.visualizers; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Vector; import javax.swing.BoxLayout; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComponent; import javax.swing.JFileChooser; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.JTable; import javax.swing.border.Border; import javax.swing.border.EmptyBorder; import javax.swing.table.TableCellRenderer; import org.apache.jmeter.gui.action.ActionNames; import org.apache.jmeter.gui.action.ActionRouter; import org.apache.jmeter.gui.action.SaveGraphics; import org.apache.jmeter.gui.util.FileDialoger; import org.apache.jmeter.gui.util.HeaderAsPropertyRenderer; import org.apache.jmeter.gui.util.HorizontalPanel; import org.apache.jmeter.gui.util.VerticalPanel; import org.apache.jmeter.samplers.Clearable; import org.apache.jmeter.samplers.SampleResult; import org.apache.jmeter.save.CSVSaveService; import org.apache.jmeter.util.JMeterUtils; import org.apache.jmeter.visualizers.gui.AbstractVisualizer; import org.apache.jorphan.gui.JLabeledChoice; import org.apache.jorphan.gui.JLabeledTextField; import org.apache.jorphan.gui.NumberRenderer; import org.apache.jorphan.gui.ObjectTableModel; import org.apache.jorphan.gui.RateRenderer; import org.apache.jorphan.gui.RendererUtils; import org.apache.jorphan.logging.LoggingManager; import org.apache.jorphan.reflect.Functor; import org.apache.jorphan.util.JOrphanUtils; import org.apache.log.Logger; /** * Aggregrate Table-Based Reporting Visualizer for JMeter. Props to the people * who've done the other visualizers ahead of me (Stefano Mazzocchi), who I * borrowed code from to start me off (and much code may still exist). Thank * you! 
* */ public class StatGraphVisualizer extends AbstractVisualizer implements Clearable, ActionListener { private static final Logger log = LoggingManager.getLoggerForClass(); // Column resource names private static final String[] COLUMNS = { "sampler_label", //$NON-NLS-1$ "aggregate_report_count", //$NON-NLS-1$ "average", //$NON-NLS-1$ "aggregate_report_median", //$NON-NLS-1$ "aggregate_report_90%_line", //$NON-NLS-1$ "aggregate_report_min", //$NON-NLS-1$ "aggregate_report_max", //$NON-NLS-1$ "aggregate_report_error%", //$NON-NLS-1$ "aggregate_report_rate", //$NON-NLS-1$ "aggregate_report_bandwidth" }; //$NON-NLS-1$ private final String[] GRAPH_COLUMNS = {JMeterUtils.getResString("average"),//$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_median"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_90%_line"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_min"), //$NON-NLS-1$ JMeterUtils.getResString("aggregate_report_max")}; //$NON-NLS-1$ private final String TOTAL_ROW_LABEL = JMeterUtils.getResString("aggregate_report_total_label"); //$NON-NLS-1$ protected JTable myJTable; protected JScrollPane myScrollPane; private transient ObjectTableModel model; Map tableRows = Collections.synchronizedMap(new HashMap()); protected AxisGraph graphPanel = null; protected VerticalPanel graph = null; protected JScrollPane graphScroll = null; protected JSplitPane spane = null; protected JLabeledChoice columns = new JLabeledChoice(JMeterUtils.getResString("aggregate_graph_column"),GRAPH_COLUMNS);//$NON-NLS-1$ //NOT USED protected double[][] data = null; protected JButton displayButton = new JButton(JMeterUtils.getResString("aggregate_graph_display")); //$NON-NLS-1$ protected JButton saveGraph = new JButton(JMeterUtils.getResString("aggregate_graph_save")); //$NON-NLS-1$ protected JButton saveTable = new JButton(JMeterUtils.getResString("aggregate_graph_save_table")); //$NON-NLS-1$ private JCheckBox saveHeaders = // should header be saved with the data? 
new JCheckBox(JMeterUtils.getResString("aggregate_graph_save_table_header")); //$NON-NLS-1$ JLabeledTextField graphTitle = new JLabeledTextField(JMeterUtils.getResString("aggregate_graph_user_title")); //$NON-NLS-1$ JLabeledTextField maxLengthXAxisLabel = new JLabeledTextField(JMeterUtils.getResString("aggregate_graph_max_length_xaxis_label"));//$NON-NLS-1$ JLabeledTextField graphWidth = new JLabeledTextField(JMeterUtils.getResString("aggregate_graph_width")); //$NON-NLS-1$ JLabeledTextField graphHeight = new JLabeledTextField(JMeterUtils.getResString("aggregate_graph_height")); //$NON-NLS-1$ protected String yAxisLabel = JMeterUtils.getResString("aggregate_graph_response_time");//$NON-NLS-1$ protected String yAxisTitle = JMeterUtils.getResString("aggregate_graph_ms"); //$NON-NLS-1$ protected boolean saveGraphToFile = false; protected int defaultWidth = 400; protected int defaultHeight = 300; public StatGraphVisualizer() { super(); model = new ObjectTableModel(COLUMNS, SamplingStatCalculator.class, new Functor[] { new Functor("getLabel"), //$NON-NLS-1$ new Functor("getCount"), //$NON-NLS-1$ new Functor("getMeanAsNumber"), //$NON-NLS-1$ new Functor("getMedian"), //$NON-NLS-1$ new Functor("getPercentPoint", //$NON-NLS-1$ new Object[] { new Float(.900) }), new Functor("getMin"), //$NON-NLS-1$ new Functor("getMax"), //$NON-NLS-1$ new Functor("getErrorPercentage"), //$NON-NLS-1$ new Functor("getRate"), //$NON-NLS-1$ new Functor("getKBPerSecond") }, //$NON-NLS-1$ new Functor[] { null, null, null, null, null, null, null, null, null, null }, new Class[] { String.class, Long.class, Long.class, Long.class, Long.class, Long.class, Long.class, String.class, String.class, String.class }); clearData(); init(); } // Column renderers private static final TableCellRenderer[] RENDERERS = new TableCellRenderer[]{ null, // Label null, // count null, // Mean null, // median null, // 90% null, // Min null, // Max new NumberRenderer("#0.00%"), // Error %age new RateRenderer("#.0"), // Throughpur new NumberRenderer("#.0"), // pageSize }; public static boolean testFunctors(){ StatGraphVisualizer instance = new StatGraphVisualizer(); return instance.model.checkFunctors(null,instance.getClass()); } public String getLabelResource() { return "aggregate_graph_title"; //$NON-NLS-1$ } public void add(SampleResult res) { SamplingStatCalculator row = null; final String sampleLabel = res.getSampleLabel(); synchronized (tableRows) { row = (SamplingStatCalculator) tableRows.get(sampleLabel); if (row == null) { row = new SamplingStatCalculator(sampleLabel); tableRows.put(row.getLabel(), row); model.insertRow(row, model.getRowCount() - 1); } } row.addSample(res); ((SamplingStatCalculator) tableRows.get(TOTAL_ROW_LABEL)).addSample(res); model.fireTableDataChanged(); } /** * Clears this visualizer and its model, and forces a repaint of the table. */ public void clearData() { model.clearData(); tableRows.clear(); tableRows.put(TOTAL_ROW_LABEL, new SamplingStatCalculator(TOTAL_ROW_LABEL)); model.addRow(tableRows.get(TOTAL_ROW_LABEL)); } /** * Main visualizer setup. 
*/ private void init() { this.setLayout(new BorderLayout()); // MAIN PANEL JPanel mainPanel = new JPanel(); Border margin = new EmptyBorder(10, 10, 5, 10); Border margin2 = new EmptyBorder(10, 10, 5, 10); mainPanel.setBorder(margin); mainPanel.setLayout(new BoxLayout(mainPanel, BoxLayout.Y_AXIS)); mainPanel.add(makeTitlePanel()); myJTable = new JTable(model); myJTable.getTableHeader().setDefaultRenderer(new HeaderAsPropertyRenderer()); myJTable.setPreferredScrollableViewportSize(new Dimension(500, 80)); RendererUtils.applyRenderers(myJTable, RENDERERS); myScrollPane = new JScrollPane(myJTable); graph = new VerticalPanel(); graph.setBorder(margin2); JLabel graphLabel = new JLabel(JMeterUtils.getResString("aggregate_graph")); //$NON-NLS-1$ graphPanel = new AxisGraph(); graphPanel.setPreferredSize(new Dimension(defaultWidth,defaultHeight)); // horizontal panel for the buttons HorizontalPanel buttonpanel = new HorizontalPanel(); buttonpanel.add(columns); buttonpanel.add(displayButton); buttonpanel.add(saveGraph); buttonpanel.add(saveTable); buttonpanel.add(saveHeaders); graph.add(graphLabel); graph.add(graphTitle); graph.add(maxLengthXAxisLabel); graph.add(graphWidth); graph.add(graphHeight); graph.add(buttonpanel); graph.add(graphPanel); displayButton.addActionListener(this); saveGraph.addActionListener(this); saveTable.addActionListener(this); graphScroll = new JScrollPane(graph); graphScroll.setAutoscrolls(true); spane = new JSplitPane(JSplitPane.VERTICAL_SPLIT); spane.setLeftComponent(myScrollPane); spane.setRightComponent(graphScroll); spane.setResizeWeight(.2); spane.setContinuousLayout(true); this.add(mainPanel, BorderLayout.NORTH); this.add(spane,BorderLayout.CENTER); } public void makeGraph() { String wstr = graphWidth.getText(); String hstr = graphHeight.getText(); String lstr = maxLengthXAxisLabel.getText(); if (wstr.length() == 0) { wstr = "450";//$NON-NLS-1$ } if (hstr.length() == 0) { hstr = "250";//$NON-NLS-1$ } if (lstr.length() == 0) { lstr = "20";//$NON-NLS-1$ } int width = Integer.parseInt(wstr); int height = Integer.parseInt(hstr); int maxLength = Integer.parseInt(lstr); graphPanel.setData(this.getData()); graphPanel.setHeight(height); graphPanel.setWidth(width); graphPanel.setTitle(graphTitle.getText()); graphPanel.setMaxLength(maxLength); graphPanel.setXAxisLabels(getAxisLabels()); graphPanel.setXAxisTitle(columns.getText()); graphPanel.setYAxisLabels(this.yAxisLabel); graphPanel.setYAxisTitle(this.yAxisTitle); graphPanel.setPreferredSize(new Dimension(width,height)); graph.setSize(new Dimension(graph.getWidth(), height + 120)); spane.repaint(); } public double[][] getData() { if (model.getRowCount() > 1) { int count = model.getRowCount() -1; int col = model.findColumn(columns.getText()); // TODO is this locale-safe? double[][] data = new double[1][count]; for (int idx=0; idx < count; idx++) { data[0][idx] = ((Number)model.getValueAt(idx,col)).doubleValue(); } return data; } return new double[][]{ { 250, 45, 36, 66, 145, 80, 55 } }; } public String[] getAxisLabels() { if (model.getRowCount() > 1) { int count = model.getRowCount() -1; String[] labels = new String[count]; for (int idx=0; idx < count; idx++) { labels[idx] = (String)model.getValueAt(idx,0); } return labels; } return new String[]{ "/", "/samples", "/jsp-samples", "/manager", "/manager/status", "/hello", "/world" }; } /** * We use this method to get the data, since we are using * ObjectTableModel, so the calling getDataVector doesn't * work as expected. 
* @return the data from the model */ public Vector getAllTableData() { Vector data = new Vector(); if (model.getRowCount() > 0) { for (int rw=0; rw < model.getRowCount(); rw++) { int cols = model.getColumnCount(); Vector column = new Vector(); data.add(column); for (int idx=0; idx < cols; idx++) { Object val = model.getValueAt(rw,idx); column.add(val); } } } return data; } public void actionPerformed(ActionEvent event) { if (event.getSource() == displayButton) { makeGraph(); } else if (event.getSource() == saveGraph) { saveGraphToFile = true; try { ActionRouter.getInstance().getAction( ActionNames.SAVE_GRAPHICS,SaveGraphics.class.getName()).doAction( new ActionEvent(this,1,ActionNames.SAVE_GRAPHICS)); } catch (Exception e) { log.error(e.getMessage()); } } else if (event.getSource() == saveTable) { JFileChooser chooser = FileDialoger.promptToSaveFile("statistics.csv"); //$NON-NLS-1$ if (chooser == null) { return; } FileWriter writer = null; try { writer = new FileWriter(chooser.getSelectedFile()); Vector data = this.getAllTableData(); CSVSaveService.saveCSVStats(data,writer,saveHeaders.isSelected() ? getColumnNames() : null); } catch (FileNotFoundException e) { log.warn(e.getMessage()); } catch (IOException e) { log.warn(e.getMessage()); } finally { JOrphanUtils.closeQuietly(writer); } } } // TODO consider using the table methods to return data and headers in actual order private String[] getColumnNames(){ String [] names = new String[COLUMNS.length]; for (int i=0; i < COLUMNS.length; i++){ names[i]=JMeterUtils.getResString(COLUMNS[i]); } return names; } public JComponent getPrintableComponent() { if (saveGraphToFile == true) { saveGraphToFile = false; graphPanel.setBounds(graphPanel.getLocation().x,graphPanel.getLocation().y, graphPanel.width,graphPanel.height); return graphPanel; } return this; } }
I18N change was wrong git-svn-id: https://svn.apache.org/repos/asf/jakarta/jmeter/trunk@718828 13f79535-47bb-0310-9956-ffa450edef68 Former-commit-id: 727a613737f34159d3069767ea5976f2a54df71b
src/components/org/apache/jmeter/visualizers/StatGraphVisualizer.java
I18N change was wrong
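For readability (the file contents above are flattened), this is the part the revert removes: the earlier change had stored resource keys in COLUMNS and translated them on demand through a helper plus a HeaderAsPropertyRenderer on the table header. A sketch of that removed helper, copied from the old file contents above:

    // Removed by this commit: translate the stored resource keys into column headers.
    private String[] getColumnNames() {
        String[] names = new String[COLUMNS.length];
        for (int i = 0; i < COLUMNS.length; i++) {
            names[i] = JMeterUtils.getResString(COLUMNS[i]);
        }
        return names;
    }

After the revert, COLUMNS again holds the already-translated strings, so the helper and the custom header renderer are no longer needed.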
Java
bsd-3-clause
c712ddbb824aaaa23639c589123209513198d179
0
MengZhang/acmo-dssat
package org.agmip.translators.acmo; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.CharArrayReader; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import static org.agmip.translators.acmo.AcmoCommonOutput.*; import static org.agmip.util.MapUtil.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * DSSAT AFile Data I/O API Class * * @author Meng Zhang * @version 1.0 */ public class AcmoCsvTranslator { private static final Logger log = LoggerFactory.getLogger(AcmoCommonInput.class); private File outputFile; /** * Get output file object */ public File getOutputFile() { return outputFile; } /** * Generate ACMO CSV file * * @param outputCsvPath The path for output csv file * @param inputFilePath The path for input zip file which contains *.OUT and * acmo.json */ public void writeCsvFile(String outputCsvPath, String inputFilePath) throws IOException { // Read input zip file HashMap brMap = AcmoCommonInput.getBufferReader(inputFilePath); // Get input csv file from zip Object buf = brMap.get("CSV"); BufferedReader brCsv; // If Output File File is no been found if (buf == null) { log.error("CSV FILE IS MISSING IN THE INPUT ZIP PACKAGE"); return; } else { if (buf instanceof char[]) { brCsv = new BufferedReader(new CharArrayReader((char[]) buf)); } else { brCsv = (BufferedReader) buf; } } // Get input dssat simulation ouput files from zip AcmoDssatOutputFileInput dssatReader = new AcmoDssatOutputFileInput(); HashMap sumData = dssatReader.readSummary(brMap); ArrayList<HashMap> sumSubArr = getObjectOr(sumData, "data", new ArrayList<HashMap>()); HashMap sumSubData; ArrayList<HashMap> ovwSubArr = dssatReader.readOverview(brMap); HashMap ovwSubData; // Get simulation output values from output files by experiment id HashMap<String, String> sumValMap = new HashMap(); // ArrayList<String> sumValArr = new ArrayList(); String version = getObjectOr(sumData, "vevsion", "Ver. N/A"); StringBuilder sbData; for (int i = 0; i < sumSubArr.size(); i++) { sbData = new StringBuilder(); sumSubData = sumSubArr.get(i); ovwSubData = ovwSubArr.get(i); String runno_sum = getObjectOr(sumSubData, "runno", "sum"); String runno_ovw = getObjectOr(ovwSubData, "runno", "ovm"); String trno = getObjectOr(ovwSubData, "trno", "1"); String pdat = formatDateStr(getObjectOr(sumSubData, "pdat", ""), ""); String exp_id = getObjectOr(ovwSubData, "exp_id", ""); String key = exp_id + "__" + trno + "," + pdat; if (Integer.parseInt(runno_sum) > 999) { runno_ovw = (Integer.parseInt(runno_ovw)) + 1000 + ""; } if (!runno_sum.equals(runno_ovw)) { log.warn("THE ORDER OF No." 
+ (i + 1) + " RECORD [" + exp_id + "] IS NOT MATCHED BETWEEN SUMMARY AND OVERVIEW OUTPUT FILE"); continue; } // Create CSV data if (!sumValMap.containsKey(key)) { sbData.append(",\"DSSAT\",\"DSSAT ").append(getObjectOr(sumSubData, "model", "")).append(" ").append(version).append("\""); // MODEL_VER sbData.append(",\"").append(getObjectOr(sumSubData, "hwah", "")).append("\""); // HWAH sbData.append(",\"").append(getObjectOr(sumSubData, "cwam", "")).append("\""); // CWAH sbData.append(",\"").append(formatDateStr(getObjectOr(sumSubData, "adat", ""))).append("\""); // ADAT sbData.append(",\"").append(formatDateStr(getObjectOr(sumSubData, "mdat", ""))).append("\""); // MDAT sbData.append(",\"").append(formatDateStr(getObjectOr(sumSubData, "hdat", ""))).append("\""); // HDATE sbData.append(",\"").append(getObjectOr(sumSubData, "laix", "")).append("\""); // LAIX sbData.append(",\"").append(getObjectOr(sumSubData, "prcp", "")).append("\""); // PRCP sbData.append(",\"").append(getObjectOr(sumSubData, "etcp", "")).append("\""); // ETCP sbData.append(",\"").append(getObjectOr(sumSubData, "nucm", "")).append("\""); // NUCM sbData.append(",\"").append(getObjectOr(sumSubData, "nlcm", "")).append("\""); // NLCM sumValMap.put(key, sbData.toString()); // P.S. since non-DSSAT model won't have multiple treament, thus trno is not used as the part of key } else { log.warn("REPEATED RECORD IN SUMMARY FILE WITH SAME PDAT AND EXNAME"); } // sumValArr.add(sbData.toString()); } // Write CSV File outputCsvPath = revisePath(outputCsvPath); outputFile = new File(outputCsvPath + "ACMO.csv"); BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile)); String line; String titleLine = ""; String[] titles; int curDataLineNo = 1; // Write titles while ((line = brCsv.readLine()) != null) { if (line.startsWith("*") || line.startsWith("\"*\"")) { break; } else { bw.write(line); bw.write("\r\n"); curDataLineNo++; titleLine = line; } } // Get titles if (titleLine.endsWith("\"")) { titleLine = titleLine.substring(0, titleLine.length() - 1); } titles = titleLine.split("\"?,\"?"); // Get key item position int pdateCol = getIndex(titles, "PDATE"); int exnameCol = getIndex(titles, "EXNAME"); int cropModelCol = getIndex(titles, "CROP_MODEL"); if (pdateCol < 0 || exnameCol < 0 || cropModelCol < 0) { log.error("MISSING TITLE FOR PDATE, EXNAME OR CROP_MODEL IN LINE " + (curDataLineNo - 1)); bw.write("MISSING TITLE FOR PDATE, EXNAME OR CROP_MODEL IN LINE " + (curDataLineNo - 1)); bw.close(); return; } // Write data while (line != null) { // currently exname (exp_id) is located in the 3rd spot of row String[] tmp = line.split(","); if (tmp.length < pdateCol + 1 || tmp[exnameCol].trim().equals("") || tmp[pdateCol].trim().equals("")) { bw.write(line); log.warn("MISSING EXNAME OR SDAT IN LINE " + curDataLineNo); } else { tmp[pdateCol] = tmp[pdateCol].replaceAll("/", ""); // remove the comma for blank cell which will be filled with output value line = trimComma(tmp, cropModelCol); bw.write(line); // wirte simulation output info if (!tmp[exnameCol].matches("\\w+_+\\d+")) { tmp[exnameCol] += "__1"; } else if (!tmp[exnameCol].matches("\\w+__\\d+")) { tmp[exnameCol] = tmp[exnameCol].replaceAll("_+", "__"); } String scvKey = tmp[exnameCol] + "," + tmp[pdateCol]; if (sumValMap.containsKey(scvKey)) { bw.write(sumValMap.remove(scvKey)); // P.S. 
temporal way for multiple treatment } else { bw.write(",\"DSSAT\""); log.warn("THE SIMULATION OUTPUT DATA FOR [" + scvKey + "] IS MISSING"); // if (curDataLineNo - 4 < sumValArr.size()) { // bw.write(sumValArr.get(curDataLineNo - 4)); // } else { // log.warn("THE SIMULATION OUTPUT DATA FOR [" + tmp[2] + "] IS MISSING"); // } } } bw.write("\r\n"); curDataLineNo++; line = brCsv.readLine(); } bw.close(); } /** * Remove the comma in the end of the line and combine to a new String * * @param strs input array of string which is splited by comma * @param length the expected length of that array * @return */ private String trimComma(String[] strs, int length) { StringBuilder sb = new StringBuilder(); sb.append(strs[0]); int min = Math.min(strs.length, length); for (int i = 1; i < min; i++) { sb.append(",").append(strs[i]); } for (int i = min; i < length; i++) { sb.append(","); } return sb.toString(); } /** * Get the index number for the targeted title * * @param titles The array of titles * @param name The name of title * @return The index of title in the line */ private int getIndex(String[] titles, String name) { for (int i = 0; i < titles.length; i++) { if (titles[i].equals(name)) { return i; } } return -1; } }
src/main/java/org/agmip/translators/acmo/AcmoCsvTranslator.java
package org.agmip.translators.acmo; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.CharArrayReader; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import static org.agmip.translators.acmo.AcmoCommonOutput.*; import static org.agmip.util.MapUtil.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * DSSAT AFile Data I/O API Class * * @author Meng Zhang * @version 1.0 */ public class AcmoCsvTranslator { private static final Logger log = LoggerFactory.getLogger(AcmoCommonInput.class); private File outputFile; /** * Get output file object */ public File getOutputFile() { return outputFile; } /** * Generate ACMO CSV file * * @param outputCsvPath The path for output csv file * @param inputFilePath The path for input zip file which contains *.OUT and * acmo.json */ public void writeCsvFile(String outputCsvPath, String inputFilePath) throws IOException { // Read input zip file HashMap brMap = AcmoCommonInput.getBufferReader(inputFilePath); // Get input csv file from zip Object buf = brMap.get("CSV"); BufferedReader brCsv; // If Output File File is no been found if (buf == null) { log.error("CSV FILE IS MISSING IN THE INPUT ZIP PACKAGE"); return; } else { if (buf instanceof char[]) { brCsv = new BufferedReader(new CharArrayReader((char[]) buf)); } else { brCsv = (BufferedReader) buf; } } // Get input dssat simulation ouput files from zip AcmoDssatOutputFileInput dssatReader = new AcmoDssatOutputFileInput(); HashMap sumData = dssatReader.readSummary(brMap); ArrayList<HashMap> sumSubArr = getObjectOr(sumData, "data", new ArrayList<HashMap>()); HashMap sumSubData; ArrayList<HashMap> ovwSubArr = dssatReader.readOverview(brMap); HashMap ovwSubData; // Get simulation output values from output files by experiment id HashMap<String, String> sumValMap = new HashMap(); // ArrayList<String> sumValArr = new ArrayList(); String version = getObjectOr(sumData, "vevsion", "Ver. N/A"); StringBuilder sbData; for (int i = 0; i < sumSubArr.size(); i++) { sbData = new StringBuilder(); sumSubData = sumSubArr.get(i); ovwSubData = ovwSubArr.get(i); String runno_sum = getObjectOr(sumSubData, "runno", "sum"); String runno_ovw = getObjectOr(ovwSubData, "runno", "ovm"); String trno = getObjectOr(ovwSubData, "trno", "1"); String pdat = formatDateStr(getObjectOr(sumSubData, "pdat", ""), ""); String exp_id = getObjectOr(ovwSubData, "exp_id", ""); String key = exp_id + "__" + trno + "," + pdat; if (!runno_sum.equals(runno_ovw)) { log.warn("THE ORDER OF No." 
+ (i + 1) + " RECORD [" + exp_id + "] IS NOT MATCHED BETWEEN SUMMARY AND OVERVIEW OUTPUT FILE"); continue; } // Create CSV data if (!sumValMap.containsKey(key)) { sbData.append(",\"DSSAT\",\"DSSAT ").append(getObjectOr(sumSubData, "model", "")).append(" ").append(version).append("\""); // MODEL_VER sbData.append(",\"").append(getObjectOr(sumSubData, "hwah", "")).append("\""); // HWAH sbData.append(",\"").append(getObjectOr(sumSubData, "cwam", "")).append("\""); // CWAH sbData.append(",\"").append(formatDateStr(getObjectOr(sumSubData, "adat", ""))).append("\""); // ADAT sbData.append(",\"").append(formatDateStr(getObjectOr(sumSubData, "mdat", ""))).append("\""); // MDAT sbData.append(",\"").append(formatDateStr(getObjectOr(sumSubData, "hdat", ""))).append("\""); // HDATE sbData.append(",\"").append(getObjectOr(sumSubData, "laix", "")).append("\""); // LAIX sbData.append(",\"").append(getObjectOr(sumSubData, "prcp", "")).append("\""); // PRCP sbData.append(",\"").append(getObjectOr(sumSubData, "etcp", "")).append("\""); // ETCP sbData.append(",\"").append(getObjectOr(sumSubData, "nucm", "")).append("\""); // NUCM sbData.append(",\"").append(getObjectOr(sumSubData, "nlcm", "")).append("\""); // NLCM sumValMap.put(key, sbData.toString()); // P.S. since non-DSSAT model won't have multiple treament, thus trno is not used as the part of key } else { log.warn("REPEATED RECORD IN SUMMARY FILE WITH SAME PDAT AND EXNAME"); } // sumValArr.add(sbData.toString()); } // Write CSV File outputCsvPath = revisePath(outputCsvPath); outputFile = new File(outputCsvPath + "ACMO.csv"); BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile)); String line; String titleLine = ""; String[] titles; int curDataLineNo = 1; // Write titles while ((line = brCsv.readLine()) != null) { if (line.startsWith("*") || line.startsWith("\"*\"")) { break; } else { bw.write(line); bw.write("\r\n"); curDataLineNo++; titleLine = line; } } // Get titles if (titleLine.endsWith("\"")) { titleLine = titleLine.substring(0, titleLine.length() - 1); } titles = titleLine.split("\"?,\"?"); // Get key item position int pdateCol = getIndex(titles, "PDATE"); int exnameCol = getIndex(titles, "EXNAME"); int cropModelCol = getIndex(titles, "CROP_MODEL"); if (pdateCol < 0 || exnameCol < 0 || cropModelCol < 0) { log.error("MISSING TITLE FOR PDATE, EXNAME OR CROP_MODEL IN LINE " + (curDataLineNo - 1)); bw.write("MISSING TITLE FOR PDATE, EXNAME OR CROP_MODEL IN LINE " + (curDataLineNo - 1)); bw.close(); return; } // Write data while (line != null) { // currently exname (exp_id) is located in the 3rd spot of row String[] tmp = line.split(","); if (tmp.length < pdateCol + 1 || tmp[exnameCol].trim().equals("") || tmp[pdateCol].trim().equals("")) { bw.write(line); log.warn("MISSING EXNAME OR SDAT IN LINE " + curDataLineNo); } else { tmp[pdateCol] = tmp[pdateCol].replaceAll("/", ""); // remove the comma for blank cell which will be filled with output value line = trimComma(tmp, cropModelCol); bw.write(line); // wirte simulation output info if (!tmp[exnameCol].matches("\\w+_+\\d+")) { tmp[exnameCol] += "__1"; } else if (!tmp[exnameCol].matches("\\w+__\\d+")) { tmp[exnameCol] = tmp[exnameCol].replaceAll("_+", "__"); } String scvKey = tmp[exnameCol] + "," + tmp[pdateCol]; if (sumValMap.containsKey(scvKey)) { bw.write(sumValMap.remove(scvKey)); // P.S. 
temporal way for multiple treatment } else { bw.write(",\"DSSAT\""); log.warn("THE SIMULATION OUTPUT DATA FOR [" + scvKey + "] IS MISSING"); // if (curDataLineNo - 4 < sumValArr.size()) { // bw.write(sumValArr.get(curDataLineNo - 4)); // } else { // log.warn("THE SIMULATION OUTPUT DATA FOR [" + tmp[2] + "] IS MISSING"); // } } } bw.write("\r\n"); curDataLineNo++; line = brCsv.readLine(); } bw.close(); } /** * Remove the comma in the end of the line and combine to a new String * * @param strs input array of string which is splited by comma * @param length the expected length of that array * @return */ private String trimComma(String[] strs, int length) { StringBuilder sb = new StringBuilder(); sb.append(strs[0]); int min = Math.min(strs.length, length); for (int i = 1; i < min; i++) { sb.append(",").append(strs[i]); } for (int i = min; i < length; i++) { sb.append(","); } return sb.toString(); } /** * Get the index number for the targeted title * * @param titles The array of titles * @param name The name of title * @return The index of title in the line */ private int getIndex(String[] titles, String name) { for (int i = 0; i < titles.length; i++) { if (titles[i].equals(name)) { return i; } } return -1; } }
1. Fix the problem that happens when runno goes over 1000
src/main/java/org/agmip/translators/acmo/AcmoCsvTranslator.java
1. Fix the problem that happens when runno goes over 1000
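The guard this commit adds is buried in the flattened file contents above, so here it is in isolation, copied from the new AcmoCsvTranslator.writeCsvFile. The variable names come from that file; the interpretation in the comment (that the OVERVIEW run number must be shifted up by 1000 to keep matching the SUMMARY run number once the latter passes 999) is inferred from the code and commit message, not stated explicitly in the source.

    // Added by this commit: once the summary run number passes 999, shift the
    // overview run number up by 1000 so the two records can still be matched.
    if (Integer.parseInt(runno_sum) > 999) {
        runno_ovw = (Integer.parseInt(runno_ovw)) + 1000 + "";
    }
    if (!runno_sum.equals(runno_ovw)) {
        log.warn("THE ORDER OF No." + (i + 1) + " RECORD [" + exp_id
                + "] IS NOT MATCHED BETWEEN SUMMARY AND OVERVIEW OUTPUT FILE");
        continue;
    }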
Java
mit
a1dda298796436d41a68ee92bf637ec6b3bfde8e
0
ZeroPage/CAUScheduler
package org.zeropage.causcheduler.Activity; import android.content.res.Configuration; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBar; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.ListView; import org.zeropage.causcheduler.R; import java.util.Arrays; import java.util.List; public class MainActivity extends AppCompatActivity { private final String LOG_TAG = MainActivity.class.getSimpleName(); private Toolbar toolbar; private List<String> mDrawerItemList; private DrawerLayout mDrawerLayout; private ListView mDrawerList; private ActionBarDrawerToggle mDrawerToggle; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // Toolbar 초기화 toolbar = (Toolbar)findViewById(R.id.toolbar); setSupportActionBar(toolbar); // Drawer List 초기화 String[] drawerItemArray = getResources().getStringArray(R.array.drawer_item_array); mDrawerItemList = Arrays.asList(drawerItemArray); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); mDrawerList = (ListView) findViewById(R.id.drawer); // Set the adapter for the list view mDrawerList.setAdapter(new ArrayAdapter<>(this, R.layout.drawer_list_item, R.id.list_item_drawer_textview, mDrawerItemList)); // Set the list's click listener mDrawerList.setOnItemClickListener(new ListView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) { Log.v(LOG_TAG, (String)mDrawerList.getItemAtPosition(i)); mDrawerLayout.closeDrawer(mDrawerList);// 선택 후 드로어를 닫음 } }); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); mDrawerToggle = new ActionBarDrawerToggle( this, /* host Activity */ mDrawerLayout, /* DrawerLayout object */ R.string.drawer_open, /* "open drawer" description */ R.string.drawer_close /* "close drawer" description */ ) { /** Called when a drawer has settled in a completely closed state. */ public void onDrawerClosed(View view) { super.onDrawerClosed(view); invalidateOptionsMenu(); // creates call to onPrepareOptionsMenu() } /** Called when a drawer has settled in a completely open state. */ public void onDrawerOpened(View drawerView) { super.onDrawerOpened(drawerView); invalidateOptionsMenu(); // creates call to onPrepareOptionsMenu() } }; // Set the drawer toggle as the DrawerListener mDrawerLayout.setDrawerListener(mDrawerToggle); ActionBar actionBar = getSupportActionBar(); if(actionBar != null){ getSupportActionBar().setDisplayHomeAsUpEnabled(true); } } /* Called whenever we call invalidateOptionsMenu() */ // 이 메소드는 드로어가 열릴 경우, 그에 따라 자연스럽게 화면에서 특정 컴포넌트를 없애는 데 사용합니다. // ex) 드로어가 열리면 툴바의 검색 버튼이 자동으로 사라져야 함. @Override public boolean onPrepareOptionsMenu(Menu menu) { // If the nav drawer is open, hide action items related to the content view // boolean drawerOpen = mDrawerLayout.isDrawerOpen(mDrawerList); return super.onPrepareOptionsMenu(menu); } // onPostCreate와 onConfigurationChanged는 툴바의 토글 상태를 동기화 하기 위해 필요함 @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); // Sync the toggle state after onRestoreInstanceState has occurred. 
mDrawerToggle.syncState(); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); mDrawerToggle.onConfigurationChanged(newConfig); } // 홈버튼 이벤트가 툴바 토글에서 끝나도록 오버라이드한 메소드 @Override public boolean onOptionsItemSelected(MenuItem item) { if(mDrawerToggle.onOptionsItemSelected(item)){ return true; } return super.onOptionsItemSelected(item); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_main, menu); return super.onCreateOptionsMenu(menu); } }
app/src/main/java/org/zeropage/causcheduler/Activity/MainActivity.java
package org.zeropage.causcheduler.Activity; import android.content.res.Configuration; import android.support.annotation.NonNull; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.ListView; import org.zeropage.causcheduler.R; import java.util.Arrays; import java.util.List; public class MainActivity extends AppCompatActivity { private final String LOG_TAG = MainActivity.class.getSimpleName(); private Toolbar toolbar; private List<String> mDrawerItemList; private DrawerLayout mDrawerLayout; private ListView mDrawerList; private ActionBarDrawerToggle mDrawerToggle; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // Toolbar 초기화 toolbar = (Toolbar)findViewById(R.id.toolbar); setSupportActionBar(toolbar); // Drawer List 초기화 String[] drawerItemArray = getResources().getStringArray(R.array.drawer_item_array); mDrawerItemList = Arrays.asList(drawerItemArray); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); mDrawerList = (ListView) findViewById(R.id.drawer); // Set the adapter for the list view mDrawerList.setAdapter(new ArrayAdapter<>(this, R.layout.drawer_list_item, R.id.list_item_drawer_textview, mDrawerItemList)); // Set the list's click listener mDrawerList.setOnItemClickListener(new ListView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) { Log.v(LOG_TAG, (String)mDrawerList.getItemAtPosition(i)); mDrawerLayout.closeDrawer(mDrawerList);// 선택 후 드로어를 닫음 } }); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); mDrawerToggle = new ActionBarDrawerToggle( this, /* host Activity */ mDrawerLayout, /* DrawerLayout object */ R.string.drawer_open, /* "open drawer" description */ R.string.drawer_close /* "close drawer" description */ ) { /** Called when a drawer has settled in a completely closed state. */ public void onDrawerClosed(View view) { super.onDrawerClosed(view); invalidateOptionsMenu(); // creates call to onPrepareOptionsMenu() } /** Called when a drawer has settled in a completely open state. */ public void onDrawerOpened(View drawerView) { super.onDrawerOpened(drawerView); invalidateOptionsMenu(); // creates call to onPrepareOptionsMenu() } }; // Set the drawer toggle as the DrawerListener mDrawerLayout.setDrawerListener(mDrawerToggle); getSupportActionBar().setDisplayHomeAsUpEnabled(true); } /* Called whenever we call invalidateOptionsMenu() */ // 이 메소드는 드로어가 열릴 경우, 그에 따라 자연스럽게 화면에서 특정 컴포넌트를 없애는 데 사용합니다. // ex) 드로어가 열리면 툴바의 검색 버튼이 자동으로 사라져야 함. @Override public boolean onPrepareOptionsMenu(Menu menu) { // If the nav drawer is open, hide action items related to the content view // boolean drawerOpen = mDrawerLayout.isDrawerOpen(mDrawerList); return super.onPrepareOptionsMenu(menu); } // onPostCreate와 onConfigurationChanged는 툴바의 토글 상태를 동기화 하기 위해 필요함 @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); // Sync the toggle state after onRestoreInstanceState has occurred. 
mDrawerToggle.syncState(); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); mDrawerToggle.onConfigurationChanged(newConfig); } // 홈버튼 이벤트가 툴바 토글에서 끝나도록 오버라이드한 메소드 @Override public boolean onOptionsItemSelected(MenuItem item) { if(mDrawerToggle.onOptionsItemSelected(item)){ return true; } return super.onOptionsItemSelected(item); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_main, menu); return super.onCreateOptionsMenu(menu); } }
Do the ActionBar home button action only after checking that the ActionBar is not null
app/src/main/java/org/zeropage/causcheduler/Activity/MainActivity.java
Do the ActionBar home button action only after checking that the ActionBar is not null
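The whole MainActivity is flattened onto one line above, so the actual change is easier to see in isolation. A minimal sketch of the guard added in onCreate(), copied from the new file contents; getSupportActionBar() can return null when no action bar is available (for example with a NoActionBar theme), which is why the extra check is needed:

    // Only enable the home/up button when an ActionBar actually exists.
    ActionBar actionBar = getSupportActionBar();
    if (actionBar != null) {
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }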
Java
mit
4697f030d27866c901ca6a0ed86165f1435e07fc
0
rhmeeuwisse/WorkoutApp
package alobar.workout.features.exercise; import android.app.Dialog; import android.app.Fragment; import android.content.ContentValues; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.design.widget.TextInputLayout; import android.support.v4.app.DialogFragment; import android.text.Editable; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.EditText; import android.widget.Toast; import alobar.android.text.DebouncingTextWatcher; import alobar.workout.R; import alobar.workout.database.DatabaseContract; import butterknife.BindView; import butterknife.ButterKnife; import butterknife.Unbinder; /** * A simple {@link Fragment} subclass. */ public class ExerciseDialog extends DialogFragment implements ExercisePresenter.View, View.OnClickListener { @BindView(R.id.nameInput) TextInputLayout nameInput; @BindView(R.id.nameEdit) EditText nameEdit; @BindView(R.id.weightInput) TextInputLayout weightInput; @BindView(R.id.weightEdit) EditText weightEdit; private Unbinder unbinder; private ExercisePresenter presenter; public ExerciseDialog() { // Required empty public constructor } @Override public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); presenter = new ExercisePresenter(this); } @NonNull @Override public Dialog onCreateDialog(Bundle savedInstanceState) { Dialog result = super.onCreateDialog(savedInstanceState); result.setTitle(R.string.exercise_dialog_title); return result; } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View result = inflater.inflate(R.layout.fragment_exercise_dialog, container, false); unbinder = ButterKnife.bind(this, result); nameEdit.addTextChangedListener(new DebouncingTextWatcher() { @Override public void onDebouncedTextChanged(Editable s) { presenter.onNameChanged(s.toString()); } }); weightEdit.addTextChangedListener(new DebouncingTextWatcher() { @Override public void onDebouncedTextChanged(Editable s) { presenter.onWeightChanged(s.toString()); } }); result.findViewById(R.id.saveButton).setOnClickListener(this); result.findViewById(R.id.cancelButton).setOnClickListener(this); return result; } @Override public void onDestroyView() { super.onDestroyView(); unbinder.unbind(); } @Override public void onClick(View v) { switch (v.getId()) { case R.id.saveButton: String name = nameEdit.getText().toString().trim(); String weight = weightEdit.getText().toString().trim(); presenter.onSave(name, weight); break; case R.id.cancelButton: getDialog().cancel(); break; default: throw new UnsupportedOperationException(); } } public void saveToDatabase(String name, double weight) { ContentValues values = new ContentValues(); values.put(DatabaseContract.Exercise.NAME, name); values.put(DatabaseContract.Exercise.WEIGHT, weight); getActivity().getContentResolver().insert(DatabaseContract.Exercise.CONTENT_URI, values); } @Override public void setName(String value) { nameEdit.setText(value); } @Override public void setNameHint(String message) { nameInput.setError(message); nameInput.setErrorEnabled(message != null); } @Override public void setWeight(String value) { weightEdit.setText(value); } @Override public void setWeightHint(String message) { weightInput.setError(message); weightInput.setErrorEnabled(message != null); } @Override public void toastError(String message) { Toast.makeText(getActivity(), message, Toast.LENGTH_LONG).show(); } 
@Override public void close() { getDialog().dismiss(); } }
app/src/main/java/alobar/workout/features/exercise/ExerciseDialog.java
package alobar.workout.features.exercise; import android.app.Dialog; import android.app.Fragment; import android.content.ContentValues; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.design.widget.TextInputLayout; import android.support.v4.app.DialogFragment; import android.text.Editable; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.EditText; import android.widget.Toast; import alobar.android.text.DebouncingTextWatcher; import alobar.workout.R; import alobar.workout.database.DatabaseContract; /** * A simple {@link Fragment} subclass. */ public class ExerciseDialog extends DialogFragment implements ExercisePresenter.View, View.OnClickListener { private TextInputLayout nameInput; private EditText nameEdit; private TextInputLayout weightInput; private EditText weightEdit; private ExercisePresenter presenter; public ExerciseDialog() { // Required empty public constructor } @Override public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); presenter = new ExercisePresenter(this); } @NonNull @Override public Dialog onCreateDialog(Bundle savedInstanceState) { Dialog result = super.onCreateDialog(savedInstanceState); result.setTitle(R.string.exercise_dialog_title); return result; } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View result = inflater.inflate(R.layout.fragment_exercise_dialog, container, false); nameInput = (TextInputLayout) result.findViewById(R.id.nameInput); nameEdit = (EditText) result.findViewById(R.id.nameEdit); weightInput = (TextInputLayout) result.findViewById(R.id.weightInput); weightEdit = (EditText) result.findViewById(R.id.weightEdit); nameEdit.addTextChangedListener(new DebouncingTextWatcher() { @Override public void onDebouncedTextChanged(Editable s) { presenter.onNameChanged(s.toString()); } }); weightEdit.addTextChangedListener(new DebouncingTextWatcher() { @Override public void onDebouncedTextChanged(Editable s) { presenter.onWeightChanged(s.toString()); } }); result.findViewById(R.id.saveButton).setOnClickListener(this); result.findViewById(R.id.cancelButton).setOnClickListener(this); return result; } @Override public void onClick(View v) { switch (v.getId()) { case R.id.saveButton: String name = nameEdit.getText().toString().trim(); String weight = weightEdit.getText().toString().trim(); presenter.onSave(name, weight); break; case R.id.cancelButton: getDialog().cancel(); break; default: throw new UnsupportedOperationException(); } } public void saveToDatabase(String name, double weight) { ContentValues values = new ContentValues(); values.put(DatabaseContract.Exercise.NAME, name); values.put(DatabaseContract.Exercise.WEIGHT, weight); getActivity().getContentResolver().insert(DatabaseContract.Exercise.CONTENT_URI, values); } @Override public void setName(String value) { nameEdit.setText(value); } @Override public void setNameHint(String message) { nameInput.setError(message); nameInput.setErrorEnabled(message != null); } @Override public void setWeight(String value) { weightEdit.setText(value); } @Override public void setWeightHint(String message) { weightInput.setError(message); weightInput.setErrorEnabled(message != null); } @Override public void toastError(String message) { Toast.makeText(getActivity(), message, Toast.LENGTH_LONG).show(); } @Override public void close() { getDialog().dismiss(); } }
Injecting ExerciseDialog with ButterKnife
app/src/main/java/alobar/workout/features/exercise/ExerciseDialog.java
Injecting ExerciseDialog with ButterKnife
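The commit above replaces the manual findViewById wiring shown in the old contents with ButterKnife field injection. Below is a minimal sketch of what that binding could look like, assuming the standard ButterKnife annotations; only the view ids, the layout, and alobar.workout.R come from the record, while the class and method names are illustrative placeholders.

import android.view.View;
import android.widget.EditText;
import alobar.workout.R;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;

public class ExerciseDialogBindingSketch {
    // Replaces the findViewById(R.id.nameEdit) / findViewById(R.id.weightEdit) calls in the old contents.
    @BindView(R.id.nameEdit) EditText nameEdit;
    @BindView(R.id.weightEdit) EditText weightEdit;
    private Unbinder unbinder;

    // Would be called from onCreateView after inflating R.layout.fragment_exercise_dialog.
    void bindViews(View root) {
        unbinder = ButterKnife.bind(this, root);
    }

    // Replaces the saveButton branch of the original onClick switch.
    @OnClick(R.id.saveButton)
    void onSaveClicked() {
        // presenter.onSave(nameEdit.getText().toString(), weightEdit.getText().toString()) would go here.
    }

    // Would be called from onDestroyView to release the bound views.
    void unbindViews() {
        unbinder.unbind();
    }
}

In a DialogFragment the ButterKnife.bind call belongs in onCreateView and unbind in onDestroyView, which removes the repeated casts and click-listener plumbing from the old contents.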
Java
mit
b489c3e69f64c0d417b27691653346f6fd5facab
0
Htoonlin/MasterIDE
package com.sdm.ide.controller; import com.sdm.ide.component.AlertDialog; import com.sdm.ide.component.ProgressDialog; import com.sdm.ide.component.TableHelper; import com.sdm.ide.model.EntityModel; import com.sdm.ide.model.PropertyModel; import com.sdm.ide.task.ParseEntityTask; import com.sdm.ide.task.WriteEntityTask; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.Optional; import java.util.ResourceBundle; import javafx.collections.FXCollections; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.fxml.Initializable; import javafx.scene.Node; import javafx.scene.Scene; import javafx.scene.control.ButtonType; import javafx.scene.control.SplitPane; import javafx.scene.control.TableRow; import javafx.scene.control.TableView; import javafx.scene.input.MouseButton; import javafx.scene.layout.AnchorPane; import javafx.stage.Stage; import javafx.stage.StageStyle; public class EntityManagerController implements Initializable { private EntityModel currentEntity; private Node currentDetail; private String moduleDir; @FXML private SplitPane rootPane; @FXML private TableView<PropertyModel> propertyTable; @Override public void initialize(URL location, ResourceBundle resources) { propertyTable.setRowFactory(tv -> { TableRow<PropertyModel> row = new TableRow<>(); row.setOnMouseClicked(mouse -> { if (mouse.getButton() == MouseButton.PRIMARY && mouse.getClickCount() == 2 && !row.isEmpty()) { PropertyModel property = row.getItem(); if (property != null) { this.loadPropertyDetail(property); } } }); return row; }); } private void clearDetail() { if (this.currentDetail != null) { rootPane.getItems().remove(this.currentDetail); this.currentDetail = null; } } private <T> T loadDetail(String fxml) throws IOException { this.clearDetail(); FXMLLoader loader = new FXMLLoader(getClass().getResource(fxml)); this.currentDetail = loader.load(); rootPane.getItems().add(currentDetail); return loader.getController(); } private void loadPropertyDetail(PropertyModel property) { /* Load Property Detail */ try { PropertyDetailController controller = this.loadDetail("/fxml/PropertyDetail.fxml"); controller.setProperty(this.currentEntity, property); } catch (IOException e) { AlertDialog.showException(e); } } public void loadEntity(File entityFile) { if (entityFile != null && entityFile.isFile() && entityFile.getName().endsWith(".java")) { this.moduleDir = entityFile.getParent().replaceAll("entity", ""); try { ParseEntityTask task = new ParseEntityTask(entityFile); ProgressDialog dialog = new ProgressDialog(task, false); dialog.show(); task.setOnSucceeded((event) -> { currentEntity = task.getValue(); TableHelper.generateColumns(PropertyModel.class, propertyTable); propertyTable.setItems(FXCollections.observableArrayList(currentEntity.getProperties())); propertyTable.getColumns().forEach(col -> { if (col.getText().equalsIgnoreCase("index")) { propertyTable.getSortOrder().add(col); } }); propertyTable.refresh(); this.showDetail(null); dialog.close(); }); Thread thread = new Thread(task); thread.setDaemon(true); thread.start(); } catch (Exception ex) { AlertDialog.showException(ex); } } else { AlertDialog.showWarning("It is not java file. 
<" + entityFile.getName() + ">."); } } @FXML public void showDetail(ActionEvent event) { try { EntityInfoController controller = this.loadDetail("/fxml/EntityInfo.fxml"); controller.setEntity(currentEntity); } catch (IOException e) { AlertDialog.showException(e); } } @FXML public void addProperty(ActionEvent event) { PropertyModel property = new PropertyModel(this.currentEntity.getProperties().size()); this.currentEntity.addProperty(property); propertyTable.getItems().add(property); propertyTable.refresh(); propertyTable.getSelectionModel().select(property); this.loadPropertyDetail(property); } @FXML private void deleteProperty(ActionEvent event) { PropertyModel property = this.propertyTable.getSelectionModel().getSelectedItem(); if (property != null) { Optional<ButtonType> confirm = AlertDialog .showQuestion("Are you sure to remove " + property.getName() + "?"); if (confirm.get() == ButtonType.YES) { if (property.isPrimary()) { this.currentEntity.setPrimaryProperty(null); } this.currentEntity.removeProperty(property); this.propertyTable.getItems().remove(property); this.propertyTable.refresh(); } } } @FXML public void writeEntity(ActionEvent event) { try { if (this.currentEntity.getPrimaryProperty() == null) { AlertDialog.showWarning("Sorry! We can't generate entity without primary property."); return; } WriteEntityTask task = new WriteEntityTask(currentEntity); ProgressDialog dialog = new ProgressDialog(task, false); dialog.show(); task.setOnSucceeded(worker -> { dialog.close(); if (task.getValue()) { this.loadEntity(this.currentEntity.getFile()); } else { AlertDialog.showWarning("Something wrong in code generation process."); } }); Thread thread = new Thread(task); thread.setDaemon(true); thread.start(); } catch (Exception ex) { AlertDialog.showException(ex); } } @FXML private void showCode(ActionEvent event) { try { FXMLLoader loader = new FXMLLoader(getClass().getResource("/fxml/CodeEditor.fxml")); AnchorPane root = (AnchorPane) loader.load(); CodeEditorController controller = loader.getController(); controller.setEntity(currentEntity); Scene dialogScene = new Scene(root, 720, 500); dialogScene.getStylesheets().add(getClass().getResource("/fxml/syntax.css").toExternalForm()); Stage dialogStage = new Stage(); dialogStage.setTitle("Code Editor"); dialogStage.initStyle(StageStyle.DECORATED); dialogStage.setResizable(true); dialogStage.setScene(dialogScene); dialogStage.show(); } catch (IOException ex) { AlertDialog.showException(ex); } } @FXML private void reloadEntity(ActionEvent event) { Optional<ButtonType> result = AlertDialog.showQuestion("It will lost unsaved data. Do you want to continue?"); if (result.isPresent() && result.get().equals(ButtonType.YES)) { this.loadEntity(this.currentEntity.getFile()); } } @FXML private void showPropertyDetail(ActionEvent event) { PropertyModel property = this.propertyTable.getSelectionModel().getSelectedItem(); if (property != null) { this.loadPropertyDetail(property); } } @FXML private void cloneProperty(ActionEvent event) { PropertyModel propSource = this.propertyTable.getSelectionModel().getSelectedItem(); if (propSource != null) { PropertyModel propDest = new PropertyModel(propSource); this.currentEntity.addProperty(propDest); propertyTable.getItems().add(propDest); propertyTable.refresh(); propertyTable.getSelectionModel().select(propDest); this.loadPropertyDetail(propDest); } } }
src/main/java/com/sdm/ide/controller/EntityManagerController.java
package com.sdm.ide.controller; import com.sdm.ide.component.AlertDialog; import com.sdm.ide.component.ProgressDialog; import com.sdm.ide.component.TableHelper; import com.sdm.ide.model.EntityModel; import com.sdm.ide.model.PropertyModel; import com.sdm.ide.task.ParseEntityTask; import com.sdm.ide.task.WriteEntityTask; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.Optional; import java.util.ResourceBundle; import javafx.collections.FXCollections; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.fxml.Initializable; import javafx.scene.Node; import javafx.scene.Scene; import javafx.scene.control.ButtonType; import javafx.scene.control.SplitPane; import javafx.scene.control.TableRow; import javafx.scene.control.TableView; import javafx.scene.input.MouseButton; import javafx.scene.layout.AnchorPane; import javafx.stage.Stage; import javafx.stage.StageStyle; public class EntityManagerController implements Initializable { private EntityModel currentEntity; private Node currentDetail; private String moduleDir; @FXML private SplitPane rootPane; @FXML private TableView<PropertyModel> propertyTable; @Override public void initialize(URL location, ResourceBundle resources) { propertyTable.setRowFactory(tv -> { TableRow<PropertyModel> row = new TableRow<>(); row.setOnMouseClicked(mouse -> { if (mouse.getButton() == MouseButton.PRIMARY && mouse.getClickCount() == 2 && !row.isEmpty()) { PropertyModel property = row.getItem(); if (property != null) { this.loadPropertyDetail(property); } } }); return row; }); } private void clearDetail() { if (this.currentDetail != null) { rootPane.getItems().remove(this.currentDetail); this.currentDetail = null; } } private <T> T loadDetail(String fxml) throws IOException { this.clearDetail(); FXMLLoader loader = new FXMLLoader(getClass().getResource(fxml)); this.currentDetail = loader.load(); rootPane.getItems().add(currentDetail); return loader.getController(); } private void loadPropertyDetail(PropertyModel property) { /* Load Property Detail */ try { PropertyDetailController controller = this.loadDetail("/fxml/PropertyDetail.fxml"); controller.setProperty(this.currentEntity, property); } catch (IOException e) { AlertDialog.showException(e); } } public void loadEntity(File entityFile) { if (entityFile != null && entityFile.isFile() && entityFile.getName().endsWith(".java")) { this.moduleDir = entityFile.getParent().replaceAll("entity", ""); ProgressDialog dialog = new ProgressDialog(); try { ParseEntityTask task = new ParseEntityTask(entityFile); dialog.start(task); task.setOnSucceeded((event) -> { currentEntity = task.getValue(); TableHelper.generateColumns(PropertyModel.class, propertyTable); propertyTable.setItems(FXCollections.observableArrayList(currentEntity.getProperties())); propertyTable.getColumns().forEach(col -> { if (col.getText().equalsIgnoreCase("index")) { propertyTable.getSortOrder().add(col); } }); propertyTable.refresh(); this.showDetail(null); dialog.close(); }); Thread thread = new Thread(task); thread.setDaemon(true); thread.start(); } catch (Exception ex) { AlertDialog.showException(ex); } } else { AlertDialog.showWarning("It is not java file. 
<" + entityFile.getName() + ">."); } } @FXML public void showDetail(ActionEvent event) { try { EntityInfoController controller = this.loadDetail("/fxml/EntityInfo.fxml"); controller.setEntity(currentEntity); } catch (IOException e) { AlertDialog.showException(e); } } @FXML public void addProperty(ActionEvent event) { PropertyModel property = new PropertyModel(this.currentEntity.getProperties().size()); this.currentEntity.addProperty(property); propertyTable.getItems().add(property); propertyTable.refresh(); propertyTable.getSelectionModel().select(property); this.loadPropertyDetail(property); } @FXML private void deleteProperty(ActionEvent event) { PropertyModel property = this.propertyTable.getSelectionModel().getSelectedItem(); if (property != null) { Optional<ButtonType> confirm = AlertDialog .showQuestion("Are you sure to remove " + property.getName() + "?"); if (confirm.get() == ButtonType.YES) { if (property.isPrimary()) { this.currentEntity.setPrimaryProperty(null); } this.currentEntity.removeProperty(property); this.propertyTable.getItems().remove(property); this.propertyTable.refresh(); } } } @FXML public void writeEntity(ActionEvent event) { try { if (this.currentEntity.getPrimaryProperty() == null) { AlertDialog.showWarning("Sorry! We can't generate entity without primary property."); return; } WriteEntityTask task = new WriteEntityTask(currentEntity); ProgressDialog dialog = new ProgressDialog(); dialog.start(task); task.setOnSucceeded(worker -> { dialog.close(); if (task.getValue()) { this.loadEntity(this.currentEntity.getFile()); } else { AlertDialog.showWarning("Something wrong in code generation process."); } }); Thread thread = new Thread(task); thread.setDaemon(true); thread.start(); } catch (Exception ex) { AlertDialog.showException(ex); } } @FXML private void showCode(ActionEvent event) { try { FXMLLoader loader = new FXMLLoader(getClass().getResource("/fxml/CodeEditor.fxml")); AnchorPane root = (AnchorPane) loader.load(); CodeEditorController controller = loader.getController(); controller.setEntity(currentEntity); Scene dialogScene = new Scene(root, 720, 500); dialogScene.getStylesheets().add(getClass().getResource("/fxml/syntax.css").toExternalForm()); Stage dialogStage = new Stage(); dialogStage.setTitle("Code Editor"); dialogStage.initStyle(StageStyle.DECORATED); dialogStage.setResizable(true); dialogStage.setScene(dialogScene); dialogStage.show(); } catch (IOException ex) { AlertDialog.showException(ex); } } @FXML private void reloadEntity(ActionEvent event) { Optional<ButtonType> result = AlertDialog.showQuestion("It will lost unsaved data. Do you want to continue?"); if (result.isPresent() && result.get().equals(ButtonType.YES)) { this.loadEntity(this.currentEntity.getFile()); } } @FXML private void showPropertyDetail(ActionEvent event) { PropertyModel property = this.propertyTable.getSelectionModel().getSelectedItem(); if (property != null) { this.loadPropertyDetail(property); } } @FXML private void cloneProperty(ActionEvent event) { PropertyModel propSource = this.propertyTable.getSelectionModel().getSelectedItem(); if (propSource != null) { PropertyModel propDest = new PropertyModel(propSource); this.currentEntity.addProperty(propDest); propertyTable.getItems().add(propDest); propertyTable.refresh(); propertyTable.getSelectionModel().select(propDest); this.loadPropertyDetail(propDest); } } }
Change Dialog
src/main/java/com/sdm/ide/controller/EntityManagerController.java
Change Dialog
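The "Change Dialog" commit swaps how the project's com.sdm.ide.component.ProgressDialog is driven: the old contents construct it empty and call start(task), while the new contents pass the task to a (task, boolean) constructor and call show(). A small sketch of the two call shapes follows, assuming ParseEntityTask and WriteEntityTask extend javafx.concurrent.Task; the helper class, method names, and the meaning of the boolean flag are assumptions, the ProgressDialog calls themselves are taken from the record.

import com.sdm.ide.component.ProgressDialog;
import javafx.concurrent.Task;

final class DialogChangeSketch {

    // New style from this commit: hand the task to the constructor, then show().
    static <T> void runWithDialogNew(Task<T> task) {
        ProgressDialog dialog = new ProgressDialog(task, false); // second argument's meaning is assumed
        dialog.show();
        task.setOnSucceeded(event -> dialog.close());
        Thread thread = new Thread(task);
        thread.setDaemon(true);
        thread.start();
    }

    // Old style being replaced: empty constructor, then start(task).
    static <T> void runWithDialogOld(Task<T> task) {
        ProgressDialog dialog = new ProgressDialog();
        dialog.start(task);
        task.setOnSucceeded(event -> dialog.close());
        Thread thread = new Thread(task);
        thread.setDaemon(true);
        thread.start();
    }
}

Both variants keep the pattern used throughout the controller: register the success handler, then run the task on a daemon thread so the JavaFX UI stays responsive while parsing or writing the entity.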
Java
mit
7157f6836d8ac077f52c47e855a00101f9eaa0a2
0
p0isonra1n/Doze-Settings-Editor
package com.isaacparker.dozesettingseditor; import android.app.AlertDialog; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.PreferenceManager; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.text.ClipboardManager; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.EditText; import android.widget.ImageView; import android.widget.Toast; import com.stericson.RootShell.RootShell; import com.stericson.RootShell.exceptions.RootDeniedException; import com.stericson.RootShell.execution.Command; import java.io.IOException; import java.util.concurrent.TimeoutException; public class MainActivity extends AppCompatActivity { // Key names stored in the settings value. private static final String KEY_INACTIVE_TIMEOUT = "inactive_to"; private static final String KEY_SENSING_TIMEOUT = "sensing_to"; private static final String KEY_LOCATING_TIMEOUT = "locating_to"; private static final String KEY_LOCATION_ACCURACY = "location_accuracy"; private static final String KEY_MOTION_INACTIVE_TIMEOUT = "motion_inactive_to"; private static final String KEY_IDLE_AFTER_INACTIVE_TIMEOUT = "idle_after_inactive_to"; private static final String KEY_IDLE_PENDING_TIMEOUT = "idle_pending_to"; private static final String KEY_MAX_IDLE_PENDING_TIMEOUT = "max_idle_pending_to"; private static final String KEY_IDLE_PENDING_FACTOR = "idle_pending_factor"; private static final String KEY_IDLE_TIMEOUT = "idle_to"; private static final String KEY_MAX_IDLE_TIMEOUT = "max_idle_to"; private static final String KEY_IDLE_FACTOR = "idle_factor"; private static final String KEY_MIN_TIME_TO_ALARM = "min_time_to_alarm"; private static final String KEY_MAX_TEMP_APP_WHITELIST_DURATION = "max_temp_app_whitelist_duration"; private static final String KEY_MMS_TEMP_APP_WHITELIST_DURATION = "mms_temp_app_whitelist_duration"; private static final String KEY_SMS_TEMP_APP_WHITELIST_DURATION = "sms_temp_app_whitelist_duration"; final long INACTIVE_TIMEOUT = 30 * 60 * 1000L; final long SENSING_TIMEOUT = 4 * 60 * 1000L; final long LOCATING_TIMEOUT = 30 * 1000L; final float LOCATION_ACCURACY = 20; final long MOTION_INACTIVE_TIMEOUT = 10 * 60 * 1000L; final long IDLE_AFTER_INACTIVE_TIMEOUT = 30 * 60 * 1000L; final long IDLE_PENDING_TIMEOUT = 5 * 60 * 1000L; final long MAX_IDLE_PENDING_TIMEOUT = 10 * 60 * 1000L; final float IDLE_PENDING_FACTOR = 2; final long IDLE_TIMEOUT = 60 * 60 * 1000L; final long MAX_IDLE_TIMEOUT = 6 * 60 * 60 * 1000L; final long IDLE_FACTOR = 2; final long MIN_TIME_TO_ALARM = 60 * 60 * 1000L; final long MAX_TEMP_APP_WHITELIST_DURATION = 5 * 60 * 1000L; final long MMS_TEMP_APP_WHITELIST_DURATION = 60 * 1000L; final long SMS_TEMP_APP_WHITELIST_DURATION = 20 * 1000L; private static final String DESC_INACTIVE_TIMEOUT = "This is the time, after becoming inactive, at which we start looking at the motion sensor to determine if the device is being left alone. 
We don't do this immediately after going inactive just because we don't want to be continually running the significant motion sensor whenever the screen is off."; private static final String DESC_SENSING_TIMEOUT = "If we don't receive a callback from AnyMotion in this amount of time + locating_to, we will change from STATE_SENSING to STATE_INACTIVE, and any AnyMotion callbacks while not in STATE_SENSING will be ignored."; private static final String DESC_LOCATING_TIMEOUT = "This is how long we will wait to try to get a good location fix before going in to idle mode."; private static final String DESC_LOCATION_ACCURACY = "The desired maximum accuracy (in meters) we consider the location to be good enough to go on to idle. We will be trying to get an accuracy fix at least this good or until locating_to expires."; private static final String DESC_MOTION_INACTIVE_TIMEOUT = "This is the time, after seeing motion, that we wait after becoming inactive from that until we start looking for motion again."; private static final String DESC_IDLE_AFTER_INACTIVE_TIMEOUT = "This is the time, after the inactive timeout elapses, that we will wait looking for significant motion until we truly consider the device to be idle."; private static final String DESC_IDLE_PENDING_TIMEOUT = "This is the initial time, after being idle, that we will allow ourself to be back in the IDLE_PENDING state allowing the system to run normally until we return to idle."; private static final String DESC_MAX_IDLE_PENDING_TIMEOUT = "Maximum pending idle timeout (time spent running) we will be allowed to use."; private static final String DESC_IDLE_PENDING_FACTOR = "Scaling factor to apply to current pending idle timeout each time we cycle through that state."; private static final String DESC_IDLE_TIMEOUT = "This is the initial time that we want to sit in the idle state before waking up again to return to pending idle and allowing normal work to run."; private static final String DESC_MAX_IDLE_TIMEOUT = "Maximum idle duration we will be allowed to use."; private static final String DESC_IDLE_FACTOR = "Scaling factor to apply to current idle timeout each time we cycle through that state."; private static final String DESC_MIN_TIME_TO_ALARM = "This is the minimum time we will allow until the next upcoming alarm for us to actually go in to idle mode."; private static final String DESC_MAX_TEMP_APP_WHITELIST_DURATION = "Max amount of time to temporarily whitelist an app when it receives a high tickle."; private static final String DESC_MMS_TEMP_APP_WHITELIST_DURATION = "Amount of time we would like to whitelist an app that is receiving an MMS."; private static final String DESC_SMS_TEMP_APP_WHITELIST_DURATION = "Amount of time we would like to whitelist an app that is receiving an SMS."; EditText et_inactive_to; EditText et_sensing_to; EditText et_locating_to; EditText et_location_accuracy; EditText et_motion_inactive_to; EditText et_idle_after_inactive_to; EditText et_idle_pending_to; EditText et_max_idle_pending_to; EditText et_idle_pending_factor; EditText et_idle_to; EditText et_max_idle_to; EditText et_idle_factor; EditText et_min_time_to_alarm; EditText et_max_temp_app_whitelist_duration; EditText et_mms_temp_app_whitelist_duration; EditText et_sms_temp_app_whitelist_duration; ImageView iv_inactive_to; ImageView iv_sensing_to; ImageView iv_locating_to; ImageView iv_location_accuracy; ImageView iv_motion_inactive_to; ImageView iv_idle_after_inactive_to; ImageView iv_idle_pending_to; ImageView iv_max_idle_pending_to; ImageView 
iv_idle_pending_factor; ImageView iv_idle_to; ImageView iv_max_idle_to; ImageView iv_idle_factor; ImageView iv_min_time_to_alarm; ImageView iv_max_temp_app_whitelist_duration; ImageView iv_mms_temp_app_whitelist_duration; ImageView iv_sms_temp_app_whitelist_duration; boolean hasRoot = false; SharedPreferences sharedPref; int displayValueIn; int millisecondsInOneSecond = 1000; int millisecondsInOneMinute = 60 * millisecondsInOneSecond; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); sharedPref = PreferenceManager.getDefaultSharedPreferences(this); et_inactive_to = (EditText) findViewById(R.id.et_inactive_to); et_sensing_to = (EditText) findViewById(R.id.et_sensing_to); et_locating_to = (EditText) findViewById(R.id.et_locating_to); et_location_accuracy = (EditText) findViewById(R.id.et_location_accurary); et_motion_inactive_to = (EditText) findViewById(R.id.et_motion_inactive_to); et_idle_after_inactive_to = (EditText) findViewById(R.id.et_idle_after_inactive_to); et_idle_pending_to = (EditText) findViewById(R.id.et_idle_pending_to); et_max_idle_pending_to = (EditText) findViewById(R.id.et_max_idle_pending_to); et_idle_pending_factor = (EditText) findViewById(R.id.et_idle_pending_factor); et_idle_to = (EditText) findViewById(R.id.et_idle_to); et_max_idle_to = (EditText) findViewById(R.id.et_max_idle_to); et_idle_factor = (EditText) findViewById(R.id.et_idle_factor); et_min_time_to_alarm = (EditText) findViewById(R.id.et_min_time_to_alarm); et_max_temp_app_whitelist_duration = (EditText) findViewById(R.id.et_max_temp_app_whitelist_duration); et_mms_temp_app_whitelist_duration = (EditText) findViewById(R.id.et_mms_temp_app_whitelist_duration); et_sms_temp_app_whitelist_duration = (EditText) findViewById(R.id.et_sms_temp_app_whitelist_duration); iv_inactive_to = (ImageView) findViewById(R.id.iv_inactive_to); iv_sensing_to = (ImageView) findViewById(R.id.iv_sensing_to); iv_locating_to = (ImageView) findViewById(R.id.iv_locating_to); iv_location_accuracy = (ImageView) findViewById(R.id.iv_location_accurary); iv_motion_inactive_to = (ImageView) findViewById(R.id.iv_motion_inactive_to); iv_idle_after_inactive_to = (ImageView) findViewById(R.id.iv_idle_after_inactive_to); iv_idle_pending_to = (ImageView) findViewById(R.id.iv_idle_pending_to); iv_max_idle_pending_to = (ImageView) findViewById(R.id.iv_max_idle_pending_to); iv_idle_pending_factor = (ImageView) findViewById(R.id.iv_idle_pending_factor); iv_idle_to = (ImageView) findViewById(R.id.iv_idle_to); iv_max_idle_to = (ImageView) findViewById(R.id.iv_max_idle_to); iv_idle_factor = (ImageView) findViewById(R.id.iv_idle_factor); iv_min_time_to_alarm = (ImageView) findViewById(R.id.iv_min_time_to_alarm); iv_max_temp_app_whitelist_duration = (ImageView) findViewById(R.id.iv_max_temp_app_whitelist_duration); iv_mms_temp_app_whitelist_duration = (ImageView) findViewById(R.id.iv_mms_temp_app_whitelist_duration); iv_sms_temp_app_whitelist_duration = (ImageView) findViewById(R.id.iv_sms_temp_app_whitelist_duration); setInfoOnClick(); if (RootShell.isAccessGiven()) { hasRoot = true; //getSettings(); }else{ hasRoot = false; //Toast.makeText(this, "Root access required!", Toast.LENGTH_SHORT).show(); //finish(); } getSettings(); } private void setInfoOnClick() { iv_inactive_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder 
builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_INACTIVE_TIMEOUT); builder.setMessage(DESC_INACTIVE_TIMEOUT + "\n\nDefault: " + String.valueOf(INACTIVE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_sensing_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_SENSING_TIMEOUT); builder.setMessage(DESC_SENSING_TIMEOUT + "\n\nDefault: " + String.valueOf(SENSING_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_locating_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_LOCATING_TIMEOUT); builder.setMessage(DESC_LOCATING_TIMEOUT + "\n\nDefault: " + String.valueOf(LOCATING_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_location_accuracy.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_LOCATION_ACCURACY); builder.setMessage(DESC_LOCATION_ACCURACY + "\n\nDefault: " + String.valueOf(LOCATION_ACCURACY)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_motion_inactive_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MOTION_INACTIVE_TIMEOUT); builder.setMessage(DESC_MOTION_INACTIVE_TIMEOUT + "\n\nDefault: " + String.valueOf(MOTION_INACTIVE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_after_inactive_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_AFTER_INACTIVE_TIMEOUT); builder.setMessage(DESC_IDLE_AFTER_INACTIVE_TIMEOUT + "\n\nDefault: " + String.valueOf(IDLE_AFTER_INACTIVE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_pending_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_PENDING_TIMEOUT); builder.setMessage(DESC_IDLE_PENDING_TIMEOUT + "\n\nDefault: " + String.valueOf(IDLE_PENDING_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); 
dialog.show(); } }); iv_max_idle_pending_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MAX_IDLE_PENDING_TIMEOUT); builder.setMessage(DESC_MAX_IDLE_PENDING_TIMEOUT + "\n\nDefault: " + String.valueOf(MAX_IDLE_PENDING_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_pending_factor.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_PENDING_FACTOR); builder.setMessage(DESC_IDLE_PENDING_FACTOR + "\n\nDefault: " + String.valueOf(IDLE_PENDING_FACTOR)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_TIMEOUT); builder.setMessage(DESC_IDLE_TIMEOUT + "\n\nDefault: " + String.valueOf(IDLE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_max_idle_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MAX_IDLE_TIMEOUT); builder.setMessage(DESC_MAX_IDLE_TIMEOUT + "\n\nDefault: " + String.valueOf(MAX_IDLE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_factor.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_FACTOR); builder.setMessage(DESC_IDLE_FACTOR + "\n\nDefault: " + String.valueOf(IDLE_FACTOR)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_min_time_to_alarm.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MIN_TIME_TO_ALARM); builder.setMessage(DESC_MIN_TIME_TO_ALARM + "\n\nDefault: " + String.valueOf(MIN_TIME_TO_ALARM)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_max_temp_app_whitelist_duration.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MAX_TEMP_APP_WHITELIST_DURATION); builder.setMessage(DESC_MAX_TEMP_APP_WHITELIST_DURATION + "\n\nDefault: " + String.valueOf(MAX_TEMP_APP_WHITELIST_DURATION)); builder.setPositiveButton("OK", new 
DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_mms_temp_app_whitelist_duration.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MMS_TEMP_APP_WHITELIST_DURATION); builder.setMessage(DESC_MMS_TEMP_APP_WHITELIST_DURATION + "\n\nDefault: " + String.valueOf(MMS_TEMP_APP_WHITELIST_DURATION)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_sms_temp_app_whitelist_duration.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_SMS_TEMP_APP_WHITELIST_DURATION); builder.setMessage(DESC_SMS_TEMP_APP_WHITELIST_DURATION + "\n\nDefault: " + String.valueOf(SMS_TEMP_APP_WHITELIST_DURATION)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); } private void getSettings() { if(hasRoot) { try { Command command = new Command(0, "settings get global device_idle_constants") { @Override public void commandOutput(int id, String line) { if(line.startsWith("Error")){ Toast.makeText(MainActivity.this, "Can not access device settings. You are now in non root mode.", Toast.LENGTH_LONG).show(); hasRoot = false; super.commandOutput(id, line); } KeyValueListParser parser = new KeyValueListParser(','); if ("null".equals(line)) { parser.setString(line + "=0"); } else { parser.setString(line); } int divideBy= getDisplayValueFix(); et_inactive_to.setText(String.valueOf(parser.getLong(KEY_INACTIVE_TIMEOUT, INACTIVE_TIMEOUT) / divideBy)); et_sensing_to.setText(String.valueOf(parser.getLong(KEY_SENSING_TIMEOUT, SENSING_TIMEOUT) / divideBy)); et_locating_to.setText(String.valueOf(parser.getLong(KEY_LOCATING_TIMEOUT, LOCATING_TIMEOUT) / divideBy)); et_location_accuracy.setText(String.valueOf(parser.getFloat(KEY_LOCATION_ACCURACY, LOCATION_ACCURACY))); et_motion_inactive_to.setText(String.valueOf(parser.getLong(KEY_MOTION_INACTIVE_TIMEOUT, MOTION_INACTIVE_TIMEOUT) / divideBy)); et_idle_after_inactive_to.setText(String.valueOf(parser.getLong(KEY_IDLE_AFTER_INACTIVE_TIMEOUT, IDLE_AFTER_INACTIVE_TIMEOUT) / divideBy)); et_idle_pending_to.setText(String.valueOf(parser.getLong(KEY_IDLE_PENDING_TIMEOUT, IDLE_PENDING_TIMEOUT) / divideBy)); et_max_idle_pending_to.setText(String.valueOf(parser.getLong(KEY_MAX_IDLE_PENDING_TIMEOUT, MAX_IDLE_PENDING_TIMEOUT) / divideBy)); et_idle_pending_factor.setText(String.valueOf(parser.getFloat(KEY_IDLE_PENDING_FACTOR, IDLE_PENDING_FACTOR))); et_idle_to.setText(String.valueOf(parser.getLong(KEY_IDLE_TIMEOUT, IDLE_TIMEOUT) / divideBy)); et_max_idle_to.setText(String.valueOf(parser.getLong(KEY_MAX_IDLE_TIMEOUT, MAX_IDLE_TIMEOUT) / divideBy)); et_idle_factor.setText(String.valueOf(parser.getFloat(KEY_IDLE_FACTOR, IDLE_FACTOR))); et_min_time_to_alarm.setText(String.valueOf(parser.getLong(KEY_MIN_TIME_TO_ALARM, MIN_TIME_TO_ALARM) / divideBy)); et_max_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_MAX_TEMP_APP_WHITELIST_DURATION, MAX_TEMP_APP_WHITELIST_DURATION) / divideBy)); 
et_mms_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_MMS_TEMP_APP_WHITELIST_DURATION, MMS_TEMP_APP_WHITELIST_DURATION) / divideBy)); et_sms_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_SMS_TEMP_APP_WHITELIST_DURATION, SMS_TEMP_APP_WHITELIST_DURATION) / divideBy)); //MUST call the super method when overriding! super.commandOutput(id, line); } @Override public void commandTerminated(int id, String reason) { } @Override public void commandCompleted(int id, int exitcode) { } }; RootShell.getShell(true).add(command); } catch (RootDeniedException e) { e.printStackTrace(); } catch (TimeoutException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }else{ int divideBy = getDisplayValueFix(); et_inactive_to.setText(String.valueOf(INACTIVE_TIMEOUT / divideBy)); et_sensing_to.setText(String.valueOf(SENSING_TIMEOUT / divideBy)); et_locating_to.setText(String.valueOf(LOCATING_TIMEOUT / divideBy)); et_location_accuracy.setText(String.valueOf(LOCATION_ACCURACY)); et_motion_inactive_to.setText(String.valueOf(MOTION_INACTIVE_TIMEOUT / divideBy)); et_idle_after_inactive_to.setText(String.valueOf(IDLE_AFTER_INACTIVE_TIMEOUT / divideBy)); et_idle_pending_to.setText(String.valueOf(IDLE_PENDING_TIMEOUT / divideBy)); et_max_idle_pending_to.setText(String.valueOf(MAX_IDLE_PENDING_TIMEOUT / divideBy)); et_idle_pending_factor.setText(String.valueOf(IDLE_PENDING_FACTOR)); et_idle_to.setText(String.valueOf(IDLE_TIMEOUT / divideBy)); et_max_idle_to.setText(String.valueOf(MAX_IDLE_TIMEOUT / divideBy)); et_idle_factor.setText(String.valueOf(IDLE_FACTOR)); et_min_time_to_alarm.setText(String.valueOf(MIN_TIME_TO_ALARM / divideBy)); et_max_temp_app_whitelist_duration.setText(String.valueOf(MAX_TEMP_APP_WHITELIST_DURATION / divideBy)); et_mms_temp_app_whitelist_duration.setText(String.valueOf(MMS_TEMP_APP_WHITELIST_DURATION / divideBy)); et_sms_temp_app_whitelist_duration.setText(String.valueOf(SMS_TEMP_APP_WHITELIST_DURATION / divideBy)); } } private int getDisplayValueFix() { displayValueIn = Integer.valueOf(sharedPref.getString("list_display_value_in", "-1")); int divideBy = 1; switch (displayValueIn){ case -1: divideBy = 1; break; case 0: divideBy = millisecondsInOneSecond; break; } return divideBy; } private void save(){ int multiplyBy = getDisplayValueFix(); StringBuilder sb = new StringBuilder(); sb.append(KEY_INACTIVE_TIMEOUT + "=" + Long.valueOf(et_inactive_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_SENSING_TIMEOUT + "=" + Long.valueOf(et_sensing_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_LOCATING_TIMEOUT + "=" + Long.valueOf(et_locating_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_LOCATION_ACCURACY + "=" + Float.valueOf(et_location_accuracy.getText().toString()) + ","); sb.append(KEY_MOTION_INACTIVE_TIMEOUT + "=" + Long.valueOf(et_motion_inactive_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_IDLE_AFTER_INACTIVE_TIMEOUT + "=" + Long.valueOf(et_idle_after_inactive_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_IDLE_PENDING_TIMEOUT + "=" + Long.valueOf(et_idle_pending_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_MAX_IDLE_PENDING_TIMEOUT + "=" + Long.valueOf(et_max_idle_pending_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_IDLE_PENDING_FACTOR + "=" + Float.valueOf(et_idle_pending_factor.getText().toString()) + ","); sb.append(KEY_IDLE_TIMEOUT + "=" + Long.valueOf(et_idle_to.getText().toString()) * multiplyBy + ","); 
sb.append(KEY_MAX_IDLE_TIMEOUT + "=" + Long.valueOf(et_max_idle_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_IDLE_FACTOR + "=" + Float.valueOf(et_idle_factor.getText().toString()) + ","); sb.append(KEY_MIN_TIME_TO_ALARM + "=" + Long.valueOf(et_min_time_to_alarm.getText().toString()) * multiplyBy + ","); sb.append(KEY_MAX_TEMP_APP_WHITELIST_DURATION + "=" + Long.valueOf(et_max_temp_app_whitelist_duration.getText().toString()) * multiplyBy + ","); sb.append(KEY_MMS_TEMP_APP_WHITELIST_DURATION + "=" + Long.valueOf(et_mms_temp_app_whitelist_duration.getText().toString()) * multiplyBy + ","); sb.append(KEY_SMS_TEMP_APP_WHITELIST_DURATION + "=" + Long.valueOf(et_sms_temp_app_whitelist_duration.getText().toString()) * multiplyBy); if(hasRoot) { try { Command command = new Command(0, "settings put global device_idle_constants " + sb.toString()) { @Override public void commandOutput(int id, String line) { //MUST call the super method when overriding! super.commandOutput(id, line); } @Override public void commandTerminated(int id, String reason) { } @Override public void commandCompleted(int id, int exitcode) { Toast.makeText(MainActivity.this, "Saved", Toast.LENGTH_SHORT).show(); } }; RootShell.getShell(true).add(command); } catch (RootDeniedException e) { e.printStackTrace(); } catch (TimeoutException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }else{ final String command = "adb shell settings put global device_idle_constants " + sb.toString(); AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle("ADB Command"); builder.setMessage(command); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); builder.setNegativeButton("Copy to clipboard", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { ClipboardManager manager = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE); manager.setText(command); Toast.makeText(MainActivity.this, "Copied to clipboard", Toast.LENGTH_SHORT).show(); } }); AlertDialog dialog = builder.create(); dialog.show(); } } private void restoreDefaults(){ StringBuilder sb = new StringBuilder(); sb.append(KEY_INACTIVE_TIMEOUT + "=" + INACTIVE_TIMEOUT + ","); sb.append(KEY_SENSING_TIMEOUT + "=" + SENSING_TIMEOUT + ","); sb.append(KEY_LOCATING_TIMEOUT + "=" + LOCATING_TIMEOUT + ","); sb.append(KEY_LOCATION_ACCURACY + "=" + LOCATION_ACCURACY + ","); sb.append(KEY_MOTION_INACTIVE_TIMEOUT + "=" + MOTION_INACTIVE_TIMEOUT + ","); sb.append(KEY_IDLE_AFTER_INACTIVE_TIMEOUT + "=" + IDLE_AFTER_INACTIVE_TIMEOUT + ","); sb.append(KEY_IDLE_PENDING_TIMEOUT + "=" + IDLE_PENDING_TIMEOUT + ","); sb.append(KEY_MAX_IDLE_PENDING_TIMEOUT + "=" + MAX_IDLE_PENDING_TIMEOUT + ","); sb.append(KEY_IDLE_PENDING_FACTOR + "=" + IDLE_PENDING_FACTOR + ","); sb.append(KEY_IDLE_TIMEOUT + "=" + IDLE_TIMEOUT + ","); sb.append(KEY_MAX_IDLE_TIMEOUT + "=" + MAX_IDLE_TIMEOUT + ","); sb.append(KEY_IDLE_FACTOR + "=" + IDLE_FACTOR + ","); sb.append(KEY_MIN_TIME_TO_ALARM + "=" + MIN_TIME_TO_ALARM + ","); sb.append(KEY_MAX_TEMP_APP_WHITELIST_DURATION + "=" + MAX_TEMP_APP_WHITELIST_DURATION + ","); sb.append(KEY_MMS_TEMP_APP_WHITELIST_DURATION + "=" + MMS_TEMP_APP_WHITELIST_DURATION + ","); sb.append(KEY_SMS_TEMP_APP_WHITELIST_DURATION + "=" + SMS_TEMP_APP_WHITELIST_DURATION); if(hasRoot) { try { Command command = new Command(0, "settings put global device_idle_constants " + sb.toString()) { @Override public void 
commandOutput(int id, String line) { //MUST call the super method when overriding! super.commandOutput(id, line); } @Override public void commandTerminated(int id, String reason) { } @Override public void commandCompleted(int id, int exitcode) { Toast.makeText(MainActivity.this, "Defaults restored", Toast.LENGTH_SHORT).show(); } }; RootShell.getShell(true).add(command); } catch (RootDeniedException e) { e.printStackTrace(); } catch (TimeoutException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }else{ final String command = "adb shell settings put global device_idle_constants " + sb.toString(); AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle("ADB Command"); builder.setMessage(command); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); builder.setNegativeButton("Copy to clipboard", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { ClipboardManager manager = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE); manager.setText(command); Toast.makeText(MainActivity.this, "Copied to clipboard", Toast.LENGTH_SHORT).show(); } }); AlertDialog dialog = builder.create(); dialog.show(); } //Show changes Toast.makeText(MainActivity.this, "Refreshing settings", Toast.LENGTH_SHORT).show(); getSettings(); } private void applyProfile(String settings){ if(hasRoot) { try { Command command = new Command(0, "settings put global device_idle_constants " + settings) { @Override public void commandOutput(int id, String line) { //MUST call the super method when overriding! super.commandOutput(id, line); } @Override public void commandTerminated(int id, String reason) { } @Override public void commandCompleted(int id, int exitcode) { Toast.makeText(MainActivity.this, "Defaults restored", Toast.LENGTH_SHORT).show(); } }; RootShell.getShell(true).add(command); } catch (RootDeniedException e) { e.printStackTrace(); } catch (TimeoutException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }else{ final String command = "adb shell settings put global device_idle_constants " + settings; AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle("ADB Command"); builder.setMessage(command); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); builder.setNegativeButton("Copy to clipboard", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { ClipboardManager manager = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE); manager.setText(command); Toast.makeText(MainActivity.this, "Copied to clipboard", Toast.LENGTH_SHORT).show(); } }); AlertDialog dialog = builder.create(); dialog.show(); } //Show changes Toast.makeText(MainActivity.this, "Refreshing settings", Toast.LENGTH_SHORT).show(); KeyValueListParser parser = new KeyValueListParser(','); parser.setString(settings); int divideBy= getDisplayValueFix(); et_inactive_to.setText(String.valueOf(parser.getLong(KEY_INACTIVE_TIMEOUT, INACTIVE_TIMEOUT) / divideBy)); et_sensing_to.setText(String.valueOf(parser.getLong(KEY_SENSING_TIMEOUT, SENSING_TIMEOUT) / divideBy)); et_locating_to.setText(String.valueOf(parser.getLong(KEY_LOCATING_TIMEOUT, LOCATING_TIMEOUT) / divideBy)); et_location_accuracy.setText(String.valueOf(parser.getFloat(KEY_LOCATION_ACCURACY, LOCATION_ACCURACY))); 
et_motion_inactive_to.setText(String.valueOf(parser.getLong(KEY_MOTION_INACTIVE_TIMEOUT, MOTION_INACTIVE_TIMEOUT) / divideBy)); et_idle_after_inactive_to.setText(String.valueOf(parser.getLong(KEY_IDLE_AFTER_INACTIVE_TIMEOUT, IDLE_AFTER_INACTIVE_TIMEOUT) / divideBy)); et_idle_pending_to.setText(String.valueOf(parser.getLong(KEY_IDLE_PENDING_TIMEOUT, IDLE_PENDING_TIMEOUT) / divideBy)); et_max_idle_pending_to.setText(String.valueOf(parser.getLong(KEY_MAX_IDLE_PENDING_TIMEOUT, MAX_IDLE_PENDING_TIMEOUT) / divideBy)); et_idle_pending_factor.setText(String.valueOf(parser.getFloat(KEY_IDLE_PENDING_FACTOR, IDLE_PENDING_FACTOR))); et_idle_to.setText(String.valueOf(parser.getLong(KEY_IDLE_TIMEOUT, IDLE_TIMEOUT) / divideBy)); et_max_idle_to.setText(String.valueOf(parser.getLong(KEY_MAX_IDLE_TIMEOUT, MAX_IDLE_TIMEOUT) / divideBy)); et_idle_factor.setText(String.valueOf(parser.getFloat(KEY_IDLE_FACTOR, IDLE_FACTOR))); et_min_time_to_alarm.setText(String.valueOf(parser.getLong(KEY_MIN_TIME_TO_ALARM, MIN_TIME_TO_ALARM) / divideBy)); et_max_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_MAX_TEMP_APP_WHITELIST_DURATION, MAX_TEMP_APP_WHITELIST_DURATION) / divideBy)); et_mms_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_MMS_TEMP_APP_WHITELIST_DURATION, MMS_TEMP_APP_WHITELIST_DURATION) / divideBy)); et_sms_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_SMS_TEMP_APP_WHITELIST_DURATION, SMS_TEMP_APP_WHITELIST_DURATION) / divideBy)); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); //noinspection SimplifiableIfStatement switch(id){ case R.id.action_profile: AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle("Profiles"); builder.setItems(Profiles.ProfileListNames, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int item) { applyProfile(Profiles.ProfileList[item]); } }); AlertDialog alert = builder.create(); alert.show(); break; case R.id.action_save: save(); break; case R.id.action_restoredefault: restoreDefaults(); break; case R.id.action_settings: startActivity(new Intent(MainActivity.this, SettingsActivity.class)); sharedPref.registerOnSharedPreferenceChangeListener( new SharedPreferences.OnSharedPreferenceChangeListener() { public void onSharedPreferenceChanged(SharedPreferences prefs, String key) { getSettings(); } }); } return super.onOptionsItemSelected(item); } }
app/src/main/java/com/isaacparker/dozesettingseditor/MainActivity.java
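The MainActivity above reads and writes Android's device_idle_constants setting as a comma-separated key=value string, applied either through a root shell or via an adb command shown to the user. A small sketch of how such a string can be assembled is shown here; the key names, default values, and the "settings put global device_idle_constants" command shape come from the record, while the helper class itself is illustrative.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.StringJoiner;

final class DozeConstantsSketch {

    // Builds the comma-separated key=value list expected by device_idle_constants.
    static String buildConstants() {
        Map<String, Object> values = new LinkedHashMap<>();
        values.put("inactive_to", 30 * 60 * 1000L);  // defaults taken from the record
        values.put("sensing_to", 4 * 60 * 1000L);
        values.put("locating_to", 30 * 1000L);
        values.put("location_accuracy", 20f);

        StringJoiner joiner = new StringJoiner(",");
        for (Map.Entry<String, Object> entry : values.entrySet()) {
            joiner.add(entry.getKey() + "=" + entry.getValue());
        }
        return joiner.toString(); // e.g. "inactive_to=1800000,sensing_to=240000,locating_to=30000,location_accuracy=20.0"
    }

    public static void main(String[] args) {
        // Without root, the app displays this adb command instead of executing it through a shell.
        System.out.println("adb shell settings put global device_idle_constants " + buildConstants());
    }
}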
package com.isaacparker.dozesettingseditor; import android.app.AlertDialog; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.PreferenceManager; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.text.ClipboardManager; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.EditText; import android.widget.ImageView; import android.widget.Toast; import com.stericson.RootShell.RootShell; import com.stericson.RootShell.exceptions.RootDeniedException; import com.stericson.RootShell.execution.Command; import java.io.IOException; import java.util.concurrent.TimeoutException; public class MainActivity extends AppCompatActivity { // Key names stored in the settings value. private static final String KEY_INACTIVE_TIMEOUT = "inactive_to"; private static final String KEY_SENSING_TIMEOUT = "sensing_to"; private static final String KEY_LOCATING_TIMEOUT = "locating_to"; private static final String KEY_LOCATION_ACCURACY = "location_accuracy"; private static final String KEY_MOTION_INACTIVE_TIMEOUT = "motion_inactive_to"; private static final String KEY_IDLE_AFTER_INACTIVE_TIMEOUT = "idle_after_inactive_to"; private static final String KEY_IDLE_PENDING_TIMEOUT = "idle_pending_to"; private static final String KEY_MAX_IDLE_PENDING_TIMEOUT = "max_idle_pending_to"; private static final String KEY_IDLE_PENDING_FACTOR = "idle_pending_factor"; private static final String KEY_IDLE_TIMEOUT = "idle_to"; private static final String KEY_MAX_IDLE_TIMEOUT = "max_idle_to"; private static final String KEY_IDLE_FACTOR = "idle_factor"; private static final String KEY_MIN_TIME_TO_ALARM = "min_time_to_alarm"; private static final String KEY_MAX_TEMP_APP_WHITELIST_DURATION = "max_temp_app_whitelist_duration"; private static final String KEY_MMS_TEMP_APP_WHITELIST_DURATION = "mms_temp_app_whitelist_duration"; private static final String KEY_SMS_TEMP_APP_WHITELIST_DURATION = "sms_temp_app_whitelist_duration"; final long INACTIVE_TIMEOUT = 30 * 60 * 1000L; final long SENSING_TIMEOUT = 4 * 60 * 1000L; final long LOCATING_TIMEOUT = 30 * 1000L; final float LOCATION_ACCURACY = 20; final long MOTION_INACTIVE_TIMEOUT = 10 * 60 * 1000L; final long IDLE_AFTER_INACTIVE_TIMEOUT = 30 * 60 * 1000L; final long IDLE_PENDING_TIMEOUT = 5 * 60 * 1000L; final long MAX_IDLE_PENDING_TIMEOUT = 10 * 60 * 1000L; final float IDLE_PENDING_FACTOR = 2; final long IDLE_TIMEOUT = 60 * 60 * 1000L; final long MAX_IDLE_TIMEOUT = 6 * 60 * 60 * 1000L; final long IDLE_FACTOR = 2; final long MIN_TIME_TO_ALARM = 60 * 60 * 1000L; final long MAX_TEMP_APP_WHITELIST_DURATION = 5 * 60 * 1000L; final long MMS_TEMP_APP_WHITELIST_DURATION = 60 * 1000L; final long SMS_TEMP_APP_WHITELIST_DURATION = 20 * 1000L; private static final String DESC_INACTIVE_TIMEOUT = "This is the time, after becoming inactive, at which we start looking at the motion sensor to determine if the device is being left alone. 
We don't do this immediately after going inactive just because we don't want to be continually running the significant motion sensor whenever the screen is off."; private static final String DESC_SENSING_TIMEOUT = "If we don't receive a callback from AnyMotion in this amount of time + locating_to, we will change from STATE_SENSING to STATE_INACTIVE, and any AnyMotion callbacks while not in STATE_SENSING will be ignored."; private static final String DESC_LOCATING_TIMEOUT = "This is how long we will wait to try to get a good location fix before going in to idle mode."; private static final String DESC_LOCATION_ACCURACY = "The desired maximum accuracy (in meters) we consider the location to be good enough to go on to idle. We will be trying to get an accuracy fix at least this good or until locating_to expires."; private static final String DESC_MOTION_INACTIVE_TIMEOUT = "This is the time, after seeing motion, that we wait after becoming inactive from that until we start looking for motion again."; private static final String DESC_IDLE_AFTER_INACTIVE_TIMEOUT = "This is the time, after the inactive timeout elapses, that we will wait looking for significant motion until we truly consider the device to be idle."; private static final String DESC_IDLE_PENDING_TIMEOUT = "This is the initial time, after being idle, that we will allow ourself to be back in the IDLE_PENDING state allowing the system to run normally until we return to idle."; private static final String DESC_MAX_IDLE_PENDING_TIMEOUT = "Maximum pending idle timeout (time spent running) we will be allowed to use."; private static final String DESC_IDLE_PENDING_FACTOR = "Scaling factor to apply to current pending idle timeout each time we cycle through that state."; private static final String DESC_IDLE_TIMEOUT = "This is the initial time that we want to sit in the idle state before waking up again to return to pending idle and allowing normal work to run."; private static final String DESC_MAX_IDLE_TIMEOUT = "Maximum idle duration we will be allowed to use."; private static final String DESC_IDLE_FACTOR = "Scaling factor to apply to current idle timeout each time we cycle through that state."; private static final String DESC_MIN_TIME_TO_ALARM = "This is the minimum time we will allow until the next upcoming alarm for us to actually go in to idle mode."; private static final String DESC_MAX_TEMP_APP_WHITELIST_DURATION = "Max amount of time to temporarily whitelist an app when it receives a high tickle."; private static final String DESC_MMS_TEMP_APP_WHITELIST_DURATION = "Amount of time we would like to whitelist an app that is receiving an MMS."; private static final String DESC_SMS_TEMP_APP_WHITELIST_DURATION = "Amount of time we would like to whitelist an app that is receiving an SMS."; EditText et_inactive_to; EditText et_sensing_to; EditText et_locating_to; EditText et_location_accuracy; EditText et_motion_inactive_to; EditText et_idle_after_inactive_to; EditText et_idle_pending_to; EditText et_max_idle_pending_to; EditText et_idle_pending_factor; EditText et_idle_to; EditText et_max_idle_to; EditText et_idle_factor; EditText et_min_time_to_alarm; EditText et_max_temp_app_whitelist_duration; EditText et_mms_temp_app_whitelist_duration; EditText et_sms_temp_app_whitelist_duration; ImageView iv_inactive_to; ImageView iv_sensing_to; ImageView iv_locating_to; ImageView iv_location_accuracy; ImageView iv_motion_inactive_to; ImageView iv_idle_after_inactive_to; ImageView iv_idle_pending_to; ImageView iv_max_idle_pending_to; ImageView 
iv_idle_pending_factor; ImageView iv_idle_to; ImageView iv_max_idle_to; ImageView iv_idle_factor; ImageView iv_min_time_to_alarm; ImageView iv_max_temp_app_whitelist_duration; ImageView iv_mms_temp_app_whitelist_duration; ImageView iv_sms_temp_app_whitelist_duration; boolean hasRoot = false; SharedPreferences sharedPref; int displayValueIn; int millisecondsInOneSecond = 1000; int millisecondsInOneMinute = 60 * millisecondsInOneSecond; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); sharedPref = PreferenceManager.getDefaultSharedPreferences(this); et_inactive_to = (EditText) findViewById(R.id.et_inactive_to); et_sensing_to = (EditText) findViewById(R.id.et_sensing_to); et_locating_to = (EditText) findViewById(R.id.et_locating_to); et_location_accuracy = (EditText) findViewById(R.id.et_location_accurary); et_motion_inactive_to = (EditText) findViewById(R.id.et_motion_inactive_to); et_idle_after_inactive_to = (EditText) findViewById(R.id.et_idle_after_inactive_to); et_idle_pending_to = (EditText) findViewById(R.id.et_idle_pending_to); et_max_idle_pending_to = (EditText) findViewById(R.id.et_max_idle_pending_to); et_idle_pending_factor = (EditText) findViewById(R.id.et_idle_pending_factor); et_idle_to = (EditText) findViewById(R.id.et_idle_to); et_max_idle_to = (EditText) findViewById(R.id.et_max_idle_to); et_idle_factor = (EditText) findViewById(R.id.et_idle_factor); et_min_time_to_alarm = (EditText) findViewById(R.id.et_min_time_to_alarm); et_max_temp_app_whitelist_duration = (EditText) findViewById(R.id.et_max_temp_app_whitelist_duration); et_mms_temp_app_whitelist_duration = (EditText) findViewById(R.id.et_mms_temp_app_whitelist_duration); et_sms_temp_app_whitelist_duration = (EditText) findViewById(R.id.et_sms_temp_app_whitelist_duration); iv_inactive_to = (ImageView) findViewById(R.id.iv_inactive_to); iv_sensing_to = (ImageView) findViewById(R.id.iv_sensing_to); iv_locating_to = (ImageView) findViewById(R.id.iv_locating_to); iv_location_accuracy = (ImageView) findViewById(R.id.iv_location_accurary); iv_motion_inactive_to = (ImageView) findViewById(R.id.iv_motion_inactive_to); iv_idle_after_inactive_to = (ImageView) findViewById(R.id.iv_idle_after_inactive_to); iv_idle_pending_to = (ImageView) findViewById(R.id.iv_idle_pending_to); iv_max_idle_pending_to = (ImageView) findViewById(R.id.iv_max_idle_pending_to); iv_idle_pending_factor = (ImageView) findViewById(R.id.iv_idle_pending_factor); iv_idle_to = (ImageView) findViewById(R.id.iv_idle_to); iv_max_idle_to = (ImageView) findViewById(R.id.iv_max_idle_to); iv_idle_factor = (ImageView) findViewById(R.id.iv_idle_factor); iv_min_time_to_alarm = (ImageView) findViewById(R.id.iv_min_time_to_alarm); iv_max_temp_app_whitelist_duration = (ImageView) findViewById(R.id.iv_max_temp_app_whitelist_duration); iv_mms_temp_app_whitelist_duration = (ImageView) findViewById(R.id.iv_mms_temp_app_whitelist_duration); iv_sms_temp_app_whitelist_duration = (ImageView) findViewById(R.id.iv_sms_temp_app_whitelist_duration); setInfoOnClick(); if (RootShell.isAccessGiven()) { hasRoot = true; //getSettings(); }else{ hasRoot = false; //Toast.makeText(this, "Root access required!", Toast.LENGTH_SHORT).show(); //finish(); } getSettings(); } private void setInfoOnClick() { iv_inactive_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder 
builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_INACTIVE_TIMEOUT); builder.setMessage(DESC_INACTIVE_TIMEOUT + "\n\nDefault: " + String.valueOf(INACTIVE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_sensing_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_SENSING_TIMEOUT); builder.setMessage(DESC_SENSING_TIMEOUT + "\n\nDefault: " + String.valueOf(SENSING_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_locating_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_LOCATING_TIMEOUT); builder.setMessage(DESC_LOCATING_TIMEOUT + "\n\nDefault: " + String.valueOf(LOCATING_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_location_accuracy.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_LOCATION_ACCURACY); builder.setMessage(DESC_LOCATION_ACCURACY + "\n\nDefault: " + String.valueOf(LOCATION_ACCURACY)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_motion_inactive_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MOTION_INACTIVE_TIMEOUT); builder.setMessage(DESC_MOTION_INACTIVE_TIMEOUT + "\n\nDefault: " + String.valueOf(MOTION_INACTIVE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_after_inactive_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_AFTER_INACTIVE_TIMEOUT); builder.setMessage(DESC_IDLE_AFTER_INACTIVE_TIMEOUT + "\n\nDefault: " + String.valueOf(IDLE_AFTER_INACTIVE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_pending_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_PENDING_TIMEOUT); builder.setMessage(DESC_IDLE_PENDING_TIMEOUT + "\n\nDefault: " + String.valueOf(IDLE_PENDING_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); 
dialog.show(); } }); iv_max_idle_pending_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MAX_IDLE_PENDING_TIMEOUT); builder.setMessage(DESC_MAX_IDLE_PENDING_TIMEOUT + "\n\nDefault: " + String.valueOf(MAX_IDLE_PENDING_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_pending_factor.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_PENDING_FACTOR); builder.setMessage(DESC_IDLE_PENDING_FACTOR + "\n\nDefault: " + String.valueOf(IDLE_PENDING_FACTOR)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_TIMEOUT); builder.setMessage(DESC_IDLE_TIMEOUT + "\n\nDefault: " + String.valueOf(IDLE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_max_idle_to.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MAX_IDLE_TIMEOUT); builder.setMessage(DESC_MAX_IDLE_TIMEOUT + "\n\nDefault: " + String.valueOf(MAX_IDLE_TIMEOUT)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_idle_factor.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_IDLE_FACTOR); builder.setMessage(DESC_IDLE_FACTOR + "\n\nDefault: " + String.valueOf(IDLE_FACTOR)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_min_time_to_alarm.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MIN_TIME_TO_ALARM); builder.setMessage(DESC_MIN_TIME_TO_ALARM + "\n\nDefault: " + String.valueOf(MIN_TIME_TO_ALARM)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_max_temp_app_whitelist_duration.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MAX_TEMP_APP_WHITELIST_DURATION); builder.setMessage(DESC_MAX_TEMP_APP_WHITELIST_DURATION + "\n\nDefault: " + String.valueOf(MAX_TEMP_APP_WHITELIST_DURATION)); builder.setPositiveButton("OK", new 
DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_mms_temp_app_whitelist_duration.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_MMS_TEMP_APP_WHITELIST_DURATION); builder.setMessage(DESC_MMS_TEMP_APP_WHITELIST_DURATION + "\n\nDefault: " + String.valueOf(MMS_TEMP_APP_WHITELIST_DURATION)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); iv_sms_temp_app_whitelist_duration.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle(KEY_SMS_TEMP_APP_WHITELIST_DURATION); builder.setMessage(DESC_SMS_TEMP_APP_WHITELIST_DURATION + "\n\nDefault: " + String.valueOf(SMS_TEMP_APP_WHITELIST_DURATION)); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); AlertDialog dialog = builder.create(); dialog.show(); } }); } private void getSettings() { if(hasRoot) { try { Command command = new Command(0, "settings get global device_idle_constants") { @Override public void commandOutput(int id, String line) { KeyValueListParser parser = new KeyValueListParser(','); if ("null".equals(line)) { parser.setString(line + "=0"); } else { parser.setString(line); } int divideBy= getDisplayValueFix(); et_inactive_to.setText(String.valueOf(parser.getLong(KEY_INACTIVE_TIMEOUT, INACTIVE_TIMEOUT) / divideBy)); et_sensing_to.setText(String.valueOf(parser.getLong(KEY_SENSING_TIMEOUT, SENSING_TIMEOUT) / divideBy)); et_locating_to.setText(String.valueOf(parser.getLong(KEY_LOCATING_TIMEOUT, LOCATING_TIMEOUT) / divideBy)); et_location_accuracy.setText(String.valueOf(parser.getFloat(KEY_LOCATION_ACCURACY, LOCATION_ACCURACY))); et_motion_inactive_to.setText(String.valueOf(parser.getLong(KEY_MOTION_INACTIVE_TIMEOUT, MOTION_INACTIVE_TIMEOUT) / divideBy)); et_idle_after_inactive_to.setText(String.valueOf(parser.getLong(KEY_IDLE_AFTER_INACTIVE_TIMEOUT, IDLE_AFTER_INACTIVE_TIMEOUT) / divideBy)); et_idle_pending_to.setText(String.valueOf(parser.getLong(KEY_IDLE_PENDING_TIMEOUT, IDLE_PENDING_TIMEOUT) / divideBy)); et_max_idle_pending_to.setText(String.valueOf(parser.getLong(KEY_MAX_IDLE_PENDING_TIMEOUT, MAX_IDLE_PENDING_TIMEOUT) / divideBy)); et_idle_pending_factor.setText(String.valueOf(parser.getFloat(KEY_IDLE_PENDING_FACTOR, IDLE_PENDING_FACTOR))); et_idle_to.setText(String.valueOf(parser.getLong(KEY_IDLE_TIMEOUT, IDLE_TIMEOUT) / divideBy)); et_max_idle_to.setText(String.valueOf(parser.getLong(KEY_MAX_IDLE_TIMEOUT, MAX_IDLE_TIMEOUT) / divideBy)); et_idle_factor.setText(String.valueOf(parser.getFloat(KEY_IDLE_FACTOR, IDLE_FACTOR))); et_min_time_to_alarm.setText(String.valueOf(parser.getLong(KEY_MIN_TIME_TO_ALARM, MIN_TIME_TO_ALARM) / divideBy)); et_max_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_MAX_TEMP_APP_WHITELIST_DURATION, MAX_TEMP_APP_WHITELIST_DURATION) / divideBy)); et_mms_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_MMS_TEMP_APP_WHITELIST_DURATION, MMS_TEMP_APP_WHITELIST_DURATION) / divideBy)); 
et_sms_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_SMS_TEMP_APP_WHITELIST_DURATION, SMS_TEMP_APP_WHITELIST_DURATION) / divideBy)); //MUST call the super method when overriding! super.commandOutput(id, line); } @Override public void commandTerminated(int id, String reason) { } @Override public void commandCompleted(int id, int exitcode) { } }; RootShell.getShell(true).add(command); } catch (RootDeniedException e) { e.printStackTrace(); } catch (TimeoutException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }else{ int divideBy = getDisplayValueFix(); et_inactive_to.setText(String.valueOf(INACTIVE_TIMEOUT / divideBy)); et_sensing_to.setText(String.valueOf(SENSING_TIMEOUT / divideBy)); et_locating_to.setText(String.valueOf(LOCATING_TIMEOUT / divideBy)); et_location_accuracy.setText(String.valueOf(LOCATION_ACCURACY)); et_motion_inactive_to.setText(String.valueOf(MOTION_INACTIVE_TIMEOUT / divideBy)); et_idle_after_inactive_to.setText(String.valueOf(IDLE_AFTER_INACTIVE_TIMEOUT / divideBy)); et_idle_pending_to.setText(String.valueOf(IDLE_PENDING_TIMEOUT / divideBy)); et_max_idle_pending_to.setText(String.valueOf(MAX_IDLE_PENDING_TIMEOUT / divideBy)); et_idle_pending_factor.setText(String.valueOf(IDLE_PENDING_FACTOR)); et_idle_to.setText(String.valueOf(IDLE_TIMEOUT / divideBy)); et_max_idle_to.setText(String.valueOf(MAX_IDLE_TIMEOUT / divideBy)); et_idle_factor.setText(String.valueOf(IDLE_FACTOR)); et_min_time_to_alarm.setText(String.valueOf(MIN_TIME_TO_ALARM / divideBy)); et_max_temp_app_whitelist_duration.setText(String.valueOf(MAX_TEMP_APP_WHITELIST_DURATION / divideBy)); et_mms_temp_app_whitelist_duration.setText(String.valueOf(MMS_TEMP_APP_WHITELIST_DURATION / divideBy)); et_sms_temp_app_whitelist_duration.setText(String.valueOf(SMS_TEMP_APP_WHITELIST_DURATION / divideBy)); } } private int getDisplayValueFix() { displayValueIn = Integer.valueOf(sharedPref.getString("list_display_value_in", "-1")); int divideBy = 1; switch (displayValueIn){ case -1: divideBy = 1; break; case 0: divideBy = millisecondsInOneSecond; break; } return divideBy; } private void save(){ int multiplyBy = getDisplayValueFix(); StringBuilder sb = new StringBuilder(); sb.append(KEY_INACTIVE_TIMEOUT + "=" + Long.valueOf(et_inactive_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_SENSING_TIMEOUT + "=" + Long.valueOf(et_sensing_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_LOCATING_TIMEOUT + "=" + Long.valueOf(et_locating_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_LOCATION_ACCURACY + "=" + Float.valueOf(et_location_accuracy.getText().toString()) + ","); sb.append(KEY_MOTION_INACTIVE_TIMEOUT + "=" + Long.valueOf(et_motion_inactive_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_IDLE_AFTER_INACTIVE_TIMEOUT + "=" + Long.valueOf(et_idle_after_inactive_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_IDLE_PENDING_TIMEOUT + "=" + Long.valueOf(et_idle_pending_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_MAX_IDLE_PENDING_TIMEOUT + "=" + Long.valueOf(et_max_idle_pending_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_IDLE_PENDING_FACTOR + "=" + Float.valueOf(et_idle_pending_factor.getText().toString()) + ","); sb.append(KEY_IDLE_TIMEOUT + "=" + Long.valueOf(et_idle_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_MAX_IDLE_TIMEOUT + "=" + Long.valueOf(et_max_idle_to.getText().toString()) * multiplyBy + ","); sb.append(KEY_IDLE_FACTOR + "=" + 
Float.valueOf(et_idle_factor.getText().toString()) + ","); sb.append(KEY_MIN_TIME_TO_ALARM + "=" + Long.valueOf(et_min_time_to_alarm.getText().toString()) * multiplyBy + ","); sb.append(KEY_MAX_TEMP_APP_WHITELIST_DURATION + "=" + Long.valueOf(et_max_temp_app_whitelist_duration.getText().toString()) * multiplyBy + ","); sb.append(KEY_MMS_TEMP_APP_WHITELIST_DURATION + "=" + Long.valueOf(et_mms_temp_app_whitelist_duration.getText().toString()) * multiplyBy + ","); sb.append(KEY_SMS_TEMP_APP_WHITELIST_DURATION + "=" + Long.valueOf(et_sms_temp_app_whitelist_duration.getText().toString()) * multiplyBy); if(hasRoot) { try { Command command = new Command(0, "settings put global device_idle_constants " + sb.toString()) { @Override public void commandOutput(int id, String line) { //MUST call the super method when overriding! super.commandOutput(id, line); } @Override public void commandTerminated(int id, String reason) { } @Override public void commandCompleted(int id, int exitcode) { Toast.makeText(MainActivity.this, "Saved", Toast.LENGTH_SHORT).show(); } }; RootShell.getShell(true).add(command); } catch (RootDeniedException e) { e.printStackTrace(); } catch (TimeoutException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }else{ final String command = "adb shell settings put global device_idle_constants " + sb.toString(); AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle("ADB Command"); builder.setMessage(command); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); builder.setNegativeButton("Copy to clipboard", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { ClipboardManager manager = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE); manager.setText(command); Toast.makeText(MainActivity.this, "Copied to clipboard", Toast.LENGTH_SHORT).show(); } }); AlertDialog dialog = builder.create(); dialog.show(); } } private void restoreDefaults(){ StringBuilder sb = new StringBuilder(); sb.append(KEY_INACTIVE_TIMEOUT + "=" + INACTIVE_TIMEOUT + ","); sb.append(KEY_SENSING_TIMEOUT + "=" + SENSING_TIMEOUT + ","); sb.append(KEY_LOCATING_TIMEOUT + "=" + LOCATING_TIMEOUT + ","); sb.append(KEY_LOCATION_ACCURACY + "=" + LOCATION_ACCURACY + ","); sb.append(KEY_MOTION_INACTIVE_TIMEOUT + "=" + MOTION_INACTIVE_TIMEOUT + ","); sb.append(KEY_IDLE_AFTER_INACTIVE_TIMEOUT + "=" + IDLE_AFTER_INACTIVE_TIMEOUT + ","); sb.append(KEY_IDLE_PENDING_TIMEOUT + "=" + IDLE_PENDING_TIMEOUT + ","); sb.append(KEY_MAX_IDLE_PENDING_TIMEOUT + "=" + MAX_IDLE_PENDING_TIMEOUT + ","); sb.append(KEY_IDLE_PENDING_FACTOR + "=" + IDLE_PENDING_FACTOR + ","); sb.append(KEY_IDLE_TIMEOUT + "=" + IDLE_TIMEOUT + ","); sb.append(KEY_MAX_IDLE_TIMEOUT + "=" + MAX_IDLE_TIMEOUT + ","); sb.append(KEY_IDLE_FACTOR + "=" + IDLE_FACTOR + ","); sb.append(KEY_MIN_TIME_TO_ALARM + "=" + MIN_TIME_TO_ALARM + ","); sb.append(KEY_MAX_TEMP_APP_WHITELIST_DURATION + "=" + MAX_TEMP_APP_WHITELIST_DURATION + ","); sb.append(KEY_MMS_TEMP_APP_WHITELIST_DURATION + "=" + MMS_TEMP_APP_WHITELIST_DURATION + ","); sb.append(KEY_SMS_TEMP_APP_WHITELIST_DURATION + "=" + SMS_TEMP_APP_WHITELIST_DURATION); if(hasRoot) { try { Command command = new Command(0, "settings put global device_idle_constants " + sb.toString()) { @Override public void commandOutput(int id, String line) { //MUST call the super method when overriding! 
super.commandOutput(id, line); } @Override public void commandTerminated(int id, String reason) { } @Override public void commandCompleted(int id, int exitcode) { Toast.makeText(MainActivity.this, "Defaults restored", Toast.LENGTH_SHORT).show(); } }; RootShell.getShell(true).add(command); } catch (RootDeniedException e) { e.printStackTrace(); } catch (TimeoutException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }else{ final String command = "adb shell settings put global device_idle_constants " + sb.toString(); AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle("ADB Command"); builder.setMessage(command); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); builder.setNegativeButton("Copy to clipboard", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { ClipboardManager manager = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE); manager.setText(command); Toast.makeText(MainActivity.this, "Copied to clipboard", Toast.LENGTH_SHORT).show(); } }); AlertDialog dialog = builder.create(); dialog.show(); } //Show changes Toast.makeText(MainActivity.this, "Refreshing settings", Toast.LENGTH_SHORT).show(); getSettings(); } private void applyProfile(String settings){ if(hasRoot) { try { Command command = new Command(0, "settings put global device_idle_constants " + settings) { @Override public void commandOutput(int id, String line) { //MUST call the super method when overriding! super.commandOutput(id, line); } @Override public void commandTerminated(int id, String reason) { } @Override public void commandCompleted(int id, int exitcode) { Toast.makeText(MainActivity.this, "Defaults restored", Toast.LENGTH_SHORT).show(); } }; RootShell.getShell(true).add(command); } catch (RootDeniedException e) { e.printStackTrace(); } catch (TimeoutException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }else{ final String command = "adb shell settings put global device_idle_constants " + settings; AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this); builder.setTitle("ADB Command"); builder.setMessage(command); builder.setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Nothing } }); builder.setNegativeButton("Copy to clipboard", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { ClipboardManager manager = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE); manager.setText(command); Toast.makeText(MainActivity.this, "Copied to clipboard", Toast.LENGTH_SHORT).show(); } }); AlertDialog dialog = builder.create(); dialog.show(); } //Show changes Toast.makeText(MainActivity.this, "Refreshing settings", Toast.LENGTH_SHORT).show(); KeyValueListParser parser = new KeyValueListParser(','); parser.setString(settings); int divideBy= getDisplayValueFix(); et_inactive_to.setText(String.valueOf(parser.getLong(KEY_INACTIVE_TIMEOUT, INACTIVE_TIMEOUT) / divideBy)); et_sensing_to.setText(String.valueOf(parser.getLong(KEY_SENSING_TIMEOUT, SENSING_TIMEOUT) / divideBy)); et_locating_to.setText(String.valueOf(parser.getLong(KEY_LOCATING_TIMEOUT, LOCATING_TIMEOUT) / divideBy)); et_location_accuracy.setText(String.valueOf(parser.getFloat(KEY_LOCATION_ACCURACY, LOCATION_ACCURACY))); et_motion_inactive_to.setText(String.valueOf(parser.getLong(KEY_MOTION_INACTIVE_TIMEOUT, 
MOTION_INACTIVE_TIMEOUT) / divideBy)); et_idle_after_inactive_to.setText(String.valueOf(parser.getLong(KEY_IDLE_AFTER_INACTIVE_TIMEOUT, IDLE_AFTER_INACTIVE_TIMEOUT) / divideBy)); et_idle_pending_to.setText(String.valueOf(parser.getLong(KEY_IDLE_PENDING_TIMEOUT, IDLE_PENDING_TIMEOUT) / divideBy)); et_max_idle_pending_to.setText(String.valueOf(parser.getLong(KEY_MAX_IDLE_PENDING_TIMEOUT, MAX_IDLE_PENDING_TIMEOUT) / divideBy)); et_idle_pending_factor.setText(String.valueOf(parser.getFloat(KEY_IDLE_PENDING_FACTOR, IDLE_PENDING_FACTOR))); et_idle_to.setText(String.valueOf(parser.getLong(KEY_IDLE_TIMEOUT, IDLE_TIMEOUT) / divideBy)); et_max_idle_to.setText(String.valueOf(parser.getLong(KEY_MAX_IDLE_TIMEOUT, MAX_IDLE_TIMEOUT) / divideBy)); et_idle_factor.setText(String.valueOf(parser.getFloat(KEY_IDLE_FACTOR, IDLE_FACTOR))); et_min_time_to_alarm.setText(String.valueOf(parser.getLong(KEY_MIN_TIME_TO_ALARM, MIN_TIME_TO_ALARM) / divideBy)); et_max_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_MAX_TEMP_APP_WHITELIST_DURATION, MAX_TEMP_APP_WHITELIST_DURATION) / divideBy)); et_mms_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_MMS_TEMP_APP_WHITELIST_DURATION, MMS_TEMP_APP_WHITELIST_DURATION) / divideBy)); et_sms_temp_app_whitelist_duration.setText(String.valueOf(parser.getLong(KEY_SMS_TEMP_APP_WHITELIST_DURATION, SMS_TEMP_APP_WHITELIST_DURATION) / divideBy)); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); //noinspection SimplifiableIfStatement switch(id){ case R.id.action_profile: AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle("Profiles"); builder.setItems(Profiles.ProfileListNames, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int item) { applyProfile(Profiles.ProfileList[item]); } }); AlertDialog alert = builder.create(); alert.show(); break; case R.id.action_save: save(); break; case R.id.action_restoredefault: restoreDefaults(); break; case R.id.action_settings: startActivity(new Intent(MainActivity.this, SettingsActivity.class)); sharedPref.registerOnSharedPreferenceChangeListener( new SharedPreferences.OnSharedPreferenceChangeListener() { public void onSharedPreferenceChanged(SharedPreferences prefs, String key) { getSettings(); } }); } return super.onOptionsItemSelected(item); } }
Catch when device settings are unavailable
app/src/main/java/com/isaacparker/dozesettingseditor/MainActivity.java
Catch when device settings are unavailable
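The record above guards against `settings get global device_idle_constants` printing the literal string "null" on devices where the setting has never been written: the app feeds the parser "null=0" so every lookup falls back to its default. The following is a minimal, self-contained sketch of that comma-separated key=value parsing with an equivalent null guard. It is a simplified stand-in for the KeyValueListParser the record relies on (only setString and getLong are sketched), the key string "inactive_to" is illustrative, and treating "null" as an empty list has the same effect as the record's line + "=0" workaround: every lookup returns its default.

import java.util.HashMap;
import java.util.Map;

// Minimal stand-in for the app's KeyValueListParser: splits "k1=v1,k2=v2" pairs
// and falls back to a default when a key is missing or the value is not a number.
public class DeviceIdleConstantsParserSketch {

    private final Map<String, String> values = new HashMap<>();

    // Mirrors the commit's null guard: "settings get" prints the literal string
    // "null" when device_idle_constants has never been set on the device.
    public void setString(String line) {
        values.clear();
        if (line == null || "null".equals(line.trim())) {
            return; // nothing stored yet; every lookup will use its default
        }
        for (String pair : line.split(",")) {
            int eq = pair.indexOf('=');
            if (eq > 0) {
                values.put(pair.substring(0, eq).trim(), pair.substring(eq + 1).trim());
            }
        }
    }

    public long getLong(String key, long defaultValue) {
        String v = values.get(key);
        if (v == null) {
            return defaultValue;
        }
        try {
            return Long.parseLong(v);
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }

    public static void main(String[] args) {
        DeviceIdleConstantsParserSketch parser = new DeviceIdleConstantsParserSketch();

        parser.setString("inactive_to=30000,sensing_to=4000");
        System.out.println(parser.getLong("inactive_to", 1800000L)); // 30000

        parser.setString("null"); // setting never written on this device
        System.out.println(parser.getLong("inactive_to", 1800000L)); // falls back to 1800000
    }
}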
Java
mit
f5164d3ba4fc7c1ab4935438e2c2e2421443d838
0
zkastl/AcmeCourierService
package main;

import java.awt.Toolkit;
import java.time.LocalDate;

import javax.swing.JFrame;

import model.Intersection;
import model.Map;
import model.Road;
import view.LogInScreen;

public class Application {

    public static void main(String[] args) throws Exception {
        CourierSystem.InitializeCourierSystem();

        final JFrame window = new JFrame();
        window.setContentPane(new LogInScreen(window));
        window.setIconImage(
                Toolkit.getDefaultToolkit().getImage(Application.class.getResource("/view/courier logo.png")));
        window.setTitle("ACME Delivery Management System");
        window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        window.setVisible(true);

        // test map stuff
        Map map = new Map();
        map.getRoute(map.getIntersection("A1"), map.getIntersection("F1")).print();
        map.getIntersection("B2").changeClosure(LocalDate.of(2017, 6, 7), LocalDate.of(2018, 6, 9));
        map.getRoute(map.getIntersection("A1"), map.getIntersection("F1")).print();

        /*
         * for(Character a = 'A'; a < 'H'; a++){ for(Integer i = 1; i < 8; i++)
         * { Intersection intersection = new Intersection(a.toString(),
         * i.toString()); map.addIntersection(intersection);
         * System.out.print(intersection.getName()); } System.out.println(); }
         */
    }
}
AcmeCourierSystem/src/main/Application.java
package main;

import java.awt.Toolkit;
import java.time.LocalDate;

import javax.swing.JFrame;

import model.Intersection;
import model.Map;
import model.Road;
import view.LogInScreen;

public class Application {

    public static void main(String[] args) throws Exception {
        CourierSystem.InitializeCourierSystem();

        final JFrame window = new JFrame();
        window.setContentPane(new LogInScreen(window));
        window.setIconImage(
                Toolkit.getDefaultToolkit().getImage(Application.class.getResource("/view/courier logo.png")));
        window.setTitle("ACME Delivery Management System");
        window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        window.setVisible(true);

        // test map stuff
        Map map = new Map();
        map.getRoute(map.getIntersection("a1"), map.getIntersection("f1")).print();
        map.getIntersection("b2").changeClosure(LocalDate.of(2017, 6, 7), LocalDate.of(2018, 6, 9));
        map.getRoute(map.getIntersection("a1"), map.getIntersection("f1")).print();

        /*
         * for(Character a = 'A'; a < 'H'; a++){ for(Integer i = 1; i < 8; i++)
         * { Intersection intersection = new Intersection(a.toString(),
         * i.toString()); map.addIntersection(intersection);
         * System.out.print(intersection.getName()); } System.out.println(); }
         */
    }
}
Switched intersection names to upper case so they work
AcmeCourierSystem/src/main/Application.java
Switched intersection names to upper case so they work
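The change above fixes the test calls by matching the case in which the Map stores its intersection names ("a1" becomes "A1", and so on). If case-insensitive lookups were wanted instead, normalizing the name on both insert and lookup would let either spelling resolve. The sketch below shows only that idea; IntersectionSketch and the lookup class are hypothetical stand-ins, not the project's model.Intersection or model.Map.

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

// Sketch of a case-insensitive name lookup. Only the key-normalization logic matters here.
public class CaseInsensitiveLookupSketch {

    static class IntersectionSketch {
        final String name;
        IntersectionSketch(String name) { this.name = name; }
    }

    private final Map<String, IntersectionSketch> intersections = new HashMap<>();

    public void addIntersection(IntersectionSketch intersection) {
        // Normalize the key once on insert...
        intersections.put(intersection.name.toUpperCase(Locale.ROOT), intersection);
    }

    public IntersectionSketch getIntersection(String name) {
        // ...and again on lookup, so "a1" and "A1" both resolve to the same entry.
        return intersections.get(name.toUpperCase(Locale.ROOT));
    }

    public static void main(String[] args) {
        CaseInsensitiveLookupSketch map = new CaseInsensitiveLookupSketch();
        map.addIntersection(new IntersectionSketch("A1"));
        System.out.println(map.getIntersection("a1") != null); // true
        System.out.println(map.getIntersection("A1") != null); // true
    }
}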
Java
mit
b84c9222f5bd85674c0cabb74742f33465246f90
0
divayprakash/isprime
package io.github.divayprakash.isprime; import android.content.Context; import android.content.Intent; import android.graphics.Color; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.View; import android.widget.Button; import android.widget.TextView; import android.widget.Toast; import android.os.Vibrator; import java.util.Locale; import java.util.concurrent.ThreadLocalRandom; /** * The MainActivity class implements all the control logic for the 'Prime or * Not' Android application. * * @author Divay Prakash */ public class MainActivity extends AppCompatActivity { /** * int variable representing the random number currently being * shown in the app. */ private int RANDOM_NUMBER = 1000; /** * Boolean variable representing if the currently displayed number is * prime or not. */ private boolean IS_PRIME = isPrime(); /** * TextView variable representing the object to display the random number * in. */ private TextView numberDisplay; /** * Vibrator instance variable to enable vibration of device when needed. */ private Vibrator vibratorInstance; /** * int variable representing the request code for the call to HintActivity * by MainActivity on pressing the "Hint" button. */ private static final int HINT_REQUEST = 1; /** * Boolean variable representing if the hint has been taken or not. */ private boolean IS_HINT_TAKEN; /** * Button variable representing the hintButton. */ private Button hintButton; /** * int variable representing the request code for the call to CheatActivity * by MainActivity on pressing the "Cheat" button. */ private static final int CHEAT_REQUEST = 2; /** * Boolean variable representing if the cheat has been taken or not. */ private boolean IS_CHEAT_TAKEN; /** * Button variable representing the cheatButton. */ private Button cheatButton; /** * This method is called at the startup of the application. It initializes * the random number, hint status and cheat status using parameter * savedInstanceState and also assigns the Android VIBRATOR_SERVICE to the * Vibrator instance variable using getSystemService(). * @param savedInstanceState The saved instance state of the application. */ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); numberDisplay = (TextView)findViewById(R.id.numberDisplay); hintButton = (Button)findViewById(R.id.hintButton); cheatButton = (Button)findViewById(R.id.cheatButton); if (savedInstanceState == null) { RANDOM_NUMBER = returnRandom(); IS_PRIME = isPrime(); setNumberDisplay(); enableHintButton(); enableCheatButton(); } else { RANDOM_NUMBER = savedInstanceState.getInt("RandomNumber"); setNumberDisplay(); IS_HINT_TAKEN = savedInstanceState.getBoolean("IsHintTaken"); if (IS_HINT_TAKEN) { disableHintButton(); } else { enableHintButton(); } IS_CHEAT_TAKEN = savedInstanceState.getBoolean("IsCheatTaken"); if (IS_CHEAT_TAKEN) { disableCheatButton(); } else { enableCheatButton(); } } vibratorInstance = (Vibrator) getSystemService(Context.VIBRATOR_SERVICE); } /** * This method is called during application destruction and is used to * store the instance state so as to be able to restore it at the next * initialization. * @param savedInstanceState The saved instance state of the application. 
*/ @Override public void onSaveInstanceState(Bundle savedInstanceState) { savedInstanceState.putInt("RandomNumber", RANDOM_NUMBER); savedInstanceState.putBoolean("IsHintTaken", IS_HINT_TAKEN); savedInstanceState.putBoolean("IsCheatTaken", IS_CHEAT_TAKEN); super.onSaveInstanceState(savedInstanceState); } /** * This is the onClick action handler for the "Next" button. It calls the * returnRandom() method to assign a new random value to the RANDOM_NUMBER * variable. Thereafter, it calls the isPrime() method to determine if * RANDOM_NUMBER is prime or not. It then sets the numberDisplay TextView. * Also, it restores the hint and cheat buttons to their default state. * @param view The View instance passed to this method. */ @SuppressWarnings("unused") public void onNext(View view) { RANDOM_NUMBER = returnRandom(); IS_PRIME = isPrime(); setNumberDisplay(); IS_HINT_TAKEN = false; enableHintButton(); IS_CHEAT_TAKEN = false; enableCheatButton(); } /** * This is the onClick action handler for the "True" button. It checks the * IS_PRIME variable to determine if the answer given is correct or not. * Accordingly, it sets the numberDisplay TextView color using * TextView.setTextColor() method. It also displays a Toast with a * correct/incorrect message. In case of incorrect answer, it uses * Vibrator.vibrate() method to vibrate device for 500ms. * @param view The View instance passed to this method. */ @SuppressWarnings("unused") public void onTrue(View view) { if (IS_PRIME) { makeToast(this, "Your answer is correct!"); setNumberDisplayGreen(); } else { makeToast(this, "Your answer is incorrect!"); setNumberDisplayRed(); vibratorInstance.vibrate(500); } } /** * This is the onClick action handler for the "False" button. It checks the * IS_PRIME variable to determine if the answer given is correct or not. * Accordingly, it sets the numberDisplay TextView color using * TextView.setTextColor() method. It also displays a Toast with a * correct/incorrect message. In case of incorrect answer, it uses * Vibrator.vibrate() method to vibrate device for 500ms. * @param view The View instance passed to this method. */ @SuppressWarnings("unused") public void onFalse(View view) { if (!IS_PRIME) { makeToast(this, "Your answer is correct!"); setNumberDisplayGreen(); } else { makeToast(this, "Your answer is incorrect!"); setNumberDisplayRed(); vibratorInstance.vibrate(500); } } /** * This is the onClick action handler for the "Cheat" button. It creates an * instance of the Intent class and uses it to launch the CheatActivity. * Also, it passes the RANDOM_NUMBER to the CheatActivity using the * putExtra() method of the Intent instance. * @param view The view instance passed to this method. */ @SuppressWarnings("unused") public void onCheat(View view) { Intent intent = new Intent(this, CheatActivity.class); intent.putExtra("RandomNumber", RANDOM_NUMBER); startActivityForResult(intent, CHEAT_REQUEST); } /** * This is the onClick action handler for the "Hint" button. It creates an * instance of the Intent class and uses it to launch the HintActivity. * @param view The view instance passed to this method. */ @SuppressWarnings("unused") public void onHint(View view) { Intent intent = new Intent(this, HintActivity.class); startActivityForResult(intent, HINT_REQUEST); } /** * This is the return handler triggered on returning from any activity. * It uses the requestCode parameter to differentiate between the activities * returning. 
Thereafter, it checks the resultCode to determine if the hint/ * cheat was taken or not and accordingly sets the hint/cheat status. * @param requestCode The request code passed to this method. * @param resultCode The result code passed to this method. * @param data The Intent instance passed to this method. */ @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == HINT_REQUEST) { if (resultCode == RESULT_OK) { if (data != null) { makeToast(this, "Hint Taken!"); IS_HINT_TAKEN = true; disableHintButton(); } } } else if (requestCode == CHEAT_REQUEST){ if (resultCode == RESULT_OK) { if (data != null) { makeToast(this, "Cheat taken!"); IS_CHEAT_TAKEN = true; disableCheatButton(); } } else if (resultCode == RESULT_CANCELED) { if (data != null) { makeToast(this, "Cheat not taken!"); IS_CHEAT_TAKEN = false; enableCheatButton(); } } } } /** * This method enables the cheat button. */ private void enableCheatButton() { cheatButton.setEnabled(true); cheatButton.setBackgroundColor(Color.parseColor("#FFAA66CC")); } /** * This method disables the cheat button. */ private void disableCheatButton() { cheatButton.setEnabled(false); cheatButton.setBackgroundColor(Color.parseColor("#FF616161")); } /** * This method enables the hint button. */ private void enableHintButton() { hintButton.setEnabled(true); hintButton.setBackgroundColor(Color.parseColor("#FF00DDFF")); } /** * This method disables the hint button. */ private void disableHintButton() { hintButton.setEnabled(false); hintButton.setBackgroundColor(Color.parseColor("#FF616161")); } /** * This method sets the numberDisplay TextView. */ private void setNumberDisplay() { numberDisplay.setText(String.format(Locale.US, "%d", RANDOM_NUMBER)); numberDisplay.setTextColor(Color.parseColor("#FF000000")); } /** * This method sets the color of the numberDisplay TextView to green. */ private void setNumberDisplayGreen() { numberDisplay.setTextColor(Color.parseColor("#FF99CC00")); } /** * This method sets the color of the numberDisplay TextView to red. */ private void setNumberDisplayRed() { numberDisplay.setTextColor(Color.parseColor("#FFD50000")); } /** * This method makes a Toast. * @param context The Context instance passed to this method. * @param toastMsg The message to be printed in the Toast. */ private void makeToast(Context context, String toastMsg) { Toast.makeText(context, toastMsg, Toast.LENGTH_SHORT).show(); } /** * This is the onClick action handler for the "Exit" FAB button. It calls * the Android finish() method to safely end application execution and * thereafter calls System.exit() to clean up variables etc. * @param view The View instance passed to this method. */ @SuppressWarnings("unused") public void onExit(View view) { finish(); System.exit(0); } /** * This method checks if the RANDOM_NUMBER is prime or not and returns a * Boolean value. * @return Boolean value */ private boolean isPrime() { for (int Divisor = 2; Divisor < RANDOM_NUMBER / 2; Divisor++) { if (RANDOM_NUMBER % Divisor == 0) return false; } return true; } /** * This method returns a Random integer in the range 2 to 1000 using * ThreadLocalRandom.current().nextInt(). * @return int value */ private int returnRandom() { int MAX = 1000; int MIN = 2; return ThreadLocalRandom.current().nextInt(MIN, MAX + 1); } }
app/src/main/java/io/github/divayprakash/isprime/MainActivity.java
package io.github.divayprakash.isprime; import android.content.Context; import android.content.Intent; import android.graphics.Color; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.View; import android.widget.Button; import android.widget.TextView; import android.widget.Toast; import android.os.Vibrator; import java.util.Locale; import java.util.concurrent.ThreadLocalRandom; /** * The MainActivity class implements all the control logic for the 'Prime or * Not' Android application. * * @author Divay Prakash */ public class MainActivity extends AppCompatActivity { /** * int variable representing the random number currently being * shown in the app. */ private int RANDOM_NUMBER = 1000; /** * Boolean variable representing if the currently displayed number is * prime or not. */ private boolean IS_PRIME = isPrime(); /** * TextView variable representing the object to display the random number * in. */ private TextView numberDisplay; /** * Vibrator instance variable to enable vibration of device when needed. */ private Vibrator vibratorInstance; /** * int variable representing the request code for the call to HintActivity * by MainActivity on pressing the "Hint" button */ private static final int HINT_REQUEST = 1; /** * Boolean variable representing if the hint has been taken or not. */ private boolean IS_HINT_TAKEN; /** * Button variable representing the hintButton. */ private Button hintButton; /** * int variable representing the request code for the call to CheatActivity * by MainActivity on pressing the "Cheat" button */ private static final int CHEAT_REQUEST = 2; /** * Boolean variable representing if the cheat has been taken or not. */ private boolean IS_CHEAT_TAKEN; /** * Button variable representing the cheatButton. */ private Button cheatButton; /** * This method is called at the startup of the application. It initializes * the random number, hint status and cheat status using parameter * savedInstanceState and also assigns the Android VIBRATOR_SERVICE to the * Vibrator instance variable using getSystemService(). * @param savedInstanceState The saved instance state of the application. */ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); numberDisplay = (TextView)findViewById(R.id.numberDisplay); hintButton = (Button)findViewById(R.id.hintButton); cheatButton = (Button)findViewById(R.id.cheatButton); if (savedInstanceState == null) { RANDOM_NUMBER = returnRandom(); IS_PRIME = isPrime(); setNumberDisplay(); enableHintButton(); enableCheatButton(); } else { RANDOM_NUMBER = savedInstanceState.getInt("RandomNumber"); setNumberDisplay(); IS_HINT_TAKEN = savedInstanceState.getBoolean("IsHintTaken"); if (IS_HINT_TAKEN) { disableHintButton(); } else { enableHintButton(); } IS_CHEAT_TAKEN = savedInstanceState.getBoolean("IsCheatTaken"); if (IS_CHEAT_TAKEN) { disableCheatButton(); } else { enableCheatButton(); } } vibratorInstance = (Vibrator) getSystemService(Context.VIBRATOR_SERVICE); } /** * This method is called during application destruction and is used to * store the instance state so as to be able to restore it at the next * initialization. * @param savedInstanceState The saved instance state of the application. 
*/ @Override public void onSaveInstanceState(Bundle savedInstanceState) { savedInstanceState.putInt("RandomNumber", RANDOM_NUMBER); savedInstanceState.putBoolean("IsHintTaken", IS_HINT_TAKEN); savedInstanceState.putBoolean("IsCheatTaken", IS_CHEAT_TAKEN); super.onSaveInstanceState(savedInstanceState); } /** * This is the onClick action handler for the "Next" button. It calls the * returnRandom() method to assign a new random value to the RANDOM_NUMBER * variable. Thereafter, it calls the isPrime() method to determine if * RANDOM_NUMBER is prime or not. It then sets the numberDisplay TextView. * Also, it restores the hint and cheat buttons to their default state. * @param view The View instance passed to this method. */ @SuppressWarnings("unused") public void onNext(View view) { RANDOM_NUMBER = returnRandom(); IS_PRIME = isPrime(); setNumberDisplay(); IS_HINT_TAKEN = false; enableHintButton(); IS_CHEAT_TAKEN = false; enableCheatButton(); } /** * This is the onClick action handler for the "True" button. It checks the * IS_PRIME variable to determine if the answer given is correct or not. * Accordingly, it sets the numberDisplay TextView color using * TextView.setTextColor() method. It also displays a Toast with a * correct/incorrect message. In case of incorrect answer, it uses * Vibrator.vibrate() method to vibrate device for 500ms. * @param view The View instance passed to this method. */ @SuppressWarnings("unused") public void onTrue(View view) { if (IS_PRIME) { makeToast(this, "Your answer is correct!"); setNumberDisplayGreen(); } else { makeToast(this, "Your answer is incorrect!"); setNumberDisplayRed(); vibratorInstance.vibrate(500); } } /** * This is the onClick action handler for the "False" button. It checks the * IS_PRIME variable to determine if the answer given is correct or not. * Accordingly, it sets the numberDisplay TextView color using * TextView.setTextColor() method. It also displays a Toast with a * correct/incorrect message. In case of incorrect answer, it uses * Vibrator.vibrate() method to vibrate device for 500ms. * @param view The View instance passed to this method. */ @SuppressWarnings("unused") public void onFalse(View view) { if (!IS_PRIME) { makeToast(this, "Your answer is correct!"); setNumberDisplayGreen(); } else { makeToast(this, "Your answer is incorrect!"); setNumberDisplayRed(); vibratorInstance.vibrate(500); } } /** * This is the onClick action handler for the "Cheat" button. It creates an * instance of the Intent class and uses it to launch the CheatActivity. * Also, it passes the RANDOM_NUMBER to the CheatActivity using the * putExtra() method of the Intent instance. * @param view The view instance passed to this method. */ @SuppressWarnings("unused") public void onCheat(View view) { Intent intent = new Intent(this, CheatActivity.class); intent.putExtra("RandomNumber", RANDOM_NUMBER); startActivityForResult(intent, CHEAT_REQUEST); } /** * This is the onClick action handler for the "Hint" button. It creates an * instance of the Intent class and uses it to launch the HintActivity. * @param view The view instance passed to this method. */ @SuppressWarnings("unused") public void onHint(View view) { Intent intent = new Intent(this, HintActivity.class); startActivityForResult(intent, HINT_REQUEST); } /** * This is the return handler triggered on returning from any activity. * It uses the requestCode parameter to differentiate between the activities * returning. 
Thereafter, it checks the resultCode to determine if the hint/ * cheat was taken or not and accordingly sets the hint/cheat status. * @param requestCode The request code passed to this method. * @param resultCode The result code passed to this method. * @param data The Intent instance passed to this method. */ @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == HINT_REQUEST) { if (resultCode == RESULT_OK) { if (data != null) { makeToast(this, "Hint Taken!"); IS_HINT_TAKEN = true; disableHintButton(); } } } else if (requestCode == CHEAT_REQUEST){ if (resultCode == RESULT_OK) { if (data != null) { makeToast(this, "Cheat taken!"); IS_CHEAT_TAKEN = true; disableCheatButton(); } } else if (resultCode == RESULT_CANCELED) { if (data != null) { makeToast(this, "Cheat not taken!"); IS_CHEAT_TAKEN = false; enableCheatButton(); } } } } /** * This method enables the cheat button. */ private void enableCheatButton() { cheatButton.setEnabled(true); cheatButton.setBackgroundColor(Color.parseColor("#FFAA66CC")); } /** * This method disables the cheat button. */ private void disableCheatButton() { cheatButton.setEnabled(false); cheatButton.setBackgroundColor(Color.parseColor("#FF616161")); } /** * This method enables the hint button. */ private void enableHintButton() { hintButton.setEnabled(true); hintButton.setBackgroundColor(Color.parseColor("#FF00DDFF")); } /** * This method disables the hint button. */ private void disableHintButton() { hintButton.setEnabled(false); hintButton.setBackgroundColor(Color.parseColor("#FF616161")); } /** * This method sets the numberDisplay TextView. */ private void setNumberDisplay() { numberDisplay.setText(String.format(Locale.US, "%d", RANDOM_NUMBER)); numberDisplay.setTextColor(Color.parseColor("#FF000000")); } /** * This method sets the color of the numberDisplay TextView to green. */ private void setNumberDisplayGreen() { numberDisplay.setTextColor(Color.parseColor("#FF99CC00")); } /** * This method sets the color of the numberDisplay TextView to red. */ private void setNumberDisplayRed() { numberDisplay.setTextColor(Color.parseColor("#FFD50000")); } /** * This method makes a Toast. * @param context The Context instance passed to this method. * @param toastMsg The message to be printed in the Toast. */ private void makeToast(Context context, String toastMsg) { Toast.makeText(context, toastMsg, Toast.LENGTH_SHORT).show(); } /** * This is the onClick action handler for the "Exit" FAB button. It calls * the Android finish() method to safely end application execution and * thereafter calls System.exit() to clean up variables etc. * @param view The View instance passed to this method. */ @SuppressWarnings("unused") public void onExit(View view) { finish(); System.exit(0); } /** * This method checks if the RANDOM_NUMBER is prime or not and returns a * Boolean value. * @return Boolean value */ private boolean isPrime() { for (int Divisor = 2; Divisor < RANDOM_NUMBER / 2; Divisor++) { if (RANDOM_NUMBER % Divisor == 0) return false; } return true; } /** * This method returns a Random integer in the range 2 to 1000 using * ThreadLocalRandom.current().nextInt(). * @return int value */ private int returnRandom() { int MAX = 1000; int MIN = 2; return ThreadLocalRandom.current().nextInt(MIN, MAX + 1); } }
Fix typos
app/src/main/java/io/github/divayprakash/isprime/MainActivity.java
Fix typos
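The "Fix typos" commit above only touches Javadoc, but the isPrime() method in this file is worth a note: its loop condition, Divisor < RANDOM_NUMBER / 2, never executes for RANDOM_NUMBER = 4 (2 < 2 is false), so 4 is reported as prime. A conventional trial-division check tests divisors up to and including the square root of n. The sketch below is a standalone version of that check, not a patch to the app.

// Standalone trial-division primality check, testing divisors up to sqrt(n).
public class PrimeCheckSketch {

    static boolean isPrime(int n) {
        if (n < 2) {
            return false;
        }
        // Any composite n has a divisor no larger than sqrt(n),
        // so stopping at i * i <= n is sufficient (and covers n = 4).
        for (int i = 2; (long) i * i <= n; i++) {
            if (n % i == 0) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(isPrime(4));    // false (the loop bound in the record misses this case)
        System.out.println(isPrime(997));  // true
        System.out.println(isPrime(1000)); // false
    }
}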
Java
mit
7fa266cf9f3bc97c452a799838a2f81dfe56e5ce
0
JornC/bitcoin-transaction-explorer,JornC/bitcoin-transaction-explorer,JornC/bitcoin-transaction-explorer
package com.yoghurt.crypto.transactions.client.util.script;

import java.util.Deque;
import java.util.Iterator;

import com.googlecode.gwt.crypto.bouncycastle.util.Arrays;
import com.yoghurt.crypto.transactions.client.util.transaction.ComputeUtil;
import com.yoghurt.crypto.transactions.client.util.transaction.ScriptOperationUtil;
import com.yoghurt.crypto.transactions.shared.domain.StackObject;

public final class ScriptExecutionUtil {
  private static final byte[] FALSE = new byte[] { };
  private static final byte[] TRUE = new byte[] { 0x01 };

  private ScriptExecutionUtil() {}

  public static void execute(final ExecutionStep step) {
    final Deque<StackObject> stack = step.getStack();

    if(ScriptOperationUtil.isDataPushOperation(step.getInstruction().getOperation())) {
      addStackObject(stack, step.getInstruction().getBytes());
    }

    switch(step.getInstruction().getOperation()) {
    case OP_DUP:
      addStackObject(stack, stack.peek());
      break;
    case OP_2DUP:
      final Iterator<StackObject> dup2Iterator = stack.iterator();
      addStackObject(stack, dup2Iterator.next());
      addStackObject(stack, dup2Iterator.next());
      break;
    case OP_3DUP:
      final Iterator<StackObject> dup3Iterator = stack.iterator();
      addStackObject(stack, dup3Iterator.next());
      addStackObject(stack, dup3Iterator.next());
      addStackObject(stack, dup3Iterator.next());
      break;
    case OP_DROP:
      stack.remove();
      break;
    case OP_2DROP:
      stack.remove();
      stack.remove();
      break;
    case OP_CHECKSIG:
      stack.remove();
      stack.remove();
      // TODO Do actual checksig
      addTrue(stack);
      break;
    case OP_EQUAL:
      final StackObject objEqualA = stack.poll();
      final StackObject objEqualB = stack.poll();
      if(Arrays.areEqual(objEqualA.getBytes(), objEqualB.getBytes())) {
        addTrue(stack);
      } else {
        addFalse(stack);
      }
      break;
    case OP_EQUALVERIFY:
      final StackObject objEqualVerifyA = stack.poll();
      final StackObject objEqualVerifyB = stack.poll();
      if(!Arrays.areEqual(objEqualVerifyA.getBytes(), objEqualVerifyB.getBytes())) {
        addException(step);
      }
      break;
    case OP_HASH160:
      final StackObject poll = stack.poll();
      final byte[] hash160 = ComputeUtil.computeHash160(poll.getBytes());
      addStackObject(stack, hash160);
      break;
    case OP_TRUE:
      addTrue(stack);
      break;
    case OP_FALSE:
      addFalse(stack);
      break;
    default:
    }
  }

  private static void addException(final ExecutionStep step) {
    step.setExecutionError(true);
  }

  private static void addTrue(final Deque<StackObject> stack) {
    addStackObject(stack, TRUE);
  }

  private static void addFalse(final Deque<StackObject> stack) {
    addStackObject(stack, FALSE);
  }

  private static void addStackObject(final Deque<StackObject> stack, final byte[] bytes) {
    addStackObject(stack, new StackObject(bytes));
  }

  private static void addStackObject(final Deque<StackObject> stack, final StackObject object) {
    stack.addFirst(object);
  }
}
bitcoin-transactions-core/src/main/java/com/yoghurt/crypto/transactions/client/util/script/ScriptExecutionUtil.java
package com.yoghurt.crypto.transactions.client.util.script;

import java.util.Deque;
import java.util.Iterator;

import com.googlecode.gwt.crypto.bouncycastle.util.Arrays;
import com.yoghurt.crypto.transactions.client.util.transaction.ComputeUtil;
import com.yoghurt.crypto.transactions.client.util.transaction.ScriptOperationUtil;
import com.yoghurt.crypto.transactions.shared.domain.StackObject;

public final class ScriptExecutionUtil {
  private static final byte[] FALSE = new byte[] { };
  private static final byte[] TRUE = new byte[] { 0x01 };

  private ScriptExecutionUtil() {}

  public static void execute(final ExecutionStep step) {
    final Deque<StackObject> stack = step.getStack();

    if(ScriptOperationUtil.isDataPushOperation(step.getInstruction().getOperation())) {
      addStackObject(stack, step.getInstruction().getBytes());
    }

    switch(step.getInstruction().getOperation()) {
    case OP_DUP:
      addStackObject(stack, stack.peek());
      break;
    case OP_2DUP:
      final Iterator<StackObject> dup2Iterator = stack.iterator();
      addStackObject(stack, dup2Iterator.next());
      addStackObject(stack, dup2Iterator.next());
      break;
    case OP_3DUP:
      final Iterator<StackObject> dup3Iterator = stack.iterator();
      addStackObject(stack, dup3Iterator.next());
      addStackObject(stack, dup3Iterator.next());
      addStackObject(stack, dup3Iterator.next());
      break;
    case OP_DROP:
      stack.remove();
      break;
    case OP_2DROP:
      stack.remove();
      stack.remove();
      break;
    case OP_CHECKSIG:
      stack.remove();
      stack.remove();
      // TODO Do actual checksig
      addTrue(stack);
      break;
    case OP_EQUAL:
      final StackObject objEqualA = stack.poll();
      final StackObject objEqualB = stack.poll();
      if(!Arrays.areEqual(objEqualA.getBytes(), objEqualB.getBytes())) {
        addTrue(stack);
      } else {
        addFalse(stack);
      }
      break;
    case OP_EQUALVERIFY:
      final StackObject objEqualVerifyA = stack.poll();
      final StackObject objEqualVerifyB = stack.poll();
      if(!Arrays.areEqual(objEqualVerifyA.getBytes(), objEqualVerifyB.getBytes())) {
        addException(step);
      }
      break;
    case OP_HASH160:
      final StackObject poll = stack.poll();
      final byte[] hash160 = ComputeUtil.computeHash160(poll.getBytes());
      addStackObject(stack, hash160);
      break;
    case OP_TRUE:
      addTrue(stack);
      break;
    case OP_FALSE:
      addFalse(stack);
      break;
    default:
    }
  }

  private static void addException(final ExecutionStep step) {
    step.setExecutionError(true);
  }

  private static void addTrue(final Deque<StackObject> stack) {
    addStackObject(stack, TRUE);
  }

  private static void addFalse(final Deque<StackObject> stack) {
    addStackObject(stack, FALSE);
  }

  private static void addStackObject(final Deque<StackObject> stack, final byte[] bytes) {
    addStackObject(stack, new StackObject(bytes));
  }

  private static void addStackObject(final Deque<StackObject> stack, final StackObject object) {
    stack.addFirst(object);
  }
}
Fixed bug in OP_EQUAL
bitcoin-transactions-core/src/main/java/com/yoghurt/crypto/transactions/client/util/script/ScriptExecutionUtil.java
Fixed bug in OP_EQUAL
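The fix above removes a stray negation: OP_EQUAL now pushes true only when the two popped stack items are byte-for-byte equal, which matches the opcode's definition in Bitcoin Script ("returns 1 if the inputs are exactly equal, 0 otherwise"). The sketch below shows the same pop, compare, push pattern on a plain Deque<byte[]>, independent of the project's StackObject and ExecutionStep types.

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;

// Minimal illustration of OP_EQUAL semantics: pop two stack items and push
// a "true" (0x01) or "false" (empty) element depending on byte equality.
public class OpEqualSketch {

    private static final byte[] FALSE = new byte[] {};
    private static final byte[] TRUE = new byte[] { 0x01 };

    static void opEqual(Deque<byte[]> stack) {
        byte[] a = stack.pollFirst();
        byte[] b = stack.pollFirst();
        // Push TRUE only when the two items are byte-for-byte equal
        // (the pre-fix code pushed TRUE when they were *not* equal).
        stack.addFirst(Arrays.equals(a, b) ? TRUE : FALSE);
    }

    public static void main(String[] args) {
        Deque<byte[]> stack = new ArrayDeque<>();
        stack.addFirst(new byte[] { 0x0a, 0x0b });
        stack.addFirst(new byte[] { 0x0a, 0x0b });
        opEqual(stack);
        System.out.println(Arrays.equals(stack.peekFirst(), TRUE)); // true
    }
}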
Java
epl-1.0
7742d43b984d779f456dbc00e15ee2f747dc725c
0
sguan-actuate/birt,rrimmana/birt-1,Charling-Huang/birt,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,rrimmana/birt-1,rrimmana/birt-1,Charling-Huang/birt,sguan-actuate/birt,rrimmana/birt-1,Charling-Huang/birt,sguan-actuate/birt,sguan-actuate/birt,Charling-Huang/birt
/******************************************************************************* * Copyright (c) 2004, 2009 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.birt.report.engine.executor; import java.io.IOException; import java.io.Serializable; import java.net.URL; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.Stack; import java.util.logging.Level; import java.util.logging.Logger; import org.eclipse.birt.core.exception.BirtException; import org.eclipse.birt.core.format.DateFormatter; import org.eclipse.birt.core.format.NumberFormatter; import org.eclipse.birt.core.format.StringFormatter; import org.eclipse.birt.core.script.BirtHashMap; import org.eclipse.birt.core.script.ICompiledScript; import org.eclipse.birt.core.script.IScriptContext; import org.eclipse.birt.core.script.ParameterAttribute; import org.eclipse.birt.core.script.ScriptContext; import org.eclipse.birt.core.script.ScriptExpression; import org.eclipse.birt.data.engine.api.IConditionalExpression; import org.eclipse.birt.data.engine.api.IDataQueryDefinition; import org.eclipse.birt.data.engine.api.IScriptExpression; import org.eclipse.birt.data.engine.script.ScriptEvalUtil; import org.eclipse.birt.report.data.adapter.api.AdapterException; import org.eclipse.birt.report.data.adapter.api.DataAdapterUtil; import org.eclipse.birt.report.data.adapter.api.DataRequestSession; import org.eclipse.birt.report.data.adapter.api.ILinkedResult; import org.eclipse.birt.report.engine.adapter.ProgressMonitorProxy; import org.eclipse.birt.report.engine.api.EngineConfig; import org.eclipse.birt.report.engine.api.EngineException; import org.eclipse.birt.report.engine.api.IEngineTask; import org.eclipse.birt.report.engine.api.IHTMLActionHandler; import org.eclipse.birt.report.engine.api.IHTMLImageHandler; import org.eclipse.birt.report.engine.api.IProgressMonitor; import org.eclipse.birt.report.engine.api.IRenderOption; import org.eclipse.birt.report.engine.api.IReportDocument; import org.eclipse.birt.report.engine.api.IReportRunnable; import org.eclipse.birt.report.engine.api.IStatusHandler; import org.eclipse.birt.report.engine.api.impl.EngineTask; import org.eclipse.birt.report.engine.api.impl.ReportDocumentWriter; import org.eclipse.birt.report.engine.api.impl.ReportEngine; import org.eclipse.birt.report.engine.api.impl.ReportRunnable; import org.eclipse.birt.report.engine.api.script.IReportContext; import org.eclipse.birt.report.engine.content.IContent; import org.eclipse.birt.report.engine.content.IReportContent; import org.eclipse.birt.report.engine.content.impl.ReportContent; import org.eclipse.birt.report.engine.data.IDataEngine; import org.eclipse.birt.report.engine.data.dte.DocumentDataSource; import org.eclipse.birt.report.engine.executor.optimize.ExecutionOptimize; import org.eclipse.birt.report.engine.executor.optimize.ExecutionPolicy; import org.eclipse.birt.report.engine.extension.IBaseResultSet; import 
org.eclipse.birt.report.engine.extension.ICubeResultSet; import org.eclipse.birt.report.engine.extension.IQueryResultSet; import org.eclipse.birt.report.engine.i18n.MessageConstants; import org.eclipse.birt.report.engine.ir.Expression; import org.eclipse.birt.report.engine.ir.Report; import org.eclipse.birt.report.engine.ir.ReportElementDesign; import org.eclipse.birt.report.engine.ir.ReportItemDesign; import org.eclipse.birt.report.engine.parser.ReportParser; import org.eclipse.birt.report.engine.toc.TOCBuilder; import org.eclipse.birt.report.engine.util.ResourceLocatorWrapper; import org.eclipse.birt.report.model.api.DesignElementHandle; import org.eclipse.birt.report.model.api.IResourceLocator; import org.eclipse.birt.report.model.api.ModuleHandle; import org.eclipse.birt.report.model.api.ReportDesignHandle; import org.eclipse.birt.report.model.api.simpleapi.IDesignElement; import org.eclipse.birt.report.model.api.simpleapi.SimpleElementFactory; import com.ibm.icu.util.TimeZone; import com.ibm.icu.util.ULocale; /** * Captures the report execution context. This class is needed for accessing * global information during execution as well as for for scripting. It * implements the <code>report</code> Javascript object, as well as other * objects such as <code>report.params</code>,<code>report.config</code>, * <code>report.design</code>, etc. * */ public class ExecutionContext { /** * how many errors or exceptions will be registered. */ protected static final int ERROR_TOTAL_COUNT = 60; // engines used to create the context /** the engine used to create this context */ private ReportEngine engine; /** * task which uses this context. */ private EngineTask task; /** * logger used to log out the excepitons */ private Logger log; /** * execution mode, in this mode, the render operation should be executed. */ private boolean presentationMode = false; /** * execution mode, in this mode, the genreation opration should be executed. */ private boolean factoryMode = true; // utilitis used in this context. /** * The scripting context, used to evaluate the script. */ private ScriptContext scriptContext; /** * data engine, used to evaluate the data related expressions. */ private IDataEngine dataEngine; /** * utility used to create the report content */ private IReportExecutor executor; /** * utility used to create the TOC */ private TOCBuilder tocBuilder; // then is the input content /** * report runnable used to create the report content */ protected ReportRunnable runnable; protected ReportRunnable originalRunnable; /** * Global configuration variables */ private Map configs = new BirtHashMap( ); /** * Report parameters used to create the report content */ private Map params = new BirtHashMap( ); private Map persistentBeans = new HashMap( ); private Map transientBeans = new HashMap( ); private Map<String, PageVariable> pageVariables = new HashMap<String, PageVariable>( ); private ReportDocumentWriter docWriter; protected Report reportIR; /** * app context */ private Map appContext = new HashMap( ); /** * report context used to evaluate the java-based script. */ private IReportContext reportContext; /** * options used to render the report content */ private IRenderOption renderOption; /** * the locale from user level. */ private ULocale ulocale; /** * the locale defined in report level */ private ULocale rlocale; private static final String USER_LOCALE = "user_locale"; /** * define a time zone */ private TimeZone timeZone; // at last the output objects /** * report document, may be the output or input. 
*/ private IReportDocument reportDoc; /** * the created report content */ private IReportContent reportContent; /** * the current executed design. */ private ReportItemDesign design; /** * The current content element to be executed or loaded */ private IContent content; /** * the current opened result set */ private IBaseResultSet[] rsets; /** * A stack of handle objects, with the current one on the top */ private Stack reportHandles = new Stack( ); /** * total page */ private long totalPage; /** * current page number */ private long pageNumber; private long filteredTotalPage; private long filteredPageNumber; /** * Flag to indicate whether task is canceled. */ private boolean isCancelled = false; /** * flag to indicate if the task should be canceled on error */ private boolean cancelOnError = false; /** * utilities used in the report execution. */ private HashMap<String, StringFormatter> stringFormatters = new HashMap<String, StringFormatter>( ); private HashMap<String, NumberFormatter> numberFormatters = new HashMap<String, NumberFormatter>( ); private HashMap<String, DateFormatter> dateFormatters = new HashMap<String, DateFormatter>( ); private ClassLoader applicationClassLoader; private boolean closeClassLoader; private int MAX_ERRORS = 100; /** * */ private DocumentDataSource dataSource; /** * All page break listeners. */ private List pageBreakListeners; /** * an instance of ExtendedItemManager */ private ExtendedItemManager extendedItemManager = new ExtendedItemManager( ); /** * an instance of engine extension manager */ private EngineExtensionManager engineExtensionManager = new EngineExtensionManager( this ); /** * max rows per query. An initial value -1 means it is not set */ private int maxRowsPerQuery = -1; private EventHandlerManager eventHandlerManager; private IProgressMonitor progressMonitor; private boolean needOutputResultSet; private boolean isFixedLayout = false; private IDesignElement element = null; private boolean refreshData = false; protected BookmarkManager bookmarkManager; /** * create a new context. Call close to finish using the execution context */ public ExecutionContext( ) { this( null ); } /** * create a new context. Call close to finish using the execution context */ public ExecutionContext( EngineTask engineTask ) { if ( engineTask != null ) { task = engineTask; engine = (ReportEngine) task.getEngine( ); log = task.getLogger( ); } else { log = Logger.getLogger( ExecutionContext.class.getName( ) ); } ulocale = ULocale.getDefault( ); timeZone = TimeZone.getDefault( ); eventHandlerManager = new EventHandlerManager( ); } private void initializeScriptContext( ) { // FIXME: the root scope defined in the report engine is not used. 
scriptContext = new ScriptContext( ); if ( engine != null ) { EngineConfig config = engine.getConfig( ); IStatusHandler statusHandler = config.getStatusHandler( ); if ( statusHandler != null ) { scriptContext.setAttribute( "statusHandle", statusHandler ); } } scriptContext.setLocale( ulocale.toLocale( ) ); // create script context used to execute the script statements // register the global variables in the script context scriptContext.setAttribute( "report", new ReportObject( ) ); scriptContext.setAttribute( "params", params ); //$NON-NLS-1$ scriptContext.setAttribute( "config", configs ); //$NON-NLS-1$ scriptContext.setAttribute( "currentPage", Long.valueOf( pageNumber ) ); scriptContext.setAttribute( "totalPage", Long.valueOf( totalPage ) ); scriptContext.setAttribute( "_jsContext", this ); scriptContext.setAttribute( "vars", pageVariables ); if ( runnable != null ) { registerDesign( runnable ); } if ( reportContext != null ) { scriptContext.setAttribute( "reportContext", reportContext ); } scriptContext.setAttribute( "pageNumber", Long.valueOf( pageNumber ) ); scriptContext.setAttribute( "totalPage", Long.valueOf( totalPage ) ); if ( task != null ) { IStatusHandler handler = task.getStatusHandler( ); if ( handler != null ) { handler.initialize( ); } if ( handler == null ) { handler = engine.getConfig( ).getStatusHandler( ); } if ( handler != null ) { scriptContext.setAttribute( "_statusHandle", handler ); } } if ( transientBeans != null ) { Iterator entries = transientBeans.entrySet( ).iterator( ); while ( entries.hasNext( ) ) { Map.Entry entry = (Map.Entry) entries.next( ); scriptContext.setAttribute( (String) entry.getKey( ), entry .getValue( ) ); } } if ( persistentBeans != null ) { Iterator entries = persistentBeans.entrySet( ).iterator( ); while ( entries.hasNext( ) ) { Map.Entry entry = (Map.Entry) entries.next( ); registerInRoot( (String) entry.getKey( ), entry.getValue( ) ); } } scriptContext.setApplicationClassLoader( getApplicationClassLoader( ) ); } /** * get the report engine. In that engine, we create the context. * * @return the report engine used to create the context. 
*/ public ReportEngine getEngine( ) { return engine; } public BookmarkManager getBookmarkManager( ) { if ( bookmarkManager == null ) { bookmarkManager = new BookmarkManager( this, 1000 ); } return bookmarkManager; } /** * Clean up the execution context before finishing using it */ public void close( ) { if ( extendedItemManager != null ) { extendedItemManager.close( ); extendedItemManager = null; } if ( engineExtensionManager != null ) { engineExtensionManager.close( ); engineExtensionManager = null; } if ( scriptContext != null ) { scriptContext.close( ); scriptContext = null; } if ( bookmarkManager != null ) { bookmarkManager.close( ); bookmarkManager = null; } if ( dataSource != null ) { try { dataSource.close( ); } catch ( IOException e ) { log.log( Level.SEVERE, "Failed to close the data source", e ); } dataSource = null; } if ( dataEngine != null ) { dataEngine.shutdown( ); dataEngine = null; } if ( closeClassLoader && applicationClassLoader instanceof ApplicationClassLoader ) { ( (ApplicationClassLoader) applicationClassLoader ).close( ); } IStatusHandler handler = task.getStatusHandler( ); if ( handler != null ) { handler.finish( ); } // RELEASE ALL THE MEMBERS EXPLICTLY AS THIS OBJECT MAY BE REFERENCED BY // THE SCRIPT OBJECT WHICH IS HOLDED IN THE FININALIZER QUEUE applicationClassLoader = null; engine = null; // task = null; executor = null; tocBuilder = null; // runnable = null; // originalRunnable = null; configs = null; params = null; persistentBeans = null; transientBeans = null; pageVariables = null; docWriter = null; reportIR = null; appContext = null; reportContext = null; renderOption = null; reportDoc = null; reportContent = null; design = null; content = null; rsets = null; reportHandles = null; errors.clear(); stringFormatters = null; numberFormatters = null; dateFormatters = null; pageBreakListeners = null; eventHandlerManager = null; progressMonitor = null; element = null; } /** * create a new scope, use the object to create the curren scope. * * @param object * the "this" object in the new scope */ public void newScope( Object object ) { scriptContext = getScriptContext( ).newContext( object ); } /** * exits a variable scope. */ public void exitScope( ) { if ( scriptContext == null ) { throw new IllegalStateException( ); } ScriptContext parent = scriptContext.getParent( ); if ( parent == null ) { throw new IllegalStateException( ); } scriptContext = parent; } /** * register beans in the execution context * * @param map * name value pair. */ public void registerBeans( Map map ) { if ( map != null ) { Iterator iter = map.entrySet( ).iterator( ); while ( iter.hasNext( ) ) { Map.Entry entry = (Map.Entry) iter.next( ); Object keyObj = entry.getKey( ); Object value = entry.getValue( ); if ( keyObj != null ) { String key = keyObj.toString( ); registerBean( key, value ); } } } } /** * declares a variable in the current scope. The variable is then accessible * through JavaScript. 
* * @param name * variable name * @param value * variable value */ public void registerBean( String name, Object value ) { transientBeans.put( name, value ); if ( scriptContext != null ) { scriptContext.setAttribute( name, value ); } } public void unregisterBean( String name ) { transientBeans.remove( name ); if ( scriptContext != null ) { scriptContext.setAttribute( name, null ); } } public Map getBeans( ) { return transientBeans; } public void registerGlobalBeans( Map map ) { if ( map != null ) { Iterator iter = map.entrySet( ).iterator( ); while ( iter.hasNext( ) ) { Map.Entry entry = (Map.Entry) iter.next( ); Object keyObj = entry.getKey( ); Object value = entry.getValue( ); if ( keyObj != null && value instanceof Serializable ) { String key = keyObj.toString( ); registerGlobalBean( key, (Serializable) value ); } } } } public void registerGlobalBean( String name, Serializable value ) { persistentBeans.put( name, value ); if ( scriptContext != null ) { registerInRoot( name, value ); } } public void unregisterGlobalBean( String name ) { persistentBeans.remove( name ); if ( scriptContext != null ) { registerInRoot( name, null ); } } public Map getGlobalBeans( ) { return persistentBeans; } private void registerInRoot( String name, Object value ) { getRootContext( ).setAttribute( name, value ); } public Object evaluate( Expression expr ) throws BirtException { if ( expr != null ) { switch ( expr.getType( ) ) { case Expression.CONSTANT : Expression.Constant cs = (Expression.Constant) expr; return cs.getValue( ); case Expression.SCRIPT : Expression.Script script = (Expression.Script) expr; ICompiledScript compiledScript = script .getScriptExpression( ); if ( compiledScript == null ) { compiledScript = compile( script.getLanguage( ), script .getFileName( ), script.getLineNumber( ), script.getScriptText( ) ); script.setCompiledScript( compiledScript ); } return evaluate( compiledScript ); case Expression.CONDITIONAL : IConditionalExpression ce = ( (Expression.Conditional) expr ) .getConditionalExpression( ); return evaluateCondExpr( ce ); } } return null; } /** * The expression may be evaluated at onPrepare stage, at that time the * reportIR is not initialized. 
*/ protected String getScriptLanguage( ) { if ( reportIR != null ) { return reportIR.getScriptLanguage( ); } return Expression.SCRIPT_JAVASCRIPT; } public Object evaluate( String scriptText ) throws BirtException { return evaluate( getScriptLanguage( ), "<inline>", 1, scriptText ); } public Object evaluate( String fileName, String scriptText ) throws BirtException { return evaluate( getScriptLanguage( ), fileName, 1, scriptText ); } public Object evaluateInlineScript( String language, String scriptText ) throws BirtException { return evaluate( language, "<inline>", 1, scriptText ); } public Object evaluate( String language, String fileName, int lineNumber, String scriptText ) throws BirtException { if ( scriptText == null ) { return null; } ICompiledScript compiledScript = compile( language, fileName, lineNumber, scriptText ); return evaluate( compiledScript ); } private ICompiledScript compile( String language, String fileName, int lineNumber, String scriptText ) throws BirtException { ICompiledScript compiledScript = runnable.getScript( language, scriptText ); if ( compiledScript == null ) { compiledScript = getScriptContext( ).compile( language, fileName, lineNumber, scriptText ); runnable.putScript( language, scriptText, compiledScript ); } return compiledScript; } private Object evaluate( ICompiledScript compiledScript ) throws BirtException { return getScriptContext( ).evaluate( compiledScript ); } /** * evaluate conditional expression. A conditional expression can have an * operator, one LHS expression, and up to two expressions on RHS, i.e., * * testExpr operator operand1 operand2 or testExpr between 1 20 * * Now only support comparison between the same data type * * @param expr * the conditional expression to be evaluated * @return a boolean value (as an Object) */ public Object evaluateCondExpr( IConditionalExpression expr ) throws BirtException { IScriptExpression testExpr = expr.getExpression( ); ScriptContext scriptContext = getScriptContext( ); if ( testExpr == null ) return Boolean.FALSE; try { return ScriptEvalUtil.evalExpr( expr, scriptContext, ScriptExpression.defaultID, 0 ); } catch ( Throwable e ) { throw new EngineException( MessageConstants.INVALID_EXPRESSION_ERROR, testExpr .getText( ), e ); } } /** * execute the script. Simply evaluate the script, then drop the return * value * * @param script * script statement * @param fileName * file name * @param lineNo * line no */ public void execute( ICompiledScript script ) { try { scriptContext.evaluate( script ); } catch ( BirtException ex ) { addException( this.design, ex ); } } /** * @return Returns the locale. */ public Locale getLocale( ) { if ( rlocale != null ) return rlocale.toLocale( ); return ulocale.toLocale( ); } /** * @param locale * The locale to set. */ public void setLocale( ULocale ulocale ) { this.ulocale = ulocale; if ( rlocale == null ) this.getScriptContext( ).setLocale( ulocale.toLocale( ) ); } public TimeZone getTimeZone( ) { return this.timeZone; } public void setTimeZone( TimeZone timeZone ) { this.timeZone = timeZone; this.getScriptContext( ).setTimeZone( timeZone ); } public void openDataEngine( ) throws EngineException { if ( dataEngine == null ) { try { dataEngine = engine.getDataEngineFactory( ).createDataEngine( this, needOutputResultSet ); } catch ( Exception e ) { throw new EngineException( MessageConstants.CANNOT_CREATE_DATA_ENGINE, e ); } } } /** * @return Returns the dataEngine. 
*/ public IDataEngine getDataEngine( ) throws EngineException { if ( dataEngine == null ) { openDataEngine( ); } return dataEngine; } public void closeDataEngine( ) { if ( dataEngine != null ) { dataEngine.shutdown( ); dataEngine = null; } } /** * @param name * @param value */ public void setParameterValue( String name, Object value ) { Object parameter = params.get( name ); if ( parameter instanceof ParameterAttribute ) { ( (ParameterAttribute) parameter ).setValue( value ); } else { params.put( name, new ParameterAttribute( value, null ) ); } } /** * @param name * @param value */ public void setParameter( String name, Object value, String displayText ) { params.put( name, new ParameterAttribute( value, displayText ) ); } public void clearParameters( ) { params.clear( ); } public Object getParameterValue( String name ) { Object parameter = params.get( name ); if ( parameter != null ) { return ( (ParameterAttribute) parameter ).getValue( ); } return null; } public Map getParameterValues( ) { HashMap result = new HashMap( ); Set entries = params.entrySet( ); Iterator iterator = entries.iterator( ); while ( iterator.hasNext( ) ) { Map.Entry entry = (Map.Entry) iterator.next( ); ParameterAttribute parameter = (ParameterAttribute) entry .getValue( ); result.put( entry.getKey( ), parameter.getValue( ) ); } return result; } public Map getParameterDisplayTexts( ) { Map result = new HashMap( ); Set entries = params.entrySet( ); Iterator iterator = entries.iterator( ); while ( iterator.hasNext( ) ) { Map.Entry entry = (Map.Entry) iterator.next( ); String name = (String) entry.getKey( ); ParameterAttribute value = (ParameterAttribute) entry.getValue( ); result.put( name, value.getDisplayText( ) ); } return result; } public String getParameterDisplayText( String name ) { Object parameter = params.get( name ); if ( parameter != null ) { return ( (ParameterAttribute) parameter ).getDisplayText( ); } return null; } public void setParameterDisplayText( String name, String displayText ) { Object parameter = params.get( name ); if ( parameter != null ) { ( (ParameterAttribute) parameter ).setDisplayText( displayText ); } } /* * (non-Javadoc) * * @see org.eclipse.birt.report.engine.executor.IFactoryContext#getConfigs() */ public Map getConfigs( ) { return configs; } /* * (non-Javadoc) * * @see * org.eclipse.birt.report.engine.executor.IFactoryContext#getReportDesign() */ public ModuleHandle getDesign( ) { return runnable != null ? (ModuleHandle) runnable.getDesignHandle( ) : null; } public ReportDesignHandle getReportDesign( ) { ModuleHandle design = getDesign( ); if ( design instanceof ReportDesignHandle ) { return (ReportDesignHandle) design; } return null; } /** * @return Returns the report. */ public IReportContent getReportContent( ) { return reportContent; } public void setReportContent( ReportContent content ) { this.reportContent = content; if ( contentErrors.size( ) > 0 ) { reportContent.getErrors( ).addAll( contentErrors ); contentErrors.clear( ); } content.setReportContext( reportContext ); } /** * Loads scripts that are stored in an external file. Used to support * include-script. Each script file should be load only once. and the script * in the file must be encoded in UTF-8. * * @param fileName * script file name */ public void loadScript( String language, String fileName ) { ModuleHandle reportDesign = this.getDesign( ); try { // read the script in the URL, and execution. 
byte[] script = getResourceLocator( ).findResource( reportDesign, fileName, IResourceLocator.LIBRARY, appContext ); ICompiledScript compiledScript = getScriptContext( ).compile( language, fileName, 1, new String( script, "UTF-8" ) ); execute( compiledScript ); //$NON-NLS-1$ } catch ( IOException ex ) { log.log( Level.SEVERE, "loading external script file " + fileName + " failed.", //$NON-NLS-1$ //$NON-NLS-2$ ex ); addException( new EngineException( MessageConstants.SCRIPT_FILE_LOAD_ERROR, fileName, ex ) ); //$NON-NLS-1$ // TODO This is a fatal error. Should throw an exception. } catch ( BirtException e ) { log.log( Level.SEVERE, "Failed to execute script " + fileName + ".", //$NON-NLS-1$ //$NON-NLS-2$ e ); addException( new EngineException( MessageConstants.SCRIPT_EVALUATION_ERROR, fileName, e ) ); //$NON-NLS-1$ } } public ScriptContext getScriptContext( ) { if ( scriptContext == null ) { initializeScriptContext( ); } return this.scriptContext; } /** * @return */ public IContent getContent( ) { return content; } public void setContent( IContent content ) { this.content = content; } public ReportItemDesign getItemDesign( ) { return design; } public void setItemDesign( ReportItemDesign design ) { this.design = design; } /** * @param obj */ public void pushHandle( DesignElementHandle obj ) { reportHandles.push( obj ); } /** * @return */ public DesignElementHandle popHandle( ) { return (DesignElementHandle) reportHandles.pop( ); } /** * @return */ public DesignElementHandle getHandle( ) { if ( reportHandles.empty( ) ) { return null; } return (DesignElementHandle) reportHandles.peek( ); } /** * Adds the exception * * @param ex * the Throwable instance */ public void addException( BirtException ex ) { DesignElementHandle handle = getDesign( ); if ( design != null ) { handle = design.getHandle( ); } addException( handle, ex ); } /** * A list of errors in time order, it is also shared by the report content */ private List<EngineException> errors = new ArrayList<EngineException>( ); private List<ElementExceptionInfo> contentErrors = new ArrayList<ElementExceptionInfo>( ); /** * The exception list grouped by the element */ protected HashMap<DesignElementHandle, ElementExceptionInfo> elementExceptions = new HashMap<DesignElementHandle, ElementExceptionInfo>( ); public void addException( ReportElementDesign design, BirtException ex ) { DesignElementHandle handle = null; if ( null != design ) { handle = design.getHandle( ); } addException( handle, ex ); } public void addException( DesignElementHandle element, BirtException ex ) { if ( errors.size( ) >= ERROR_TOTAL_COUNT ) { if ( cancelOnError && task != null ) { task.cancel( ); } return; } EngineException engineEx = null; if ( ex instanceof EngineException ) { engineEx = (EngineException) ex; } else { engineEx = new EngineException( ex ); } if ( element != null ) { engineEx.setElementID( element.getID( ) ); } errors.add( engineEx ); ElementExceptionInfo exInfo = (ElementExceptionInfo) elementExceptions.get( element ); if ( exInfo == null ) { exInfo = new ElementExceptionInfo( element ); elementExceptions.put( element, exInfo ); if ( reportContent != null ) { reportContent.getErrors( ).add( exInfo ); } else { contentErrors.add( exInfo ); } } exInfo.addException( engineEx ); if ( cancelOnError && task != null ) { task.cancel( ); } } /** * report object is the script object used in the script context. * * All infos can get from this object. * * */ public class ReportObject { /** * get the report design handle * * @return report design object. 
*/ public Object getDesign( ) { return element; } /** * get the report document. * * @return report document. */ public Object getDocument( ) { return reportDoc; } /** * @return a map of name/value pairs for all the parameters and their * values */ public Map getParams( ) { return params; } /** * @return a set of data sets */ public Object getDataSets( ) { return null; } /** * @return a set of data sources */ public Object getDataSources( ) { return null; } /** * @return a map of name/value pairs for all the configuration variables */ public Map getConfig( ) { return configs; } public Object getReportContext( ) { return reportContext; } } /** * @return Returns the runnable. */ public ReportRunnable getRunnable( ) { return runnable; } /** * @param runnable * The runnable to set. */ public void setRunnable( IReportRunnable runnable ) { this.runnable = (ReportRunnable) runnable; if ( scriptContext != null ) { registerDesign( runnable ); } } public void updateRunnable( IReportRunnable newRunnable ) { if ( originalRunnable == null ) { this.originalRunnable = this.runnable; } this.runnable = (ReportRunnable) newRunnable; if ( scriptContext != null ) { registerDesign( runnable ); } reportIR = null; } public ReportRunnable getOriginalRunnable( ) { if ( originalRunnable != null ) { return originalRunnable; } return runnable; } private void registerDesign( IReportRunnable runnable ) { DesignElementHandle design = (ModuleHandle) runnable.getDesignHandle( ); element = SimpleElementFactory.getInstance( ).getElement( design ); } /** * @return Returns the renderOption. */ public IRenderOption getRenderOption( ) { return renderOption; } /** * @param renderOption * The renderOption to set. */ public void setRenderOption( IRenderOption renderOption ) { this.renderOption = renderOption; } public String getOutputFormat( ) { String outputFormat = null; if ( renderOption != null ) { outputFormat = renderOption.getOutputFormat( ); } if ( outputFormat == null ) { if ( isFixedLayout( ) ) { outputFormat = IRenderOption.OUTPUT_FORMAT_PDF; } else { outputFormat = IRenderOption.OUTPUT_FORMAT_HTML; } } return outputFormat; } public static class ElementExceptionInfo { DesignElementHandle element; ArrayList exList = new ArrayList( ); ArrayList countList = new ArrayList( ); public ElementExceptionInfo( DesignElementHandle element ) { this.element = element; } public void addException( BirtException e ) { for ( int i = 0; i < exList.size( ); i++ ) { BirtException err = (BirtException) exList.get( i ); if ( e.getErrorCode( ) != null && e.getErrorCode( ).equals( err.getErrorCode( ) ) && e.getLocalizedMessage( ) != null && e.getLocalizedMessage( ).equals( err.getLocalizedMessage( ) ) ) { countList.set( i, Integer.valueOf( ( (Integer) countList.get( i ) ).intValue( ) + 1 ) ); return; } } exList.add( e ); countList.add( Integer.valueOf( 1 ) ); } public String getType( ) { if ( element == null ) { return "report"; } return element.getDefn( ).getName( ); } public String getName( ) { if ( element == null ) { return "report"; } return element.getName( ); } public String getID( ) { if ( element == null ) return null; else return String.valueOf( element.getID( ) ); } public ArrayList getErrorList( ) { return exList; } public ArrayList getCountList( ) { return countList; } } public Map getAppContext( ) { return appContext; } public void setAppContext( Map appContext ) { this.appContext.clear( ); if ( appContext != null ) { this.appContext.putAll( appContext ); } } public IReportContext getReportContext( ) { return reportContext; } 
public void setReportContext( IReportContext reportContext ) { this.reportContext = reportContext; if ( scriptContext != null ) { getRootContext( ).setAttribute( "reportContext", reportContext ); } } public void setPageNumber( long pageNo ) { pageNumber = pageNo; if ( scriptContext != null ) { getRootContext( ).setAttribute( "pageNumber", Long.valueOf( pageNumber ) ); } if ( totalPage < pageNumber ) { setTotalPage( pageNumber ); } } /** * set the total page. * * @param totalPage * total page */ public void setTotalPage( long totalPage ) { if ( totalPage > this.totalPage ) { this.totalPage = totalPage; if ( scriptContext != null ) { getRootContext( ).setAttribute( "totalPage", Long.valueOf( totalPage ) ); } if ( reportContent instanceof ReportContent ) { ( (ReportContent) reportContent ).setTotalPage( totalPage ); } } } /** * get the current page number * * @return current page number */ public long getPageNumber( ) { return pageNumber; } /** * get the total page have been created. * * @return total page */ public long getTotalPage( ) { return totalPage; } public void setFilteredPageNumber( long pageNo ) { filteredPageNumber = pageNo; } public void setFilteredTotalPage( long totalPage ) { filteredTotalPage = totalPage; } public long getFilteredPageNumber( ) { if ( filteredPageNumber <= 0 ) { return pageNumber; } return filteredPageNumber; } public long getFilteredTotalPage( ) { if ( filteredTotalPage <= 0 ) { return totalPage; } return filteredTotalPage; } /** * is in factory mode * * @return true, factory mode, false not in factory mode */ public boolean isInFactory( ) { return factoryMode; } /** * is in presentation mode. * * @return true, presentation mode, false otherwise */ public boolean isInPresentation( ) { return presentationMode; } /** * set the in factory mode * * @param mode * factory mode */ public void setFactoryMode( boolean mode ) { this.factoryMode = mode; } public boolean getFactoryMode( ) { return this.factoryMode; } /** * set in presentation mode * * @param mode * presentation mode */ public void setPresentationMode( boolean mode ) { this.presentationMode = mode; } private ULocale determineLocale( String locale ) { ULocale loc = null; if ( locale == null ) { if ( rlocale == null ) loc = ulocale; else loc = rlocale; } else { if ( USER_LOCALE.equals( locale ) ) loc = ulocale; else loc = new ULocale( locale ); } return loc; } /** * get a string formatter object * * @param value * string format * @return formatter object */ public StringFormatter getStringFormatter( String pattern ) { return getStringFormatter( pattern, null ); } public StringFormatter getStringFormatter( String pattern, String locale ) { String key = pattern + ":" + locale; StringFormatter fmt = stringFormatters.get( key ); if ( fmt == null ) { ULocale loc = determineLocale( locale ); fmt = new StringFormatter( pattern, loc ); stringFormatters.put( key, fmt ); } return fmt; } /** * get a number formatter object * * @param pattern * number format * @return formatter object */ public NumberFormatter getNumberFormatter( String pattern ) { return getNumberFormatter( pattern, null ); } public NumberFormatter getNumberFormatter( String pattern, String locale ) { String key = pattern + ":" + locale; NumberFormatter fmt = numberFormatters.get( key ); if ( fmt == null ) { ULocale loc = determineLocale( locale ); fmt = new NumberFormatter( pattern, loc ); numberFormatters.put( key, fmt ); } return fmt; } /** * get a date formatter object * * @param value * date format * @return formatter object */ public 
DateFormatter getDateFormatter( String pattern ) { return getDateFormatter( pattern, null ); } public DateFormatter getDateFormatter( String pattern, String locale ) { String key = pattern + ":" + locale; DateFormatter fmt = dateFormatters.get( key ); if ( fmt == null ) { ULocale loc = determineLocale( locale ); fmt = new DateFormatter( pattern, loc, timeZone ); dateFormatters.put( key, fmt ); } return fmt; } /** * set the executor used in the execution context * * @param executor */ public void setExecutor( IReportExecutor executor ) { this.executor = executor; } /** * get the executor used to execute the report * * @return report executor */ public IReportExecutor getExecutor( ) { return executor; } public TOCBuilder getTOCBuilder( ) { return tocBuilder; } public void setTOCBuilder( TOCBuilder builder ) { this.tocBuilder = builder; } /** * set the report document used in the context * * @param doc */ public void setReportDocument( IReportDocument doc ) { this.reportDoc = doc; } /** * get the report document used in the context. * * @return */ public IReportDocument getReportDocument( ) { return reportDoc; } public void setReportDocWriter( ReportDocumentWriter docWriter ) { this.docWriter = docWriter; } public ReportDocumentWriter getReportDocWriter( ) { return docWriter; } /** * @return Returns the action handler. */ public IHTMLActionHandler getActionHandler( ) { return renderOption.getActionHandler( ); } /** * @return Returns the action handler. */ public IHTMLImageHandler getImageHandler( ) { return renderOption.getImageHandler( ); } /** * return application class loader. The application class loader is used to * load the report item event handle and java classes called in the * javascript. * * @return class loader */ public ClassLoader getApplicationClassLoader( ) { if ( applicationClassLoader == null ) { closeClassLoader = true; applicationClassLoader = AccessController.doPrivileged( new PrivilegedAction<ApplicationClassLoader>( ) { public ApplicationClassLoader run( ) { return new ApplicationClassLoader( engine, runnable, appContext ); } } ); if ( scriptContext != null ) { scriptContext .setApplicationClassLoader( applicationClassLoader ); } } return applicationClassLoader; } public void setApplicationClassLoader( ClassLoader classLoader ) { if ( classLoader == null ) { throw new NullPointerException( "null classloader" ); } if ( closeClassLoader && applicationClassLoader instanceof ApplicationClassLoader ) { ( (ApplicationClassLoader) applicationClassLoader ).close( ); } closeClassLoader = false; this.applicationClassLoader = classLoader; if ( scriptContext != null ) { scriptContext.setApplicationClassLoader( applicationClassLoader ); } } /** * Set the cancel flag. 
*/ public void cancel( ) { isCancelled = true; // cancel the dte's session if ( dataEngine != null ) { DataRequestSession session = dataEngine.getDTESession( ); if ( session != null ) { session.cancel( ); } } } public boolean isCanceled( ) { return isCancelled; } public void restart( ) throws EngineException { getDataEngine( ).getDTESession( ).restart( ); this.isCancelled = false; } public void setCancelOnError( boolean cancel ) { cancelOnError = cancel; } public void setDataSource( DocumentDataSource dataSource ) throws IOException { this.dataSource = dataSource; this.dataSource.open( ); } public DocumentDataSource getDataSource( ) { return dataSource; } public IBaseResultSet executeQuery( IBaseResultSet parent, IDataQueryDefinition query, Object queryOwner, boolean useCache ) throws BirtException { IDataEngine dataEngine = getDataEngine( ); return dataEngine.execute( parent, query, queryOwner, useCache ); } public IBaseResultSet getResultSet( ) { if ( rsets != null ) { return rsets[0]; } return null; } public void setResultSet( IBaseResultSet rset ) { if ( rset != null ) { if ( rsets != null && rsets.length == 1 && rsets[0] == rset ) { return; } setResultSets( new IBaseResultSet[]{rset} ); } else { setResultSets( null ); } } public IBaseResultSet[] getResultSets( ) { return rsets; } public void setResultSets( IBaseResultSet[] rsets ) { if ( this.rsets == rsets ) { return; } if ( rsets != null ) { this.rsets = rsets; if ( rsets[0] != null ) { try { DataAdapterUtil.registerDataObject( scriptContext, new ResultIteratorTree( rsets[0] ) ); } catch ( AdapterException e ) { log.log( Level.SEVERE, e.getLocalizedMessage( ), e ); } } } else { this.rsets = null; // FIXME: we should also remove the JSObject from scope // Scriptable scope = scriptContext.getRootScope( ); // DataAdapterUtil.registerJSObject( scope, // new ResultIteratorTree( rsets[0] ) ); } } private class ResultIteratorTree implements ILinkedResult { IBaseResultSet currentRset; int resultType = -1; public ResultIteratorTree( IBaseResultSet rset ) { this.currentRset = rset; if ( rset instanceof IQueryResultSet ) { resultType = ILinkedResult.TYPE_TABLE; } else if ( rset instanceof ICubeResultSet ) { resultType = ILinkedResult.TYPE_CUBE; } } public ILinkedResult getParent( ) { return new ResultIteratorTree( currentRset.getParent( ) ); } public Object getCurrentResult( ) { if ( resultType == ILinkedResult.TYPE_TABLE ) { return ( (IQueryResultSet) currentRset ).getResultIterator( ); } else if ( resultType == ILinkedResult.TYPE_CUBE ) { return ( (ICubeResultSet) currentRset ).getCubeCursor( ); } return null; } public int getCurrentResultType( ) { return resultType; } } public boolean hasErrors( ) { return !elementExceptions.isEmpty( ); } /** * Returns list or errors, the max count of the errors is * <code>MAX_ERRORS</code> * * @return error list which has max error size limited to * <code>MAX_ERRORS</code> */ public List getErrors( ) { List errors = this.getAllErrors( ); if ( errors.size( ) > MAX_ERRORS ) { errors = errors.subList( 0, MAX_ERRORS - 1 ); } return errors; } /** * Returns all errors. * * @return list of all the errors. */ public List getAllErrors( ) { return errors; } /** * @return the mAX_ERRORS */ public int getMaxErrors( ) { return MAX_ERRORS; } /** * @param max_errors * the mAX_ERRORS to set */ public void setMaxErrors( int maxErrors ) { MAX_ERRORS = maxErrors; } /** * to remember the current report item is in master page or not. 
*/ boolean isExecutingMasterPage = false; /** * Since the data set in master page will be executed in each page and while * the data set in report body will only be executed once, we need to * remember the current report item is in master page or not. This will be * used to help store the executed resultSetID and load it to distinguish * them. */ public void setExecutingMasterPage( boolean isExecutingMasterPage ) { this.isExecutingMasterPage = isExecutingMasterPage; } public boolean isExecutingMasterPage( ) { return isExecutingMasterPage; } /** * Add a page break listener. * * @param listener * the page break listener. */ public void addPageBreakListener( IPageBreakListener listener ) { if ( pageBreakListeners == null ) { pageBreakListeners = new ArrayList( ); } pageBreakListeners.add( listener ); } /** * Notify page break listeners that page is broken. */ public void firePageBreakEvent( ) { if ( pageBreakListeners != null ) { for ( int i = 0; i < pageBreakListeners.size( ); i++ ) { ( (IPageBreakListener) pageBreakListeners.get( i ) ) .onPageBreak( ); } } } /** * Remove a page break listener. * * @param listener * the page break listener. */ public void removePageBreakListener( IPageBreakListener listener ) { if ( pageBreakListeners != null ) { pageBreakListeners.remove( listener ); } } public IEngineTask getEngineTask( ) { return task; } public Logger getLogger( ) { return log; } public void setLogger( Logger logger ) { log = logger; } protected ExecutionPolicy executionPolicy; public void optimizeExecution( ) { if ( ( task != null ) && ( task.getTaskType( ) == IEngineTask.TASK_RUN ) && !isFixedLayout ) { String[] engineExts = getEngineExtensions( ); if ( engineExts == null || engineExts.length == 0 ) { executionPolicy = new ExecutionOptimize( ) .optimize( getReport( ) ); } } } public ExecutionPolicy getExecutionPolicy( ) { return executionPolicy; } public Report getReport( ) { if ( reportIR != null ) { return reportIR; } if ( runnable != null ) { reportIR = new ReportParser( ).parse( (ReportDesignHandle) runnable .getDesignHandle( ) ); setupFromReport( ); } return reportIR; } public void setReport( Report reportIR ) { this.reportIR = reportIR; setupFromReport( ); } protected void setupFromReport( ) { if ( reportIR == null ) return; String locale = reportIR.getLocale( ); if ( locale != null ) { rlocale = new ULocale( locale ); this.getScriptContext( ).setLocale( rlocale.toLocale( ) ); } } public URL getResource( String resourceName ) { if ( getDesign( ) != null ) { return getDesign( ).findResource( resourceName, IResourceLocator.OTHERS, appContext ); } return null; } public ResourceLocatorWrapper getResourceLocator( ) { return task.getResourceLocatorWrapper( ); } public ExtendedItemManager getExtendedItemManager( ) { return extendedItemManager; } public EngineExtensionManager getEngineExtensionManager( ) { return engineExtensionManager; } public void setMaxRowsPerQuery( int maxRows ) { if ( maxRows >= 0 ) { maxRowsPerQuery = maxRows; } } public int getMaxRowsPerQuery( ) { return maxRowsPerQuery; } private String[] engineExts; public String[] getEngineExtensions( ) { if ( engineExts != null ) { return engineExts; } engineExts = engine.getEngineExtensions( runnable ); if ( engineExts == null ) { engineExts = new String[]{}; } return engineExts; } private boolean enableProgreesiveViewing = true; public void enableProgressiveViewing( boolean enabled ) { enableProgreesiveViewing = enabled; } public boolean isProgressiveViewingEnable( ) { return enableProgreesiveViewing; } public 
EventHandlerManager getEventHandlerManager( ) { return eventHandlerManager; } public void setProgressMonitor( IProgressMonitor monitor ) { progressMonitor = new ProgressMonitorProxy( monitor ); } public IProgressMonitor getProgressMonitor( ) { if ( progressMonitor == null ) { progressMonitor = new ProgressMonitorProxy( null ); } return progressMonitor; } public boolean needOutputResultSet( ) { return needOutputResultSet; } public void setNeedOutputResultSet( boolean needOutputResultSet ) { this.needOutputResultSet = needOutputResultSet; } public Object getPageVariable( String name ) { if ( "totalPage".equals( name ) ) { return Long.valueOf( totalPage ); } if ( "pageNumber".equals( name ) ) { return Long.valueOf( totalPage ); } PageVariable var = pageVariables.get( name ); if ( var != null ) { return var.getValue( ); } return null; } public void setPageVariable( String name, Object value ) { PageVariable var = pageVariables.get( name ); if ( var != null ) { var.setValue( value ); } // lazy add special page variables else if ( IReportContext.PAGE_VAR_PAGE_LABEL.equals( name ) ) { addPageVariable( new PageVariable( IReportContext.PAGE_VAR_PAGE_LABEL, PageVariable.SCOPE_PAGE, value ) ); } } public void addPageVariables( Collection<PageVariable> vars ) { for ( PageVariable var : vars ) { pageVariables.put( var.getName( ), var ); } } public Collection<PageVariable> getPageVariables( ) { return pageVariables.values( ); } public void addPageVariable( PageVariable var ) { pageVariables.put( var.getName( ), var ); } public boolean isFixedLayout( ) { return isFixedLayout; } public void setFixedLayout( boolean isFixedLayout ) { this.isFixedLayout = isFixedLayout; } public int getTaskType( ) { return task.getTaskType( ); } private IScriptContext getRootContext( ) { ScriptContext result = scriptContext; while ( result.getParent( ) != null ) { result = result.getParent( ); } return result; } public boolean needRefreshData() { return this.refreshData; } public void setRefreshData(boolean refreshData) { this.refreshData = refreshData; } }
engine/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/executor/ExecutionContext.java
*/ public IDataEngine getDataEngine( ) throws EngineException { if ( dataEngine == null ) { openDataEngine( ); } return dataEngine; } public void closeDataEngine( ) { if ( dataEngine != null ) { dataEngine.shutdown( ); dataEngine = null; } } /** * @param name * @param value */ public void setParameterValue( String name, Object value ) { Object parameter = params.get( name ); if ( parameter instanceof ParameterAttribute ) { ( (ParameterAttribute) parameter ).setValue( value ); } else { params.put( name, new ParameterAttribute( value, null ) ); } } /** * @param name * @param value */ public void setParameter( String name, Object value, String displayText ) { params.put( name, new ParameterAttribute( value, displayText ) ); } public void clearParameters( ) { params.clear( ); } public Object getParameterValue( String name ) { Object parameter = params.get( name ); if ( parameter != null ) { return ( (ParameterAttribute) parameter ).getValue( ); } return null; } public Map getParameterValues( ) { HashMap result = new HashMap( ); Set entries = params.entrySet( ); Iterator iterator = entries.iterator( ); while ( iterator.hasNext( ) ) { Map.Entry entry = (Map.Entry) iterator.next( ); ParameterAttribute parameter = (ParameterAttribute) entry .getValue( ); result.put( entry.getKey( ), parameter.getValue( ) ); } return result; } public Map getParameterDisplayTexts( ) { Map result = new HashMap( ); Set entries = params.entrySet( ); Iterator iterator = entries.iterator( ); while ( iterator.hasNext( ) ) { Map.Entry entry = (Map.Entry) iterator.next( ); String name = (String) entry.getKey( ); ParameterAttribute value = (ParameterAttribute) entry.getValue( ); result.put( name, value.getDisplayText( ) ); } return result; } public String getParameterDisplayText( String name ) { Object parameter = params.get( name ); if ( parameter != null ) { return ( (ParameterAttribute) parameter ).getDisplayText( ); } return null; } public void setParameterDisplayText( String name, String displayText ) { Object parameter = params.get( name ); if ( parameter != null ) { ( (ParameterAttribute) parameter ).setDisplayText( displayText ); } } /* * (non-Javadoc) * * @see org.eclipse.birt.report.engine.executor.IFactoryContext#getConfigs() */ public Map getConfigs( ) { return configs; } /* * (non-Javadoc) * * @see * org.eclipse.birt.report.engine.executor.IFactoryContext#getReportDesign() */ public ModuleHandle getDesign( ) { return runnable != null ? (ModuleHandle) runnable.getDesignHandle( ) : null; } public ReportDesignHandle getReportDesign( ) { ModuleHandle design = getDesign( ); if ( design instanceof ReportDesignHandle ) { return (ReportDesignHandle) design; } return null; } /** * @return Returns the report. */ public IReportContent getReportContent( ) { return reportContent; } public void setReportContent( ReportContent content ) { this.reportContent = content; content.setReportContext( reportContext ); } /** * Loads scripts that are stored in an external file. Used to support * include-script. Each script file should be load only once. and the script * in the file must be encoded in UTF-8. * * @param fileName * script file name */ public void loadScript( String language, String fileName ) { ModuleHandle reportDesign = this.getDesign( ); try { // read the script in the URL, and execution. 
byte[] script = getResourceLocator( ).findResource( reportDesign, fileName, IResourceLocator.LIBRARY, appContext ); ICompiledScript compiledScript = getScriptContext( ).compile( language, fileName, 1, new String( script, "UTF-8" ) ); execute( compiledScript ); //$NON-NLS-1$ } catch ( IOException ex ) { log.log( Level.SEVERE, "loading external script file " + fileName + " failed.", //$NON-NLS-1$ //$NON-NLS-2$ ex ); addException( new EngineException( MessageConstants.SCRIPT_FILE_LOAD_ERROR, fileName, ex ) ); //$NON-NLS-1$ // TODO This is a fatal error. Should throw an exception. } catch ( BirtException e ) { log.log( Level.SEVERE, "Failed to execute script " + fileName + ".", //$NON-NLS-1$ //$NON-NLS-2$ e ); addException( new EngineException( MessageConstants.SCRIPT_EVALUATION_ERROR, fileName, e ) ); //$NON-NLS-1$ } } public ScriptContext getScriptContext( ) { if ( scriptContext == null ) { initializeScriptContext( ); } return this.scriptContext; } /** * @return */ public IContent getContent( ) { return content; } public void setContent( IContent content ) { this.content = content; } public ReportItemDesign getItemDesign( ) { return design; } public void setItemDesign( ReportItemDesign design ) { this.design = design; } /** * @param obj */ public void pushHandle( DesignElementHandle obj ) { reportHandles.push( obj ); } /** * @return */ public DesignElementHandle popHandle( ) { return (DesignElementHandle) reportHandles.pop( ); } /** * @return */ public DesignElementHandle getHandle( ) { if ( reportHandles.empty( ) ) { return null; } return (DesignElementHandle) reportHandles.peek( ); } /** * Adds the exception * * @param ex * the Throwable instance */ public void addException( BirtException ex ) { DesignElementHandle handle = getDesign( ); if ( design != null ) { handle = design.getHandle( ); } addException( handle, ex ); } /** * A list of errors in time order, it is also shared by the report content */ private List<EngineException> errors = new ArrayList<EngineException>( ); /** * The exception list grouped by the element */ protected HashMap<DesignElementHandle, ElementExceptionInfo> elementExceptions = new HashMap<DesignElementHandle, ElementExceptionInfo>( ); public void addException( ReportElementDesign design, BirtException ex ) { DesignElementHandle handle = null; if ( null != design ) { handle = design.getHandle( ); } addException( handle, ex ); } public void addException( DesignElementHandle element, BirtException ex ) { if ( errors.size( ) >= ERROR_TOTAL_COUNT ) { if ( cancelOnError && task != null ) { task.cancel( ); } return; } if ( reportContent != null ) { EngineException engineEx = null; if ( ex instanceof EngineException ) { engineEx = (EngineException) ex; } else { engineEx = new EngineException( ex ); } if ( element != null ) { engineEx.setElementID( element.getID( ) ); } errors.add( engineEx ); ElementExceptionInfo exInfo = (ElementExceptionInfo) elementExceptions.get( element ); if ( exInfo == null ) { exInfo = new ElementExceptionInfo( element ); elementExceptions.put( element, exInfo ); reportContent.getErrors( ).add( exInfo ); } exInfo.addException( engineEx ); } if ( cancelOnError && task != null ) { task.cancel( ); } } /** * report object is the script object used in the script context. * * All infos can get from this object. * * */ public class ReportObject { /** * get the report design handle * * @return report design object. */ public Object getDesign( ) { return element; } /** * get the report document. * * @return report document. 
*/ public Object getDocument( ) { return reportDoc; } /** * @return a map of name/value pairs for all the parameters and their * values */ public Map getParams( ) { return params; } /** * @return a set of data sets */ public Object getDataSets( ) { return null; } /** * @return a set of data sources */ public Object getDataSources( ) { return null; } /** * @return a map of name/value pairs for all the configuration variables */ public Map getConfig( ) { return configs; } public Object getReportContext( ) { return reportContext; } } /** * @return Returns the runnable. */ public ReportRunnable getRunnable( ) { return runnable; } /** * @param runnable * The runnable to set. */ public void setRunnable( IReportRunnable runnable ) { this.runnable = (ReportRunnable) runnable; if ( scriptContext != null ) { registerDesign( runnable ); } } public void updateRunnable( IReportRunnable newRunnable ) { if ( originalRunnable == null ) { this.originalRunnable = this.runnable; } this.runnable = (ReportRunnable) newRunnable; if ( scriptContext != null ) { registerDesign( runnable ); } reportIR = null; } public ReportRunnable getOriginalRunnable( ) { if ( originalRunnable != null ) { return originalRunnable; } return runnable; } private void registerDesign( IReportRunnable runnable ) { DesignElementHandle design = (ModuleHandle) runnable.getDesignHandle( ); element = SimpleElementFactory.getInstance( ).getElement( design ); } /** * @return Returns the renderOption. */ public IRenderOption getRenderOption( ) { return renderOption; } /** * @param renderOption * The renderOption to set. */ public void setRenderOption( IRenderOption renderOption ) { this.renderOption = renderOption; } public String getOutputFormat( ) { String outputFormat = null; if ( renderOption != null ) { outputFormat = renderOption.getOutputFormat( ); } if ( outputFormat == null ) { if ( isFixedLayout( ) ) { outputFormat = IRenderOption.OUTPUT_FORMAT_PDF; } else { outputFormat = IRenderOption.OUTPUT_FORMAT_HTML; } } return outputFormat; } public static class ElementExceptionInfo { DesignElementHandle element; ArrayList exList = new ArrayList( ); ArrayList countList = new ArrayList( ); public ElementExceptionInfo( DesignElementHandle element ) { this.element = element; } public void addException( BirtException e ) { for ( int i = 0; i < exList.size( ); i++ ) { BirtException err = (BirtException) exList.get( i ); if ( e.getErrorCode( ) != null && e.getErrorCode( ).equals( err.getErrorCode( ) ) && e.getLocalizedMessage( ) != null && e.getLocalizedMessage( ).equals( err.getLocalizedMessage( ) ) ) { countList.set( i, Integer.valueOf( ( (Integer) countList.get( i ) ).intValue( ) + 1 ) ); return; } } exList.add( e ); countList.add( Integer.valueOf( 1 ) ); } public String getType( ) { if ( element == null ) { return "report"; } return element.getDefn( ).getName( ); } public String getName( ) { if ( element == null ) { return "report"; } return element.getName( ); } public String getID( ) { if ( element == null ) return null; else return String.valueOf( element.getID( ) ); } public ArrayList getErrorList( ) { return exList; } public ArrayList getCountList( ) { return countList; } } public Map getAppContext( ) { return appContext; } public void setAppContext( Map appContext ) { this.appContext.clear( ); if ( appContext != null ) { this.appContext.putAll( appContext ); } } public IReportContext getReportContext( ) { return reportContext; } public void setReportContext( IReportContext reportContext ) { this.reportContext = reportContext; if ( 
scriptContext != null ) { getRootContext( ).setAttribute( "reportContext", reportContext ); } } public void setPageNumber( long pageNo ) { pageNumber = pageNo; if ( scriptContext != null ) { getRootContext( ).setAttribute( "pageNumber", Long.valueOf( pageNumber ) ); } if ( totalPage < pageNumber ) { setTotalPage( pageNumber ); } } /** * set the total page. * * @param totalPage * total page */ public void setTotalPage( long totalPage ) { if ( totalPage > this.totalPage ) { this.totalPage = totalPage; if ( scriptContext != null ) { getRootContext( ).setAttribute( "totalPage", Long.valueOf( totalPage ) ); } if ( reportContent instanceof ReportContent ) { ( (ReportContent) reportContent ).setTotalPage( totalPage ); } } } /** * get the current page number * * @return current page number */ public long getPageNumber( ) { return pageNumber; } /** * get the total page have been created. * * @return total page */ public long getTotalPage( ) { return totalPage; } public void setFilteredPageNumber( long pageNo ) { filteredPageNumber = pageNo; } public void setFilteredTotalPage( long totalPage ) { filteredTotalPage = totalPage; } public long getFilteredPageNumber( ) { if ( filteredPageNumber <= 0 ) { return pageNumber; } return filteredPageNumber; } public long getFilteredTotalPage( ) { if ( filteredTotalPage <= 0 ) { return totalPage; } return filteredTotalPage; } /** * is in factory mode * * @return true, factory mode, false not in factory mode */ public boolean isInFactory( ) { return factoryMode; } /** * is in presentation mode. * * @return true, presentation mode, false otherwise */ public boolean isInPresentation( ) { return presentationMode; } /** * set the in factory mode * * @param mode * factory mode */ public void setFactoryMode( boolean mode ) { this.factoryMode = mode; } public boolean getFactoryMode( ) { return this.factoryMode; } /** * set in presentation mode * * @param mode * presentation mode */ public void setPresentationMode( boolean mode ) { this.presentationMode = mode; } private ULocale determineLocale( String locale ) { ULocale loc = null; if ( locale == null ) { if ( rlocale == null ) loc = ulocale; else loc = rlocale; } else { if ( USER_LOCALE.equals( locale ) ) loc = ulocale; else loc = new ULocale( locale ); } return loc; } /** * get a string formatter object * * @param value * string format * @return formatter object */ public StringFormatter getStringFormatter( String pattern ) { return getStringFormatter( pattern, null ); } public StringFormatter getStringFormatter( String pattern, String locale ) { String key = pattern + ":" + locale; StringFormatter fmt = stringFormatters.get( key ); if ( fmt == null ) { ULocale loc = determineLocale( locale ); fmt = new StringFormatter( pattern, loc ); stringFormatters.put( key, fmt ); } return fmt; } /** * get a number formatter object * * @param pattern * number format * @return formatter object */ public NumberFormatter getNumberFormatter( String pattern ) { return getNumberFormatter( pattern, null ); } public NumberFormatter getNumberFormatter( String pattern, String locale ) { String key = pattern + ":" + locale; NumberFormatter fmt = numberFormatters.get( key ); if ( fmt == null ) { ULocale loc = determineLocale( locale ); fmt = new NumberFormatter( pattern, loc ); numberFormatters.put( key, fmt ); } return fmt; } /** * get a date formatter object * * @param value * date format * @return formatter object */ public DateFormatter getDateFormatter( String pattern ) { return getDateFormatter( pattern, null ); } public DateFormatter 
getDateFormatter( String pattern, String locale ) { String key = pattern + ":" + locale; DateFormatter fmt = dateFormatters.get( key ); if ( fmt == null ) { ULocale loc = determineLocale( locale ); fmt = new DateFormatter( pattern, loc, timeZone ); dateFormatters.put( key, fmt ); } return fmt; } /** * set the executor used in the execution context * * @param executor */ public void setExecutor( IReportExecutor executor ) { this.executor = executor; } /** * get the executor used to execute the report * * @return report executor */ public IReportExecutor getExecutor( ) { return executor; } public TOCBuilder getTOCBuilder( ) { return tocBuilder; } public void setTOCBuilder( TOCBuilder builder ) { this.tocBuilder = builder; } /** * set the report document used in the context * * @param doc */ public void setReportDocument( IReportDocument doc ) { this.reportDoc = doc; } /** * get the report document used in the context. * * @return */ public IReportDocument getReportDocument( ) { return reportDoc; } public void setReportDocWriter( ReportDocumentWriter docWriter ) { this.docWriter = docWriter; } public ReportDocumentWriter getReportDocWriter( ) { return docWriter; } /** * @return Returns the action handler. */ public IHTMLActionHandler getActionHandler( ) { return renderOption.getActionHandler( ); } /** * @return Returns the action handler. */ public IHTMLImageHandler getImageHandler( ) { return renderOption.getImageHandler( ); } /** * return application class loader. The application class loader is used to * load the report item event handle and java classes called in the * javascript. * * @return class loader */ public ClassLoader getApplicationClassLoader( ) { if ( applicationClassLoader == null ) { closeClassLoader = true; applicationClassLoader = AccessController.doPrivileged( new PrivilegedAction<ApplicationClassLoader>( ) { public ApplicationClassLoader run( ) { return new ApplicationClassLoader( engine, runnable, appContext ); } } ); if ( scriptContext != null ) { scriptContext .setApplicationClassLoader( applicationClassLoader ); } } return applicationClassLoader; } public void setApplicationClassLoader( ClassLoader classLoader ) { if ( classLoader == null ) { throw new NullPointerException( "null classloader" ); } if ( closeClassLoader && applicationClassLoader instanceof ApplicationClassLoader ) { ( (ApplicationClassLoader) applicationClassLoader ).close( ); } closeClassLoader = false; this.applicationClassLoader = classLoader; if ( scriptContext != null ) { scriptContext.setApplicationClassLoader( applicationClassLoader ); } } /** * Set the cancel flag. 
*/ public void cancel( ) { isCancelled = true; // cancel the dte's session if ( dataEngine != null ) { DataRequestSession session = dataEngine.getDTESession( ); if ( session != null ) { session.cancel( ); } } } public boolean isCanceled( ) { return isCancelled; } public void restart( ) throws EngineException { getDataEngine( ).getDTESession( ).restart( ); this.isCancelled = false; } public void setCancelOnError( boolean cancel ) { cancelOnError = cancel; } public void setDataSource( DocumentDataSource dataSource ) throws IOException { this.dataSource = dataSource; this.dataSource.open( ); } public DocumentDataSource getDataSource( ) { return dataSource; } public IBaseResultSet executeQuery( IBaseResultSet parent, IDataQueryDefinition query, Object queryOwner, boolean useCache ) throws BirtException { IDataEngine dataEngine = getDataEngine( ); return dataEngine.execute( parent, query, queryOwner, useCache ); } public IBaseResultSet getResultSet( ) { if ( rsets != null ) { return rsets[0]; } return null; } public void setResultSet( IBaseResultSet rset ) { if ( rset != null ) { if ( rsets != null && rsets.length == 1 && rsets[0] == rset ) { return; } setResultSets( new IBaseResultSet[]{rset} ); } else { setResultSets( null ); } } public IBaseResultSet[] getResultSets( ) { return rsets; } public void setResultSets( IBaseResultSet[] rsets ) { if ( this.rsets == rsets ) { return; } if ( rsets != null ) { this.rsets = rsets; if ( rsets[0] != null ) { try { DataAdapterUtil.registerDataObject( scriptContext, new ResultIteratorTree( rsets[0] ) ); } catch ( AdapterException e ) { log.log( Level.SEVERE, e.getLocalizedMessage( ), e ); } } } else { this.rsets = null; // FIXME: we should also remove the JSObject from scope // Scriptable scope = scriptContext.getRootScope( ); // DataAdapterUtil.registerJSObject( scope, // new ResultIteratorTree( rsets[0] ) ); } } private class ResultIteratorTree implements ILinkedResult { IBaseResultSet currentRset; int resultType = -1; public ResultIteratorTree( IBaseResultSet rset ) { this.currentRset = rset; if ( rset instanceof IQueryResultSet ) { resultType = ILinkedResult.TYPE_TABLE; } else if ( rset instanceof ICubeResultSet ) { resultType = ILinkedResult.TYPE_CUBE; } } public ILinkedResult getParent( ) { return new ResultIteratorTree( currentRset.getParent( ) ); } public Object getCurrentResult( ) { if ( resultType == ILinkedResult.TYPE_TABLE ) { return ( (IQueryResultSet) currentRset ).getResultIterator( ); } else if ( resultType == ILinkedResult.TYPE_CUBE ) { return ( (ICubeResultSet) currentRset ).getCubeCursor( ); } return null; } public int getCurrentResultType( ) { return resultType; } } public boolean hasErrors( ) { return !elementExceptions.isEmpty( ); } /** * Returns list or errors, the max count of the errors is * <code>MAX_ERRORS</code> * * @return error list which has max error size limited to * <code>MAX_ERRORS</code> */ public List getErrors( ) { List errors = this.getAllErrors( ); if ( errors.size( ) > MAX_ERRORS ) { errors = errors.subList( 0, MAX_ERRORS - 1 ); } return errors; } /** * Returns all errors. * * @return list of all the errors. */ public List getAllErrors( ) { return errors; } /** * @return the mAX_ERRORS */ public int getMaxErrors( ) { return MAX_ERRORS; } /** * @param max_errors * the mAX_ERRORS to set */ public void setMaxErrors( int maxErrors ) { MAX_ERRORS = maxErrors; } /** * to remember the current report item is in master page or not. 
*/ boolean isExecutingMasterPage = false; /** * Since the data set in master page will be executed in each page and while * the data set in report body will only be executed once, we need to * remember the current report item is in master page or not. This will be * used to help store the executed resultSetID and load it to distinguish * them. */ public void setExecutingMasterPage( boolean isExecutingMasterPage ) { this.isExecutingMasterPage = isExecutingMasterPage; } public boolean isExecutingMasterPage( ) { return isExecutingMasterPage; } /** * Add a page break listener. * * @param listener * the page break listener. */ public void addPageBreakListener( IPageBreakListener listener ) { if ( pageBreakListeners == null ) { pageBreakListeners = new ArrayList( ); } pageBreakListeners.add( listener ); } /** * Notify page break listeners that page is broken. */ public void firePageBreakEvent( ) { if ( pageBreakListeners != null ) { for ( int i = 0; i < pageBreakListeners.size( ); i++ ) { ( (IPageBreakListener) pageBreakListeners.get( i ) ) .onPageBreak( ); } } } /** * Remove a page break listener. * * @param listener * the page break listener. */ public void removePageBreakListener( IPageBreakListener listener ) { if ( pageBreakListeners != null ) { pageBreakListeners.remove( listener ); } } public IEngineTask getEngineTask( ) { return task; } public Logger getLogger( ) { return log; } public void setLogger( Logger logger ) { log = logger; } protected ExecutionPolicy executionPolicy; public void optimizeExecution( ) { if ( ( task != null ) && ( task.getTaskType( ) == IEngineTask.TASK_RUN ) && !isFixedLayout ) { String[] engineExts = getEngineExtensions( ); if ( engineExts == null || engineExts.length == 0 ) { executionPolicy = new ExecutionOptimize( ) .optimize( getReport( ) ); } } } public ExecutionPolicy getExecutionPolicy( ) { return executionPolicy; } public Report getReport( ) { if ( reportIR != null ) { return reportIR; } if ( runnable != null ) { reportIR = new ReportParser( ).parse( (ReportDesignHandle) runnable .getDesignHandle( ) ); setupFromReport( ); } return reportIR; } public void setReport( Report reportIR ) { this.reportIR = reportIR; setupFromReport( ); } protected void setupFromReport( ) { if ( reportIR == null ) return; String locale = reportIR.getLocale( ); if ( locale != null ) { rlocale = new ULocale( locale ); this.getScriptContext( ).setLocale( rlocale.toLocale( ) ); } } public URL getResource( String resourceName ) { if ( getDesign( ) != null ) { return getDesign( ).findResource( resourceName, IResourceLocator.OTHERS, appContext ); } return null; } public ResourceLocatorWrapper getResourceLocator( ) { return task.getResourceLocatorWrapper( ); } public ExtendedItemManager getExtendedItemManager( ) { return extendedItemManager; } public EngineExtensionManager getEngineExtensionManager( ) { return engineExtensionManager; } public void setMaxRowsPerQuery( int maxRows ) { if ( maxRows >= 0 ) { maxRowsPerQuery = maxRows; } } public int getMaxRowsPerQuery( ) { return maxRowsPerQuery; } private String[] engineExts; public String[] getEngineExtensions( ) { if ( engineExts != null ) { return engineExts; } engineExts = engine.getEngineExtensions( runnable ); if ( engineExts == null ) { engineExts = new String[]{}; } return engineExts; } private boolean enableProgreesiveViewing = true; public void enableProgressiveViewing( boolean enabled ) { enableProgreesiveViewing = enabled; } public boolean isProgressiveViewingEnable( ) { return enableProgreesiveViewing; } public 
EventHandlerManager getEventHandlerManager( ) { return eventHandlerManager; } public void setProgressMonitor( IProgressMonitor monitor ) { progressMonitor = new ProgressMonitorProxy( monitor ); } public IProgressMonitor getProgressMonitor( ) { if ( progressMonitor == null ) { progressMonitor = new ProgressMonitorProxy( null ); } return progressMonitor; } public boolean needOutputResultSet( ) { return needOutputResultSet; } public void setNeedOutputResultSet( boolean needOutputResultSet ) { this.needOutputResultSet = needOutputResultSet; } public Object getPageVariable( String name ) { if ( "totalPage".equals( name ) ) { return Long.valueOf( totalPage ); } if ( "pageNumber".equals( name ) ) { return Long.valueOf( totalPage ); } PageVariable var = pageVariables.get( name ); if ( var != null ) { return var.getValue( ); } return null; } public void setPageVariable( String name, Object value ) { PageVariable var = pageVariables.get( name ); if ( var != null ) { var.setValue( value ); } // lazy add special page variables else if ( IReportContext.PAGE_VAR_PAGE_LABEL.equals( name ) ) { addPageVariable( new PageVariable( IReportContext.PAGE_VAR_PAGE_LABEL, PageVariable.SCOPE_PAGE, value ) ); } } public void addPageVariables( Collection<PageVariable> vars ) { for ( PageVariable var : vars ) { pageVariables.put( var.getName( ), var ); } } public Collection<PageVariable> getPageVariables( ) { return pageVariables.values( ); } public void addPageVariable( PageVariable var ) { pageVariables.put( var.getName( ), var ); } public boolean isFixedLayout( ) { return isFixedLayout; } public void setFixedLayout( boolean isFixedLayout ) { this.isFixedLayout = isFixedLayout; } public int getTaskType( ) { return task.getTaskType( ); } private IScriptContext getRootContext( ) { ScriptContext result = scriptContext; while ( result.getParent( ) != null ) { result = result.getParent( ); } return result; } public boolean needRefreshData() { return this.refreshData; } public void setRefreshData(boolean refreshData) { this.refreshData = refreshData; } }
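The getStringFormatter, getNumberFormatter and getDateFormatter methods above all share one lazy-cache idiom: the cache key is the format pattern concatenated with ":" and the locale name, and a formatter instance is created only on the first lookup for that key. Below is a minimal, self-contained sketch of that idiom only; it uses java.text.SimpleDateFormat as a stand-in for BIRT's DateFormatter and skips the null-locale resolution done by determineLocale, so the class and behaviour here are illustrative assumptions, not engine API.

import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

// Minimal sketch of the "pattern:locale" formatter cache used above.
// SimpleDateFormat stands in for BIRT's DateFormatter; only the caching
// idiom is reproduced, not the engine's locale fallback logic.
public class FormatterCacheSketch {
    private final Map<String, SimpleDateFormat> dateFormatters = new HashMap<>();

    public SimpleDateFormat getDateFormatter(String pattern, Locale locale) {
        String key = pattern + ":" + locale;      // same key scheme as the engine code
        SimpleDateFormat fmt = dateFormatters.get(key);
        if (fmt == null) {                        // create lazily, then cache for reuse
            fmt = new SimpleDateFormat(pattern, locale);
            dateFormatters.put(key, fmt);
        }
        return fmt;
    }

    public static void main(String[] args) {
        FormatterCacheSketch cache = new FormatterCacheSketch();
        SimpleDateFormat first = cache.getDateFormatter("yyyy-MM-dd", Locale.GERMANY);
        SimpleDateFormat second = cache.getDateFormatter("yyyy-MM-dd", Locale.GERMANY);
        System.out.println(first == second);      // true: the second call hits the cache
    }
}

A consequence of this keying convention is that the same pattern requested under two different locales yields two independent cached formatter instances, which matches how the engine methods above behave.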
Fix No error message about the incorrect script(T34245)
engine/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/executor/ExecutionContext.java
Fix No error message about the incorrect script(T34245)
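ElementExceptionInfo.addException in the ExecutionContext code above avoids flooding the error list with repeats: an incoming exception whose error code and localized message match an already recorded entry only increments that entry's counter. The following stripped-down sketch shows that counting strategy in isolation; the class and field names are illustrative assumptions and not part of the BIRT API.

import java.util.ArrayList;
import java.util.List;

// Sketch of the duplicate-error counting used by ElementExceptionInfo above:
// an identical (code, message) pair bumps a counter instead of growing the list.
class ErrorCounterSketch {
    static class Entry {
        final String code;
        final String message;
        int count = 1;
        Entry(String code, String message) { this.code = code; this.message = message; }
    }

    private final List<Entry> entries = new ArrayList<>();

    void add(String code, String message) {
        for (Entry e : entries) {
            if (e.code.equals(code) && e.message.equals(message)) {
                e.count++;          // same error seen again: only bump the counter
                return;
            }
        }
        entries.add(new Entry(code, message));
    }

    public static void main(String[] args) {
        ErrorCounterSketch errors = new ErrorCounterSketch();
        errors.add("SCRIPT_ERROR", "bad expression");
        errors.add("SCRIPT_ERROR", "bad expression");
        errors.add("DATA_ERROR", "missing column");
        System.out.println(errors.entries.size());       // 2 distinct entries
        System.out.println(errors.entries.get(0).count);  // first entry counted twice
    }
}

Keeping a parallel count instead of storing every duplicate keeps the report's error collection bounded even when the same element fails once per row or per page.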
Java
agpl-3.0
c0323339e8978d7cacc46c9b4be669a9432d0eb5
0
ivanovlev/Gadgetbridge,Freeyourgadget/Gadgetbridge,ivanovlev/Gadgetbridge,Freeyourgadget/Gadgetbridge,ivanovlev/Gadgetbridge,rosenpin/Gadgetbridge,rosenpin/Gadgetbridge,gtank/Gadgetbridge,roidelapluie/Gadgetbridge,Freeyourgadget/Gadgetbridge,Freeyourgadget/Gadgetbridge,roidelapluie/Gadgetbridge,rosenpin/Gadgetbridge,roidelapluie/Gadgetbridge
package nodomain.freeyourgadget.gadgetbridge.service.devices.pebble; import android.util.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.Arrays; import java.util.Random; import java.util.SimpleTimeZone; import java.util.UUID; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEvent; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventAppInfo; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventAppManagement; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventCallControl; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventDismissNotification; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventMusicControl; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventScreenshot; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventSendBytes; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventVersionInfo; import nodomain.freeyourgadget.gadgetbridge.impl.GBDeviceApp; import nodomain.freeyourgadget.gadgetbridge.model.ServiceCommand; import nodomain.freeyourgadget.gadgetbridge.service.bt.GBDeviceProtocol; public class PebbleProtocol extends GBDeviceProtocol { private static final Logger LOG = LoggerFactory.getLogger(PebbleProtocol.class); static final short ENDPOINT_TIME = 11; static final short ENDPOINT_FIRMWAREVERSION = 16; public static final short ENDPOINT_PHONEVERSION = 17; static final short ENDPOINT_SYSTEMMESSAGE = 18; static final short ENDPOINT_MUSICCONTROL = 32; static final short ENDPOINT_PHONECONTROL = 33; static final short ENDPOINT_APPLICATIONMESSAGE = 48; static final short ENDPOINT_LAUNCHER = 49; static final short ENDPOINT_APPRUNSTATE = 52; static final short ENDPOINT_LOGS = 2000; static final short ENDPOINT_PING = 2001; static final short ENDPOINT_LOGDUMP = 2002; static final short ENDPOINT_RESET = 2003; static final short ENDPOINT_APP = 2004; static final short ENDPOINT_APPLOGS = 2006; static final short ENDPOINT_NOTIFICATION = 3000; static final short ENDPOINT_EXTENSIBLENOTIFS = 3010; static final short ENDPOINT_RESOURCE = 4000; static final short ENDPOINT_SYSREG = 5000; static final short ENDPOINT_FCTREG = 5001; static final short ENDPOINT_APPMANAGER = 6000; static final short ENDPOINT_APPFETCH = 6001; // 3.x only public static final short ENDPOINT_DATALOG = 6778; static final short ENDPOINT_RUNKEEPER = 7000; static final short ENDPOINT_SCREENSHOT = 8000; static final short ENDPOINT_NOTIFICATIONACTION = 11440; // 3.x only, TODO: find a better name static final short ENDPOINT_BLOBDB = (short) 45531; // 3.x only static final short ENDPOINT_PUTBYTES = (short) 48879; static final byte APPRUNSTATE_START = 1; static final byte BLOBDB_INSERT = 1; static final byte BLOBDB_DELETE = 4; static final byte BLOBDB_CLEAR = 5; static final byte BLOBDB_PIN = 1; static final byte BLOBDB_APP = 2; static final byte BLOBDB_REMINDER = 3; static final byte BLOBDB_NOTIFICATION = 4; static final byte NOTIFICATION_EMAIL = 0; static final byte NOTIFICATION_SMS = 1; static final byte NOTIFICATION_TWITTER = 2; static final byte NOTIFICATION_FACEBOOK = 3; static final byte PHONECONTROL_ANSWER = 1; static final byte PHONECONTROL_HANGUP = 2; static final byte PHONECONTROL_GETSTATE = 3; static final byte PHONECONTROL_INCOMINGCALL = 4; static final byte PHONECONTROL_OUTGOINGCALL = 5; static final byte PHONECONTROL_MISSEDCALL = 6; static final 
byte PHONECONTROL_RING = 7; static final byte PHONECONTROL_START = 8; static final byte PHONECONTROL_END = 9; static final byte MUSICCONTROL_SETMUSICINFO = 16; static final byte MUSICCONTROL_PLAYPAUSE = 1; static final byte MUSICCONTROL_PAUSE = 2; static final byte MUSICCONTROL_PLAY = 3; static final byte MUSICCONTROL_NEXT = 4; static final byte MUSICCONTROL_PREVIOUS = 5; static final byte MUSICCONTROL_VOLUMEUP = 6; static final byte MUSICCONTROL_VOLUMEDOWN = 7; static final byte MUSICCONTROL_GETNOWPLAYING = 7; static final byte TIME_GETTIME = 0; static final byte TIME_SETTIME = 2; static final byte TIME_SETTIME_UTC = 3; static final byte FIRMWAREVERSION_GETVERSION = 0; static final byte APPMANAGER_GETAPPBANKSTATUS = 1; static final byte APPMANAGER_REMOVEAPP = 2; static final byte APPMANAGER_REFRESHAPP = 3; static final byte APPMANAGER_GETUUIDS = 5; static final int APPMANAGER_RES_SUCCESS = 1; static final byte APPLICATIONMESSAGE_PUSH = 1; static final byte APPLICATIONMESSAGE_REQUEST = 2; static final byte APPLICATIONMESSAGE_ACK = (byte) 0xff; static final byte APPLICATIONMESSAGE_NACK = (byte) 0x7f; static final byte DATALOG_CLOSE = (byte) 0x03; static final byte DATALOG_TIMEOUT = 0x07; static final byte DATALOG_ACK = (byte) 0x85; static final byte DATALOG_NACK = (byte) 0x86; static final byte PING_PING = 0; static final byte PING_PONG = 1; static final byte PUTBYTES_INIT = 1; static final byte PUTBYTES_SEND = 2; static final byte PUTBYTES_COMMIT = 3; static final byte PUTBYTES_ABORT = 4; static final byte PUTBYTES_COMPLETE = 5; public static final byte PUTBYTES_TYPE_FIRMWARE = 1; public static final byte PUTBYTES_TYPE_RECOVERY = 2; public static final byte PUTBYTES_TYPE_SYSRESOURCES = 3; public static final byte PUTBYTES_TYPE_RESOURCES = 4; public static final byte PUTBYTES_TYPE_BINARY = 5; static final byte PUTBYTES_TYPE_FILE = 6; public static final byte PUTBYTES_TYPE_WORKER = 7; static final byte RESET_REBOOT = 0; static final byte SCREENSHOT_TAKE = 0; static final byte SYSTEMMESSAGE_FIRMWARESTART = 1; static final byte SYSTEMMESSAGE_FIRMWARECOMPLETE = 2; static final byte SYSTEMMESSAGE_FIRMWAREFAIL = 3; static final byte PHONEVERSION_REQUEST = 0; static final byte PHONEVERSION_APPVERSION_MAGIC = 2; // increase this if pebble complains static final byte PHONEVERSION_APPVERSION_MAJOR = 2; static final byte PHONEVERSION_APPVERSION_MINOR = 3; static final byte PHONEVERSION_APPVERSION_PATCH = 0; static final int PHONEVERSION_SESSION_CAPS_GAMMARAY = 0x80000000; static final int PHONEVERSION_REMOTE_CAPS_TELEPHONY = 0x00000010; static final int PHONEVERSION_REMOTE_CAPS_SMS = 0x00000020; static final int PHONEVERSION_REMOTE_CAPS_GPS = 0x00000040; static final int PHONEVERSION_REMOTE_CAPS_BTLE = 0x00000080; static final int PHONEVERSION_REMOTE_CAPS_REARCAMERA = 0x00000100; static final int PHONEVERSION_REMOTE_CAPS_ACCEL = 0x00000200; static final int PHONEVERSION_REMOTE_CAPS_GYRO = 0x00000400; static final int PHONEVERSION_REMOTE_CAPS_COMPASS = 0x00000800; static final byte PHONEVERSION_REMOTE_OS_UNKNOWN = 0; static final byte PHONEVERSION_REMOTE_OS_IOS = 1; static final byte PHONEVERSION_REMOTE_OS_ANDROID = 2; static final byte PHONEVERSION_REMOTE_OS_OSX = 3; static final byte PHONEVERSION_REMOTE_OS_LINUX = 4; static final byte PHONEVERSION_REMOTE_OS_WINDOWS = 5; static final byte TYPE_BYTEARRAY = 0; static final byte TYPE_CSTRING = 1; static final byte TYPE_UINT32 = 2; static final byte TYPE_INT32 = 3; static final short LENGTH_PREFIX = 4; static final short LENGTH_SIMPLEMESSAGE = 1; static 
final short LENGTH_APPFETCH = 2; static final short LENGTH_APPRUNSTATE = 17; static final short LENGTH_PING = 5; static final short LENGTH_PHONEVERSION = 17; static final short LENGTH_REMOVEAPP_2X = 17; static final short LENGTH_REMOVEAPP_3X = 21; static final short LENGTH_REFRESHAPP = 5; static final short LENGTH_SETTIME = 5; static final short LENGTH_SYSTEMMESSAGE = 2; static final short LENGTH_UPLOADSTART_2X = 7; static final short LENGTH_UPLOADSTART_3X = 10; static final short LENGTH_UPLOADCHUNK = 9; static final short LENGTH_UPLOADCOMMIT = 9; static final short LENGTH_UPLOADCOMPLETE = 5; static final short LENGTH_UPLOADCANCEL = 5; static final byte LENGTH_UUID = 16; private static final String[] hwRevisions = {"unknown", "ev1", "ev2", "ev2_3", "ev2_4", "v1_5", "v2_0", "evt2", "dvt"}; private static Random mRandom = new Random(); boolean isFw3x = false; boolean mForceProtocol = false; GBDeviceEventScreenshot mDevEventScreenshot = null; int mScreenshotRemaining = -1; byte last_id = -1; private ArrayList<UUID> tmpUUIDS = new ArrayList<>(); private MorpheuzSupport mMorpheuzSupport = new MorpheuzSupport(PebbleProtocol.this); private WeatherNeatSupport mWeatherNeatSupport = new WeatherNeatSupport(PebbleProtocol.this); private GadgetbridgePblSupport mGadgetbridgePblSupport = new GadgetbridgePblSupport(PebbleProtocol.this); private static byte[] encodeSimpleMessage(short endpoint, byte command) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_SIMPLEMESSAGE); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_SIMPLEMESSAGE); buf.putShort(endpoint); buf.put(command); return buf.array(); } private static byte[] encodeMessage(short endpoint, byte type, int cookie, String[] parts) { // Calculate length first int length = LENGTH_PREFIX + 1; if (parts != null) { for (String s : parts) { if (s == null || s.equals("")) { length++; // encode null or empty strings as 0x00 later continue; } length += (1 + s.getBytes().length); } } if (endpoint == ENDPOINT_PHONECONTROL) { length += 4; //for cookie; } // Encode Prefix ByteBuffer buf = ByteBuffer.allocate(length); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) (length - LENGTH_PREFIX)); buf.putShort(endpoint); buf.put(type); if (endpoint == ENDPOINT_PHONECONTROL) { buf.putInt(cookie); } // Encode Pascal-Style Strings if (parts != null) { for (String s : parts) { if (s == null || s.equals("")) { //buf.put((byte)0x01); buf.put((byte) 0x00); continue; } int partlength = s.getBytes().length; if (partlength > 255) partlength = 255; buf.put((byte) partlength); buf.put(s.getBytes(), 0, partlength); } } return buf.array(); } private byte[] encodeNotification(int id, String title, String subtitle, String body, byte type) { Long ts = System.currentTimeMillis(); if (!isFw3x) { ts += (SimpleTimeZone.getDefault().getOffset(ts)); } ts /= 1000; if (isFw3x) { // 3.x notification return encodeBlobdbNotification((int) (ts & 0xffffffff), title, subtitle, body, type); } else if (mForceProtocol || type != NOTIFICATION_EMAIL) { // 2.x notification return encodeExtensibleNotification(id, (int) (ts & 0xffffffff), title, subtitle, body, type); } else { // 1.x notification on FW 2.X String[] parts = {title, body, ts.toString(), subtitle}; return encodeMessage(ENDPOINT_NOTIFICATION, type, 0, parts); } } @Override public byte[] encodeSMS(String from, String body) { return encodeNotification(mRandom.nextInt(), from, null, body, NOTIFICATION_SMS); } @Override public byte[] encodeEmail(String from, String subject, String body) { return 
encodeNotification(mRandom.nextInt(), from, subject, body, NOTIFICATION_EMAIL); } @Override public byte[] encodeGenericNotification(String title, String details) { return encodeNotification(mRandom.nextInt(), title, null, details, NOTIFICATION_SMS); } @Override public byte[] encodeSetTime(long ts) { if (ts == -1) { ts = System.currentTimeMillis(); } long ts_offset = (SimpleTimeZone.getDefault().getOffset(ts)); ByteBuffer buf; if (isFw3x) { String timezone = SimpleTimeZone.getDefault().getDisplayName(false, SimpleTimeZone.SHORT); short length = (short) (LENGTH_SETTIME + timezone.length() + 3); buf = ByteBuffer.allocate(LENGTH_PREFIX + length); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(length); buf.putShort(ENDPOINT_TIME); buf.put(TIME_SETTIME_UTC); buf.putInt((int) (ts / 1000)); buf.putShort((short) (ts_offset / 60000)); buf.put((byte) timezone.length()); buf.put(timezone.getBytes()); LOG.info(timezone); } else { buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_SETTIME); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_SETTIME); buf.putShort(ENDPOINT_TIME); buf.put(TIME_SETTIME); buf.putInt((int) ((ts + ts_offset) / 1000)); } return buf.array(); } @Override public byte[] encodeFindDevice(boolean start) { return encodeSetCallState("Where are you?", "Gadgetbridge", start ? ServiceCommand.CALL_INCOMING : ServiceCommand.CALL_END); } private static byte[] encodeExtensibleNotification(int id, int timestamp, String title, String subtitle, String body, byte type) { String[] parts = {title, subtitle, body}; // Calculate length first byte attributes_count = 0; int length = 21 + 17; if (parts != null) { for (String s : parts) { if (s == null || s.equals("")) { continue; } attributes_count++; length += (3 + s.getBytes().length); } } // Encode Prefix ByteBuffer buf = ByteBuffer.allocate(length + LENGTH_PREFIX); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) (length)); buf.putShort(ENDPOINT_EXTENSIBLENOTIFS); buf.order(ByteOrder.LITTLE_ENDIAN); // ! buf.put((byte) 0x00); // ? buf.put((byte) 0x01); // add notifications buf.putInt(0x00000002); // flags - ? buf.putInt(id); buf.putInt(0x00000000); // ANCS id buf.putInt(timestamp); buf.put((byte) 0x01); // layout - ? 
buf.put(attributes_count); // length attributes buf.put((byte) 1); // len actions - only dismiss byte attribute_id = 0; // Encode Pascal-Style Strings if (parts != null) { for (String s : parts) { attribute_id++; if (s == null || s.equals("")) { continue; } int partlength = s.getBytes().length; if (partlength > 255) partlength = 255; buf.put(attribute_id); buf.putShort((short) partlength); buf.put(s.getBytes(), 0, partlength); } } // ACTION buf.put((byte) 0x01); // id buf.put((byte) 0x04); // dismiss action buf.put((byte) 0x01); // number attributes buf.put((byte) 0x01); // attribute id (title) String actionstring = "dismiss all"; buf.putShort((short) actionstring.length()); buf.put(actionstring.getBytes()); return buf.array(); } private byte[] encodeBlobdbNotification(int timestamp, String title, String subtitle, String body, byte type) { String[] parts = {title, subtitle, body}; int icon_id = 1; switch (type) { case NOTIFICATION_EMAIL: icon_id = 19; break; case NOTIFICATION_SMS: icon_id = 45; } // Calculate length first final short BLOBDB_LENGTH = 23; final short NOTIFICATION_PIN_LENGTH = 46; final short ACTIONS_LENGTH = 17; byte attributes_count = 1; // icon short attributes_length = 7; // icon if (parts != null) { for (String s : parts) { if (s == null || s.equals("")) { continue; } attributes_count++; attributes_length += (3 + s.getBytes().length); } } byte actions_count = 0; if (mForceProtocol) { actions_count = 1; attributes_length += ACTIONS_LENGTH; } short length = (short) (BLOBDB_LENGTH + NOTIFICATION_PIN_LENGTH + attributes_length); short pin_length = (short) (NOTIFICATION_PIN_LENGTH + attributes_length); // Encode Prefix ByteBuffer buf = ByteBuffer.allocate(length + LENGTH_PREFIX); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(length); buf.putShort(ENDPOINT_BLOBDB); buf.order(ByteOrder.LITTLE_ENDIAN); // blobdb - 23 bytes buf.put(BLOBDB_INSERT); buf.putShort((short) mRandom.nextInt()); // token buf.put(BLOBDB_NOTIFICATION); buf.put(LENGTH_UUID); // uuid length byte[] uuid_buf = new byte[LENGTH_UUID]; mRandom.nextBytes(uuid_buf); buf.put(uuid_buf); // random UUID buf.putShort(pin_length); // length of the encapsulated data // pin - 46 bytes buf.put(uuid_buf); // random UUID buf.put(uuid_buf); // parent UUID buf.putInt(timestamp); // 32-bit timestamp buf.putShort((short) 0); // duration buf.put((byte) 0x01); // type (0x01 = notification) buf.putShort((short) 0x0001); // flags 0x0001 = ? buf.put((byte) 0x04); // layout (0x04 = notification?) 
buf.putShort(attributes_length); // total length of all attributes and actions in bytes buf.put(attributes_count); buf.put(actions_count); byte attribute_id = 0; // Encode Pascal-Style Strings if (parts != null) { for (String s : parts) { attribute_id++; if (s == null || s.equals("")) { continue; } int partlength = s.getBytes().length; if (partlength > 255) partlength = 255; buf.put(attribute_id); buf.putShort((short) partlength); buf.put(s.getBytes(), 0, partlength); } } buf.put((byte) 4); // icon buf.putShort((short) 4); // length of int buf.putInt(icon_id); if (mForceProtocol) { // ACTION buf.put((byte) 0x01); // id buf.put((byte) 0x04); // dismiss action buf.put((byte) 0x01); // number attributes buf.put((byte) 0x01); // attribute id (title) String actionstring = "dismiss all"; buf.putShort((short) actionstring.length()); buf.put(actionstring.getBytes()); } return buf.array(); } public byte[] encodeInstallMetadata(UUID uuid, String appName, short appVersion, short sdkVersion, int iconId) { // Calculate length first final short BLOBDB_LENGTH = 23; final short METADATA_LENGTH = 126; final short length = (short) (BLOBDB_LENGTH + METADATA_LENGTH); byte[] name_buf = new byte[96]; System.arraycopy(appName.getBytes(), 0, name_buf, 0, appName.length()); ByteBuffer buf = ByteBuffer.allocate(length + LENGTH_PREFIX); // Encode Prefix buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(length); buf.putShort(ENDPOINT_BLOBDB); buf.order(ByteOrder.LITTLE_ENDIAN); // blobdb - 23 bytes buf.put(BLOBDB_INSERT); // insert buf.putShort((short) mRandom.nextInt()); // token buf.put(BLOBDB_APP); buf.put(LENGTH_UUID); buf.order(ByteOrder.BIG_ENDIAN); buf.putLong(uuid.getMostSignificantBits()); // watchapp uuid buf.putLong(uuid.getLeastSignificantBits()); buf.order(ByteOrder.LITTLE_ENDIAN); buf.putShort(METADATA_LENGTH); // length of the encapsulated data buf.order(ByteOrder.BIG_ENDIAN); buf.putLong(uuid.getMostSignificantBits()); // watchapp uuid buf.putLong(uuid.getLeastSignificantBits()); buf.order(ByteOrder.LITTLE_ENDIAN); buf.putInt(iconId); buf.putShort(appVersion); buf.putShort(sdkVersion); buf.put((byte) 0); // app_face_bgcolor buf.put((byte) 0); // app_face_template_id buf.put(name_buf); // 96 bytes return buf.array(); } public byte[] encodeAppFetchAck() { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_APPFETCH); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_APPFETCH); buf.putShort(ENDPOINT_APPFETCH); buf.put((byte) 0x01); buf.put((byte) 0x01); return buf.array(); } public byte[] encodeGetTime() { return encodeSimpleMessage(ENDPOINT_TIME, TIME_GETTIME); } @Override public byte[] encodeSetCallState(String number, String name, ServiceCommand command) { String[] parts = {number, name}; byte pebbleCmd; switch (command) { case CALL_START: pebbleCmd = PHONECONTROL_START; break; case CALL_END: pebbleCmd = PHONECONTROL_END; break; case CALL_INCOMING: pebbleCmd = PHONECONTROL_INCOMINGCALL; break; case CALL_OUTGOING: // pebbleCmd = PHONECONTROL_OUTGOINGCALL; /* * HACK/WORKAROUND for non-working outgoing call display. * Just send a incoming call command immediately followed by a start call command * This prevents vibration of the Pebble. 
*/ byte[] callmsg = encodeMessage(ENDPOINT_PHONECONTROL, PHONECONTROL_INCOMINGCALL, 0, parts); byte[] startmsg = encodeMessage(ENDPOINT_PHONECONTROL, PHONECONTROL_START, 0, parts); byte[] msg = new byte[callmsg.length + startmsg.length]; System.arraycopy(callmsg, 0, msg, 0, callmsg.length); System.arraycopy(startmsg, 0, msg, startmsg.length, startmsg.length); return msg; // END HACK default: return null; } return encodeMessage(ENDPOINT_PHONECONTROL, pebbleCmd, 0, parts); } @Override public byte[] encodeSetMusicInfo(String artist, String album, String track) { String[] parts = {artist, album, track}; return encodeMessage(ENDPOINT_MUSICCONTROL, MUSICCONTROL_SETMUSICINFO, 0, parts); } @Override public byte[] encodeFirmwareVersionReq() { return encodeSimpleMessage(ENDPOINT_FIRMWAREVERSION, FIRMWAREVERSION_GETVERSION); } @Override public byte[] encodeAppInfoReq() { return encodeSimpleMessage(ENDPOINT_APPMANAGER, APPMANAGER_GETUUIDS); } @Override public byte[] encodeAppStart(UUID uuid) { if (isFw3x) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_APPRUNSTATE); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_APPRUNSTATE); buf.putShort(ENDPOINT_APPRUNSTATE); buf.put(APPRUNSTATE_START); buf.putLong(uuid.getMostSignificantBits()); buf.putLong(uuid.getLeastSignificantBits()); return buf.array(); } else { ArrayList<Pair<Integer, Object>> pairs = new ArrayList<>(); pairs.add(new Pair<>(1, (Object) 1)); // launch return encodeApplicationMessagePush(ENDPOINT_LAUNCHER, uuid, pairs); } } @Override public byte[] encodeAppDelete(UUID uuid) { ByteBuffer buf; if (isFw3x) { buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_REMOVEAPP_3X); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_REMOVEAPP_3X); buf.putShort(ENDPOINT_BLOBDB); buf.order(ByteOrder.LITTLE_ENDIAN); buf.put(BLOBDB_DELETE); buf.putShort((short) mRandom.nextInt()); // token buf.put(BLOBDB_APP); buf.put(LENGTH_UUID); buf.order(ByteOrder.BIG_ENDIAN); } else { buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_REMOVEAPP_2X); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_REMOVEAPP_2X); buf.putShort(ENDPOINT_APPMANAGER); buf.put(APPMANAGER_REMOVEAPP); } buf.putLong(uuid.getMostSignificantBits()); buf.putLong(uuid.getLeastSignificantBits()); return buf.array(); } private byte[] encodePhoneVersion2x(byte os) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_PHONEVERSION); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_PHONEVERSION); buf.putShort(ENDPOINT_PHONEVERSION); buf.put((byte) 0x01); buf.putInt(-1); //0xffffffff if (os == PHONEVERSION_REMOTE_OS_ANDROID) { buf.putInt(PHONEVERSION_SESSION_CAPS_GAMMARAY); } else { buf.putInt(0); } buf.putInt(PHONEVERSION_REMOTE_CAPS_SMS | PHONEVERSION_REMOTE_CAPS_TELEPHONY | os); buf.put(PHONEVERSION_APPVERSION_MAGIC); buf.put(PHONEVERSION_APPVERSION_MAJOR); buf.put(PHONEVERSION_APPVERSION_MINOR); buf.put(PHONEVERSION_APPVERSION_PATCH); return buf.array(); } private byte[] encodePhoneVersion3x(byte os) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + 25); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) 25); buf.putShort(ENDPOINT_PHONEVERSION); buf.put((byte) 0x01); buf.putInt(-1); //0xffffffff buf.putInt(0); buf.putInt(os); buf.put(PHONEVERSION_APPVERSION_MAGIC); buf.put((byte) 3); // major? buf.put((byte) 0); // minor? buf.put((byte) 1); // patch? buf.put((byte) 3); // ??? buf.put((byte) 0); // ??? buf.put((byte) 0); // ??? buf.put((byte) 0); // ??? buf.putInt(0); // ??? 
return buf.array(); } @Override public byte[] encodePhoneVersion(byte os) { return encodePhoneVersion3x(os); } @Override public byte[] encodeReboot() { return encodeSimpleMessage(ENDPOINT_RESET, RESET_REBOOT); } @Override public byte[] encodeScreenshotReq() { return encodeSimpleMessage(ENDPOINT_SCREENSHOT, SCREENSHOT_TAKE); } /* pebble specific install methods */ public byte[] encodeUploadStart(byte type, int app_id, int size) { short length; if (isFw3x) { length = LENGTH_UPLOADSTART_3X; type |= 0b10000000; } else { length = LENGTH_UPLOADSTART_2X; } ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + length); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(length); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_INIT); buf.putInt(size); buf.put(type); if (isFw3x) { buf.putInt(app_id); } else { // slot buf.put((byte) app_id); } return buf.array(); } public byte[] encodeUploadChunk(int token, byte[] buffer, int size) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_UPLOADCHUNK + size); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) (LENGTH_UPLOADCHUNK + size)); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_SEND); buf.putInt(token); buf.putInt(size); buf.put(buffer, 0, size); return buf.array(); } public byte[] encodeUploadCommit(int token, int crc) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_UPLOADCOMMIT); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_UPLOADCOMMIT); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_COMMIT); buf.putInt(token); buf.putInt(crc); return buf.array(); } public byte[] encodeUploadComplete(int token) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_UPLOADCOMPLETE); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_UPLOADCOMPLETE); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_COMPLETE); buf.putInt(token); return buf.array(); } public byte[] encodeUploadCancel(int token) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_UPLOADCANCEL); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_UPLOADCANCEL); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_ABORT); buf.putInt(token); return buf.array(); } private byte[] encodeSystemMessage(byte systemMessage) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_SYSTEMMESSAGE); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_SYSTEMMESSAGE); buf.putShort(ENDPOINT_SYSTEMMESSAGE); buf.put((byte) 0); buf.put(systemMessage); return buf.array(); } public byte[] encodeInstallFirmwareStart() { return encodeSystemMessage(SYSTEMMESSAGE_FIRMWARESTART); } public byte[] encodeInstallFirmwareComplete() { return encodeSystemMessage(SYSTEMMESSAGE_FIRMWARECOMPLETE); } public byte[] encodeInstallFirmwareError() { return encodeSystemMessage(SYSTEMMESSAGE_FIRMWAREFAIL); } public byte[] encodeAppRefresh(int index) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_REFRESHAPP); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_REFRESHAPP); buf.putShort(ENDPOINT_APPMANAGER); buf.put(APPMANAGER_REFRESHAPP); buf.putInt(index); return buf.array(); } public byte[] encodeDatalog(byte handle, byte reply) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + 2); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) 2); buf.putShort(ENDPOINT_DATALOG); buf.put(reply); buf.put(handle); return buf.array(); } byte[] encodeApplicationMessageAck(UUID uuid, byte id) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + 18); // +ACK buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) 18); buf.putShort(ENDPOINT_APPLICATIONMESSAGE); 
buf.put(APPLICATIONMESSAGE_ACK); buf.put(id); buf.putLong(uuid.getMostSignificantBits()); buf.putLong(uuid.getMostSignificantBits()); return buf.array(); } private static byte[] encodePing(byte command, int cookie) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_PING); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_PING); buf.putShort(ENDPOINT_PING); buf.put(command); buf.putInt(cookie); return buf.array(); } private ArrayList<Pair<Integer, Object>> decodeDict(ByteBuffer buf) { ArrayList<Pair<Integer, Object>> dict = new ArrayList<Pair<Integer, Object>>(); buf.order(ByteOrder.LITTLE_ENDIAN); byte dictSize = buf.get(); while (dictSize-- > 0) { Integer key = buf.getInt(); byte type = buf.get(); short length = buf.getShort(); // length switch (type) { case TYPE_INT32: case TYPE_UINT32: dict.add(new Pair<Integer, Object>(key, buf.getInt())); break; case TYPE_CSTRING: case TYPE_BYTEARRAY: byte[] bytes = new byte[length]; buf.get(bytes); if (type == TYPE_BYTEARRAY) { dict.add(new Pair<Integer, Object>(key, bytes)); } else { dict.add(new Pair<Integer, Object>(key, Arrays.toString(bytes))); } break; default: } } return dict; } byte[] encodeApplicationMessagePush(short endpoint, UUID uuid, ArrayList<Pair<Integer, Object>> pairs) { int length = LENGTH_UUID + 3; // UUID + (PUSH + id + length of dict) for (Pair<Integer, Object> pair : pairs) { length += 7; // key + type + length if (pair.second instanceof Integer) { length += 4; } else if (pair.second instanceof String) { length += ((String) pair.second).length() + 1; } } ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + length); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) length); buf.putShort(endpoint); // 48 or 49 buf.put(APPLICATIONMESSAGE_PUSH); buf.put(++last_id); buf.putLong(uuid.getMostSignificantBits()); buf.putLong(uuid.getLeastSignificantBits()); buf.put((byte) pairs.size()); buf.order(ByteOrder.LITTLE_ENDIAN); // Um, yes, really for (Pair<Integer, Object> pair : pairs) { buf.putInt(pair.first); if (pair.second instanceof Integer) { buf.put(TYPE_INT32); buf.putShort((short) 4); // length of int buf.putInt((int) pair.second); } else if (pair.second instanceof String) { buf.put(TYPE_CSTRING); buf.putShort((short) (((String) pair.second).length() + 1)); buf.put(((String) pair.second).getBytes()); buf.put((byte) 0); } } return buf.array(); } private static byte reverseBits(byte in) { byte out = 0; for (int i = 0; i < 8; i++) { byte bit = (byte) (in & 1); out = (byte) ((out << 1) | bit); in = (byte) (in >> 1); } return out; } private GBDeviceEventScreenshot decodeResponseScreenshot(ByteBuffer buf, int length) { if (mDevEventScreenshot == null) { byte result = buf.get(); mDevEventScreenshot = new GBDeviceEventScreenshot(); int version = buf.getInt(); if (result != 0 || version != 1) { // pebble time not yet return null; } mDevEventScreenshot.width = buf.getInt(); mDevEventScreenshot.height = buf.getInt(); mDevEventScreenshot.bpp = 1; mDevEventScreenshot.clut = new byte[]{ 0x00, 0x00, 0x00, 0x00, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00 }; mScreenshotRemaining = (mDevEventScreenshot.width * mDevEventScreenshot.height) / 8; if (mScreenshotRemaining > 50000) { mScreenshotRemaining = -1; // ignore too big values return null; } mDevEventScreenshot.data = new byte[mScreenshotRemaining]; length -= 13; } if (mScreenshotRemaining == -1) { return null; } for (int i = 0; i < length; i++) { byte corrected = reverseBits(buf.get()); mDevEventScreenshot.data[mDevEventScreenshot.data.length - mScreenshotRemaining + i] = 
corrected; } mScreenshotRemaining -= length; LOG.info("Screenshot remaining bytes " + mScreenshotRemaining); if (mScreenshotRemaining == 0) { mScreenshotRemaining = -1; LOG.info("Got screenshot : " + mDevEventScreenshot.width + "x" + mDevEventScreenshot.height + " " + "pixels"); GBDeviceEventScreenshot devEventScreenshot = mDevEventScreenshot; mDevEventScreenshot = null; return devEventScreenshot; } return null; } private GBDeviceEventDismissNotification decodeResponseNotificationAction(ByteBuffer buf) { buf.order(ByteOrder.LITTLE_ENDIAN); byte command = buf.get(); if (command == 0x02) { // dismiss notification ? if (isFw3x) { buf.getLong(); // skip 8 bytes of UUID buf.getInt(); // skip 4 bytes of UUID } int id = buf.getInt(); short action = buf.getShort(); // at least the low byte should be the action - or not? if (action == 0x0001) { GBDeviceEventDismissNotification devEvtDismissNotification = new GBDeviceEventDismissNotification(); devEvtDismissNotification.notificationID = id; return devEvtDismissNotification; } LOG.info("unexpected paramerter in dismiss action: " + action); } return null; } private GBDeviceEventSendBytes decodeResponsePing(ByteBuffer buf) { byte command = buf.get(); if (command == PING_PING) { int cookie = buf.getInt(); LOG.info("Received PING - will reply"); GBDeviceEventSendBytes sendBytes = new GBDeviceEventSendBytes(); sendBytes.encodedBytes = encodePing(PING_PONG, cookie); return sendBytes; } return null; } private GBDeviceEventAppManagement decodeAppFetch(ByteBuffer buf) { byte command = buf.get(); if (command == 0x01) { long uuid_high = buf.getLong(); long uuid_low = buf.getLong(); UUID uuid = new UUID(uuid_high, uuid_low); buf.order(ByteOrder.LITTLE_ENDIAN); int app_id = buf.getInt(); GBDeviceEventAppManagement fetchRequest = new GBDeviceEventAppManagement(); fetchRequest.type = GBDeviceEventAppManagement.EventType.INSTALL; fetchRequest.event = GBDeviceEventAppManagement.Event.REQUEST; fetchRequest.token = app_id; fetchRequest.uuid = uuid; return fetchRequest; } return null; } @Override public GBDeviceEvent decodeResponse(byte[] responseData) { ByteBuffer buf = ByteBuffer.wrap(responseData); buf.order(ByteOrder.BIG_ENDIAN); short length = buf.getShort(); short endpoint = buf.getShort(); GBDeviceEvent devEvt = null; byte pebbleCmd = -1; switch (endpoint) { case ENDPOINT_MUSICCONTROL: pebbleCmd = buf.get(); GBDeviceEventMusicControl musicCmd = new GBDeviceEventMusicControl(); switch (pebbleCmd) { case MUSICCONTROL_NEXT: musicCmd.event = GBDeviceEventMusicControl.Event.NEXT; break; case MUSICCONTROL_PREVIOUS: musicCmd.event = GBDeviceEventMusicControl.Event.PREVIOUS; break; case MUSICCONTROL_PLAY: musicCmd.event = GBDeviceEventMusicControl.Event.PLAY; break; case MUSICCONTROL_PAUSE: musicCmd.event = GBDeviceEventMusicControl.Event.PAUSE; break; case MUSICCONTROL_PLAYPAUSE: musicCmd.event = GBDeviceEventMusicControl.Event.PLAYPAUSE; break; case MUSICCONTROL_VOLUMEUP: musicCmd.event = GBDeviceEventMusicControl.Event.VOLUMEUP; break; case MUSICCONTROL_VOLUMEDOWN: musicCmd.event = GBDeviceEventMusicControl.Event.VOLUMEDOWN; break; default: break; } devEvt = musicCmd; break; case ENDPOINT_PHONECONTROL: pebbleCmd = buf.get(); GBDeviceEventCallControl callCmd = new GBDeviceEventCallControl(); switch (pebbleCmd) { case PHONECONTROL_HANGUP: callCmd.event = GBDeviceEventCallControl.Event.END; break; default: LOG.info("Unknown PHONECONTROL event" + pebbleCmd); break; } devEvt = callCmd; break; case ENDPOINT_FIRMWAREVERSION: pebbleCmd = buf.get(); GBDeviceEventVersionInfo 
versionCmd = new GBDeviceEventVersionInfo(); buf.getInt(); // skip byte[] tmp = new byte[32]; buf.get(tmp, 0, 32); versionCmd.fwVersion = new String(tmp).trim(); if (versionCmd.fwVersion.startsWith("v3")) { isFw3x = true; } buf.get(tmp, 0, 9); Byte hwRev = buf.get(); if (hwRev > 0 && hwRev < hwRevisions.length) { versionCmd.hwVersion = hwRevisions[hwRev]; } else if (hwRev == -3) { // basalt emulator versionCmd.hwVersion = "dvt"; } devEvt = versionCmd; break; case ENDPOINT_APPMANAGER: pebbleCmd = buf.get(); switch (pebbleCmd) { case APPMANAGER_GETAPPBANKSTATUS: GBDeviceEventAppInfo appInfoCmd = new GBDeviceEventAppInfo(); int slotCount = buf.getInt(); int slotsUsed = buf.getInt(); byte[] appName = new byte[32]; byte[] appCreator = new byte[32]; appInfoCmd.apps = new GBDeviceApp[slotsUsed]; boolean[] slotInUse = new boolean[slotCount]; for (int i = 0; i < slotsUsed; i++) { int id = buf.getInt(); int index = buf.getInt(); slotInUse[index] = true; buf.get(appName, 0, 32); buf.get(appCreator, 0, 32); int flags = buf.getInt(); GBDeviceApp.Type appType; if ((flags & 16) == 16) { // FIXME: verify this assumption appType = GBDeviceApp.Type.APP_ACTIVITYTRACKER; } else if ((flags & 1) == 1) { // FIXME: verify this assumption appType = GBDeviceApp.Type.WATCHFACE; } else { appType = GBDeviceApp.Type.APP_GENERIC; } Short appVersion = buf.getShort(); appInfoCmd.apps[i] = new GBDeviceApp(tmpUUIDS.get(i), new String(appName).trim(), new String(appCreator).trim(), appVersion.toString(), appType); } for (int i = 0; i < slotCount; i++) { if (!slotInUse[i]) { appInfoCmd.freeSlot = (byte) i; LOG.info("found free slot " + i); break; } } devEvt = appInfoCmd; break; case APPMANAGER_GETUUIDS: GBDeviceEventSendBytes sendBytes = new GBDeviceEventSendBytes(); sendBytes.encodedBytes = encodeSimpleMessage(ENDPOINT_APPMANAGER, APPMANAGER_GETAPPBANKSTATUS); devEvt = sendBytes; tmpUUIDS.clear(); slotsUsed = buf.getInt(); for (int i = 0; i < slotsUsed; i++) { long uuid_high = buf.getLong(); long uuid_low = buf.getLong(); UUID uuid = new UUID(uuid_high, uuid_low); LOG.info("found uuid: " + uuid); tmpUUIDS.add(uuid); } break; case APPMANAGER_REMOVEAPP: GBDeviceEventAppManagement deleteRes = new GBDeviceEventAppManagement(); deleteRes.type = GBDeviceEventAppManagement.EventType.DELETE; int result = buf.getInt(); switch (result) { case APPMANAGER_RES_SUCCESS: deleteRes.event = GBDeviceEventAppManagement.Event.SUCCESS; break; default: deleteRes.event = GBDeviceEventAppManagement.Event.FAILURE; break; } devEvt = deleteRes; break; default: LOG.info("Unknown APPMANAGER event" + pebbleCmd); break; } break; case ENDPOINT_PUTBYTES: pebbleCmd = buf.get(); GBDeviceEventAppManagement installRes = new GBDeviceEventAppManagement(); installRes.type = GBDeviceEventAppManagement.EventType.INSTALL; switch (pebbleCmd) { case PUTBYTES_INIT: installRes.token = buf.getInt(); installRes.event = GBDeviceEventAppManagement.Event.SUCCESS; break; default: installRes.token = buf.getInt(); installRes.event = GBDeviceEventAppManagement.Event.FAILURE; break; } devEvt = installRes; break; case ENDPOINT_APPLICATIONMESSAGE: pebbleCmd = buf.get(); last_id = buf.get(); long uuid_high = buf.getLong(); long uuid_low = buf.getLong(); switch (pebbleCmd) { case APPLICATIONMESSAGE_PUSH: UUID uuid = new UUID(uuid_high, uuid_low); LOG.info("got APPLICATIONMESSAGE PUSH from UUID " + uuid); if (WeatherNeatSupport.uuid.equals(uuid)) { ArrayList<Pair<Integer, Object>> dict = decodeDict(buf); devEvt = mWeatherNeatSupport.handleMessage(dict); } else if 
(MorpheuzSupport.uuid.equals(uuid)) { ArrayList<Pair<Integer, Object>> dict = decodeDict(buf); devEvt = mMorpheuzSupport.handleMessage(dict); } else if (GadgetbridgePblSupport.uuid.equals(uuid)) { ArrayList<Pair<Integer, Object>> dict = decodeDict(buf); devEvt = mGadgetbridgePblSupport.handleMessage(dict); } break; case APPLICATIONMESSAGE_ACK: LOG.info("got APPLICATIONMESSAGE ACK"); break; case APPLICATIONMESSAGE_NACK: LOG.info("got APPLICATIONMESSAGE NACK"); break; case APPLICATIONMESSAGE_REQUEST: LOG.info("got APPLICATIONMESSAGE REQUEST"); break; default: break; } break; case ENDPOINT_DATALOG: pebbleCmd = buf.get(); if (pebbleCmd != DATALOG_TIMEOUT) { byte id = buf.get(); LOG.info("DATALOG id " + id + " - sending ACK (0x85)"); GBDeviceEventSendBytes sendBytes = new GBDeviceEventSendBytes(); sendBytes.encodedBytes = encodeDatalog(id, DATALOG_ACK); devEvt = sendBytes; } else { LOG.info("DATALOG TIMEOUT - ignoring"); } break; case ENDPOINT_PHONEVERSION: pebbleCmd = buf.get(); switch (pebbleCmd) { case PHONEVERSION_REQUEST: LOG.info("Pebble asked for Phone/App Version - repLYING!"); GBDeviceEventSendBytes sendBytes = new GBDeviceEventSendBytes(); sendBytes.encodedBytes = encodePhoneVersion(PHONEVERSION_REMOTE_OS_ANDROID); devEvt = sendBytes; break; default: break; } break; case ENDPOINT_SCREENSHOT: devEvt = decodeResponseScreenshot(buf, length); break; case ENDPOINT_EXTENSIBLENOTIFS: case ENDPOINT_NOTIFICATIONACTION: devEvt = decodeResponseNotificationAction(buf); break; case ENDPOINT_PING: devEvt = decodeResponsePing(buf); break; case ENDPOINT_APPFETCH: devEvt = decodeAppFetch(buf); break; default: break; } return devEvt; } public void setForceProtocol(boolean force) { LOG.info("setting force protocol to " + force); mForceProtocol = force; } }
app/src/main/java/nodomain/freeyourgadget/gadgetbridge/service/devices/pebble/PebbleProtocol.java
package nodomain.freeyourgadget.gadgetbridge.service.devices.pebble; import android.util.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.Arrays; import java.util.Random; import java.util.SimpleTimeZone; import java.util.UUID; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEvent; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventAppInfo; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventAppManagement; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventCallControl; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventDismissNotification; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventMusicControl; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventScreenshot; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventSendBytes; import nodomain.freeyourgadget.gadgetbridge.deviceevents.GBDeviceEventVersionInfo; import nodomain.freeyourgadget.gadgetbridge.impl.GBDeviceApp; import nodomain.freeyourgadget.gadgetbridge.model.ServiceCommand; import nodomain.freeyourgadget.gadgetbridge.service.bt.GBDeviceProtocol; public class PebbleProtocol extends GBDeviceProtocol { private static final Logger LOG = LoggerFactory.getLogger(PebbleProtocol.class); static final short ENDPOINT_TIME = 11; static final short ENDPOINT_FIRMWAREVERSION = 16; public static final short ENDPOINT_PHONEVERSION = 17; static final short ENDPOINT_SYSTEMMESSAGE = 18; static final short ENDPOINT_MUSICCONTROL = 32; static final short ENDPOINT_PHONECONTROL = 33; static final short ENDPOINT_APPLICATIONMESSAGE = 48; static final short ENDPOINT_LAUNCHER = 49; static final short ENDPOINT_APPRUNSTATE = 52; static final short ENDPOINT_LOGS = 2000; static final short ENDPOINT_PING = 2001; static final short ENDPOINT_LOGDUMP = 2002; static final short ENDPOINT_RESET = 2003; static final short ENDPOINT_APP = 2004; static final short ENDPOINT_APPLOGS = 2006; static final short ENDPOINT_NOTIFICATION = 3000; static final short ENDPOINT_EXTENSIBLENOTIFS = 3010; static final short ENDPOINT_RESOURCE = 4000; static final short ENDPOINT_SYSREG = 5000; static final short ENDPOINT_FCTREG = 5001; static final short ENDPOINT_APPMANAGER = 6000; static final short ENDPOINT_APPFETCH = 6001; // 3.x only public static final short ENDPOINT_DATALOG = 6778; static final short ENDPOINT_RUNKEEPER = 7000; static final short ENDPOINT_SCREENSHOT = 8000; static final short ENDPOINT_NOTIFICATIONACTION = 11440; // 3.x only, TODO: find a better name static final short ENDPOINT_BLOBDB = (short) 45531; // 3.x only static final short ENDPOINT_PUTBYTES = (short) 48879; static final byte APPRUNSTATE_START = 1; static final byte BLOBDB_INSERT = 1; static final byte BLOBDB_DELETE = 4; static final byte BLOBDB_CLEAR = 5; static final byte BLOBDB_PIN = 1; static final byte BLOBDB_APP = 2; static final byte BLOBDB_REMINDER = 3; static final byte BLOBDB_NOTIFICATION = 4; static final byte NOTIFICATION_EMAIL = 0; static final byte NOTIFICATION_SMS = 1; static final byte NOTIFICATION_TWITTER = 2; static final byte NOTIFICATION_FACEBOOK = 3; static final byte PHONECONTROL_ANSWER = 1; static final byte PHONECONTROL_HANGUP = 2; static final byte PHONECONTROL_GETSTATE = 3; static final byte PHONECONTROL_INCOMINGCALL = 4; static final byte PHONECONTROL_OUTGOINGCALL = 5; static final byte PHONECONTROL_MISSEDCALL = 6; static final 
byte PHONECONTROL_RING = 7; static final byte PHONECONTROL_START = 8; static final byte PHONECONTROL_END = 9; static final byte MUSICCONTROL_SETMUSICINFO = 16; static final byte MUSICCONTROL_PLAYPAUSE = 1; static final byte MUSICCONTROL_PAUSE = 2; static final byte MUSICCONTROL_PLAY = 3; static final byte MUSICCONTROL_NEXT = 4; static final byte MUSICCONTROL_PREVIOUS = 5; static final byte MUSICCONTROL_VOLUMEUP = 6; static final byte MUSICCONTROL_VOLUMEDOWN = 7; static final byte MUSICCONTROL_GETNOWPLAYING = 7; static final byte TIME_GETTIME = 0; static final byte TIME_SETTIME = 2; static final byte FIRMWAREVERSION_GETVERSION = 0; static final byte APPMANAGER_GETAPPBANKSTATUS = 1; static final byte APPMANAGER_REMOVEAPP = 2; static final byte APPMANAGER_REFRESHAPP = 3; static final byte APPMANAGER_GETUUIDS = 5; static final int APPMANAGER_RES_SUCCESS = 1; static final byte APPLICATIONMESSAGE_PUSH = 1; static final byte APPLICATIONMESSAGE_REQUEST = 2; static final byte APPLICATIONMESSAGE_ACK = (byte) 0xff; static final byte APPLICATIONMESSAGE_NACK = (byte) 0x7f; static final byte DATALOG_CLOSE = (byte) 0x03; static final byte DATALOG_TIMEOUT = 0x07; static final byte DATALOG_ACK = (byte) 0x85; static final byte DATALOG_NACK = (byte) 0x86; static final byte PING_PING = 0; static final byte PING_PONG = 1; static final byte PUTBYTES_INIT = 1; static final byte PUTBYTES_SEND = 2; static final byte PUTBYTES_COMMIT = 3; static final byte PUTBYTES_ABORT = 4; static final byte PUTBYTES_COMPLETE = 5; public static final byte PUTBYTES_TYPE_FIRMWARE = 1; public static final byte PUTBYTES_TYPE_RECOVERY = 2; public static final byte PUTBYTES_TYPE_SYSRESOURCES = 3; public static final byte PUTBYTES_TYPE_RESOURCES = 4; public static final byte PUTBYTES_TYPE_BINARY = 5; static final byte PUTBYTES_TYPE_FILE = 6; public static final byte PUTBYTES_TYPE_WORKER = 7; static final byte RESET_REBOOT = 0; static final byte SCREENSHOT_TAKE = 0; static final byte SYSTEMMESSAGE_FIRMWARESTART = 1; static final byte SYSTEMMESSAGE_FIRMWARECOMPLETE = 2; static final byte SYSTEMMESSAGE_FIRMWAREFAIL = 3; static final byte PHONEVERSION_REQUEST = 0; static final byte PHONEVERSION_APPVERSION_MAGIC = 2; // increase this if pebble complains static final byte PHONEVERSION_APPVERSION_MAJOR = 2; static final byte PHONEVERSION_APPVERSION_MINOR = 3; static final byte PHONEVERSION_APPVERSION_PATCH = 0; static final int PHONEVERSION_SESSION_CAPS_GAMMARAY = 0x80000000; static final int PHONEVERSION_REMOTE_CAPS_TELEPHONY = 0x00000010; static final int PHONEVERSION_REMOTE_CAPS_SMS = 0x00000020; static final int PHONEVERSION_REMOTE_CAPS_GPS = 0x00000040; static final int PHONEVERSION_REMOTE_CAPS_BTLE = 0x00000080; static final int PHONEVERSION_REMOTE_CAPS_REARCAMERA = 0x00000100; static final int PHONEVERSION_REMOTE_CAPS_ACCEL = 0x00000200; static final int PHONEVERSION_REMOTE_CAPS_GYRO = 0x00000400; static final int PHONEVERSION_REMOTE_CAPS_COMPASS = 0x00000800; static final byte PHONEVERSION_REMOTE_OS_UNKNOWN = 0; static final byte PHONEVERSION_REMOTE_OS_IOS = 1; static final byte PHONEVERSION_REMOTE_OS_ANDROID = 2; static final byte PHONEVERSION_REMOTE_OS_OSX = 3; static final byte PHONEVERSION_REMOTE_OS_LINUX = 4; static final byte PHONEVERSION_REMOTE_OS_WINDOWS = 5; static final byte TYPE_BYTEARRAY = 0; static final byte TYPE_CSTRING = 1; static final byte TYPE_UINT32 = 2; static final byte TYPE_INT32 = 3; static final short LENGTH_PREFIX = 4; static final short LENGTH_SIMPLEMESSAGE = 1; static final short LENGTH_APPFETCH = 2; static 
final short LENGTH_APPRUNSTATE = 17; static final short LENGTH_PING = 5; static final short LENGTH_PHONEVERSION = 17; static final short LENGTH_REMOVEAPP_2X = 17; static final short LENGTH_REMOVEAPP_3X = 21; static final short LENGTH_REFRESHAPP = 5; static final short LENGTH_SETTIME = 5; static final short LENGTH_SYSTEMMESSAGE = 2; static final short LENGTH_UPLOADSTART_2X = 7; static final short LENGTH_UPLOADSTART_3X = 10; static final short LENGTH_UPLOADCHUNK = 9; static final short LENGTH_UPLOADCOMMIT = 9; static final short LENGTH_UPLOADCOMPLETE = 5; static final short LENGTH_UPLOADCANCEL = 5; static final byte LENGTH_UUID = 16; private static final String[] hwRevisions = {"unknown", "ev1", "ev2", "ev2_3", "ev2_4", "v1_5", "v2_0", "evt2", "dvt"}; private static Random mRandom = new Random(); boolean isFw3x = false; boolean mForceProtocol = false; GBDeviceEventScreenshot mDevEventScreenshot = null; int mScreenshotRemaining = -1; byte last_id = -1; private ArrayList<UUID> tmpUUIDS = new ArrayList<>(); private MorpheuzSupport mMorpheuzSupport = new MorpheuzSupport(PebbleProtocol.this); private WeatherNeatSupport mWeatherNeatSupport = new WeatherNeatSupport(PebbleProtocol.this); private GadgetbridgePblSupport mGadgetbridgePblSupport = new GadgetbridgePblSupport(PebbleProtocol.this); private static byte[] encodeSimpleMessage(short endpoint, byte command) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_SIMPLEMESSAGE); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_SIMPLEMESSAGE); buf.putShort(endpoint); buf.put(command); return buf.array(); } private static byte[] encodeMessage(short endpoint, byte type, int cookie, String[] parts) { // Calculate length first int length = LENGTH_PREFIX + 1; if (parts != null) { for (String s : parts) { if (s == null || s.equals("")) { length++; // encode null or empty strings as 0x00 later continue; } length += (1 + s.getBytes().length); } } if (endpoint == ENDPOINT_PHONECONTROL) { length += 4; //for cookie; } // Encode Prefix ByteBuffer buf = ByteBuffer.allocate(length); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) (length - LENGTH_PREFIX)); buf.putShort(endpoint); buf.put(type); if (endpoint == ENDPOINT_PHONECONTROL) { buf.putInt(cookie); } // Encode Pascal-Style Strings if (parts != null) { for (String s : parts) { if (s == null || s.equals("")) { //buf.put((byte)0x01); buf.put((byte) 0x00); continue; } int partlength = s.getBytes().length; if (partlength > 255) partlength = 255; buf.put((byte) partlength); buf.put(s.getBytes(), 0, partlength); } } return buf.array(); } private byte[] encodeNotification(int id, String title, String subtitle, String body, byte type) { Long ts = System.currentTimeMillis(); ts += (SimpleTimeZone.getDefault().getOffset(ts)); ts /= 1000; if (isFw3x) { // 3.x notification return encodeBlobdbNotification((int) (ts & 0xffffffff), title, subtitle, body, type); } else if (mForceProtocol || type != NOTIFICATION_EMAIL) { // 2.x notification return encodeExtensibleNotification(id, (int) (ts & 0xffffffff), title, subtitle, body, type); } else { // 1.x notification on FW 2.X String[] parts = {title, body, ts.toString(), subtitle}; return encodeMessage(ENDPOINT_NOTIFICATION, type, 0, parts); } } @Override public byte[] encodeSMS(String from, String body) { return encodeNotification(mRandom.nextInt(), from, null, body, NOTIFICATION_SMS); } @Override public byte[] encodeEmail(String from, String subject, String body) { return encodeNotification(mRandom.nextInt(), from, subject, body, NOTIFICATION_EMAIL); } 
@Override public byte[] encodeGenericNotification(String title, String details) { return encodeNotification(mRandom.nextInt(), title, null, details, NOTIFICATION_SMS); } @Override public byte[] encodeSetTime(long ts) { if (ts == -1) { ts = System.currentTimeMillis(); ts += (SimpleTimeZone.getDefault().getOffset(ts)); } ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_SETTIME); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_SETTIME); buf.putShort(ENDPOINT_TIME); buf.put(TIME_SETTIME); buf.putInt((int) (ts / 1000)); return buf.array(); } @Override public byte[] encodeFindDevice(boolean start) { return encodeSetCallState("Where are you?", "Gadgetbridge", start ? ServiceCommand.CALL_INCOMING : ServiceCommand.CALL_END); } private static byte[] encodeExtensibleNotification(int id, int timestamp, String title, String subtitle, String body, byte type) { String[] parts = {title, subtitle, body}; // Calculate length first byte attributes_count = 0; int length = 21 + 17; if (parts != null) { for (String s : parts) { if (s == null || s.equals("")) { continue; } attributes_count++; length += (3 + s.getBytes().length); } } // Encode Prefix ByteBuffer buf = ByteBuffer.allocate(length + LENGTH_PREFIX); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) (length)); buf.putShort(ENDPOINT_EXTENSIBLENOTIFS); buf.order(ByteOrder.LITTLE_ENDIAN); // ! buf.put((byte) 0x00); // ? buf.put((byte) 0x01); // add notifications buf.putInt(0x00000002); // flags - ? buf.putInt(id); buf.putInt(0x00000000); // ANCS id buf.putInt(timestamp); buf.put((byte) 0x01); // layout - ? buf.put(attributes_count); // length attributes buf.put((byte) 1); // len actions - only dismiss byte attribute_id = 0; // Encode Pascal-Style Strings if (parts != null) { for (String s : parts) { attribute_id++; if (s == null || s.equals("")) { continue; } int partlength = s.getBytes().length; if (partlength > 255) partlength = 255; buf.put(attribute_id); buf.putShort((short) partlength); buf.put(s.getBytes(), 0, partlength); } } // ACTION buf.put((byte) 0x01); // id buf.put((byte) 0x04); // dismiss action buf.put((byte) 0x01); // number attributes buf.put((byte) 0x01); // attribute id (title) String actionstring = "dismiss all"; buf.putShort((short) actionstring.length()); buf.put(actionstring.getBytes()); return buf.array(); } private byte[] encodeBlobdbNotification(int timestamp, String title, String subtitle, String body, byte type) { String[] parts = {title, subtitle, body}; int icon_id = 1; switch (type) { case NOTIFICATION_EMAIL: icon_id = 19; break; case NOTIFICATION_SMS: icon_id = 45; } // Calculate length first final short BLOBDB_LENGTH = 23; final short NOTIFICATION_PIN_LENGTH = 46; final short ACTIONS_LENGTH = 17; byte attributes_count = 1; // icon short attributes_length = 7; // icon if (parts != null) { for (String s : parts) { if (s == null || s.equals("")) { continue; } attributes_count++; attributes_length += (3 + s.getBytes().length); } } byte actions_count = 0; if (mForceProtocol) { actions_count = 1; attributes_length += ACTIONS_LENGTH; } short length = (short) (BLOBDB_LENGTH + NOTIFICATION_PIN_LENGTH + attributes_length); short pin_length = (short) (NOTIFICATION_PIN_LENGTH + attributes_length); // Encode Prefix ByteBuffer buf = ByteBuffer.allocate(length + LENGTH_PREFIX); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(length); buf.putShort(ENDPOINT_BLOBDB); buf.order(ByteOrder.LITTLE_ENDIAN); // blobdb - 23 bytes buf.put(BLOBDB_INSERT); buf.putShort((short) mRandom.nextInt()); // token 
buf.put(BLOBDB_NOTIFICATION); buf.put(LENGTH_UUID); // uuid length byte[] uuid_buf = new byte[LENGTH_UUID]; mRandom.nextBytes(uuid_buf); buf.put(uuid_buf); // random UUID buf.putShort(pin_length); // length of the encapsulated data // pin - 46 bytes buf.put(uuid_buf); // random UUID buf.put(uuid_buf); // parent UUID buf.putInt(timestamp); // 32-bit timestamp buf.putShort((short) 0); // duration buf.put((byte) 0x01); // type (0x01 = notification) buf.putShort((short) 0x0010); // flags 0x0010 = read? buf.put((byte) 0x01); // layout (0x01 = default?) buf.putShort(attributes_length); // total length of all attributes and actions in bytes buf.put(attributes_count); buf.put(actions_count); byte attribute_id = 0; // Encode Pascal-Style Strings if (parts != null) { for (String s : parts) { attribute_id++; if (s == null || s.equals("")) { continue; } int partlength = s.getBytes().length; if (partlength > 255) partlength = 255; buf.put(attribute_id); buf.putShort((short) partlength); buf.put(s.getBytes(), 0, partlength); } } buf.put((byte) 4); // icon buf.putShort((short) 4); // length of int buf.putInt(icon_id); if (mForceProtocol) { // ACTION buf.put((byte) 0x01); // id buf.put((byte) 0x04); // dismiss action buf.put((byte) 0x01); // number attributes buf.put((byte) 0x01); // attribute id (title) String actionstring = "dismiss all"; buf.putShort((short) actionstring.length()); buf.put(actionstring.getBytes()); } return buf.array(); } public byte[] encodeInstallMetadata(UUID uuid, String appName, short appVersion, short sdkVersion, int iconId) { // Calculate length first final short BLOBDB_LENGTH = 23; final short METADATA_LENGTH = 126; final short length = (short) (BLOBDB_LENGTH + METADATA_LENGTH); byte[] name_buf = new byte[96]; System.arraycopy(appName.getBytes(), 0, name_buf, 0, appName.length()); ByteBuffer buf = ByteBuffer.allocate(length + LENGTH_PREFIX); // Encode Prefix buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(length); buf.putShort(ENDPOINT_BLOBDB); buf.order(ByteOrder.LITTLE_ENDIAN); // blobdb - 23 bytes buf.put(BLOBDB_INSERT); // insert buf.putShort((short) mRandom.nextInt()); // token buf.put(BLOBDB_APP); buf.put(LENGTH_UUID); buf.order(ByteOrder.BIG_ENDIAN); buf.putLong(uuid.getMostSignificantBits()); // watchapp uuid buf.putLong(uuid.getLeastSignificantBits()); buf.order(ByteOrder.LITTLE_ENDIAN); buf.putShort(METADATA_LENGTH); // length of the encapsulated data buf.order(ByteOrder.BIG_ENDIAN); buf.putLong(uuid.getMostSignificantBits()); // watchapp uuid buf.putLong(uuid.getLeastSignificantBits()); buf.order(ByteOrder.LITTLE_ENDIAN); buf.putInt(iconId); buf.putShort(appVersion); buf.putShort(sdkVersion); buf.put((byte) 0); // app_face_bgcolor buf.put((byte) 0); // app_face_template_id buf.put(name_buf); // 96 bytes return buf.array(); } public byte[] encodeAppFetchAck() { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_APPFETCH); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_APPFETCH); buf.putShort(ENDPOINT_APPFETCH); buf.put((byte) 0x01); buf.put((byte) 0x01); return buf.array(); } public byte[] encodeGetTime() { return encodeSimpleMessage(ENDPOINT_TIME, TIME_GETTIME); } @Override public byte[] encodeSetCallState(String number, String name, ServiceCommand command) { String[] parts = {number, name}; byte pebbleCmd; switch (command) { case CALL_START: pebbleCmd = PHONECONTROL_START; break; case CALL_END: pebbleCmd = PHONECONTROL_END; break; case CALL_INCOMING: pebbleCmd = PHONECONTROL_INCOMINGCALL; break; case CALL_OUTGOING: // pebbleCmd = 
PHONECONTROL_OUTGOINGCALL; /* * HACK/WORKAROUND for non-working outgoing call display. * Just send a incoming call command immediately followed by a start call command * This prevents vibration of the Pebble. */ byte[] callmsg = encodeMessage(ENDPOINT_PHONECONTROL, PHONECONTROL_INCOMINGCALL, 0, parts); byte[] startmsg = encodeMessage(ENDPOINT_PHONECONTROL, PHONECONTROL_START, 0, parts); byte[] msg = new byte[callmsg.length + startmsg.length]; System.arraycopy(callmsg, 0, msg, 0, callmsg.length); System.arraycopy(startmsg, 0, msg, startmsg.length, startmsg.length); return msg; // END HACK default: return null; } return encodeMessage(ENDPOINT_PHONECONTROL, pebbleCmd, 0, parts); } @Override public byte[] encodeSetMusicInfo(String artist, String album, String track) { String[] parts = {artist, album, track}; return encodeMessage(ENDPOINT_MUSICCONTROL, MUSICCONTROL_SETMUSICINFO, 0, parts); } @Override public byte[] encodeFirmwareVersionReq() { return encodeSimpleMessage(ENDPOINT_FIRMWAREVERSION, FIRMWAREVERSION_GETVERSION); } @Override public byte[] encodeAppInfoReq() { return encodeSimpleMessage(ENDPOINT_APPMANAGER, APPMANAGER_GETUUIDS); } @Override public byte[] encodeAppStart(UUID uuid) { if (isFw3x) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_APPRUNSTATE); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_APPRUNSTATE); buf.putShort(ENDPOINT_APPRUNSTATE); buf.put(APPRUNSTATE_START); buf.putLong(uuid.getMostSignificantBits()); buf.putLong(uuid.getLeastSignificantBits()); return buf.array(); } else { ArrayList<Pair<Integer, Object>> pairs = new ArrayList<>(); pairs.add(new Pair<>(1, (Object) 1)); // launch return encodeApplicationMessagePush(ENDPOINT_LAUNCHER, uuid, pairs); } } @Override public byte[] encodeAppDelete(UUID uuid) { ByteBuffer buf; if (isFw3x) { buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_REMOVEAPP_3X); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_REMOVEAPP_3X); buf.putShort(ENDPOINT_BLOBDB); buf.order(ByteOrder.LITTLE_ENDIAN); buf.put(BLOBDB_DELETE); buf.putShort((short) mRandom.nextInt()); // token buf.put(BLOBDB_APP); buf.put(LENGTH_UUID); buf.order(ByteOrder.BIG_ENDIAN); } else { buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_REMOVEAPP_2X); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_REMOVEAPP_2X); buf.putShort(ENDPOINT_APPMANAGER); buf.put(APPMANAGER_REMOVEAPP); } buf.putLong(uuid.getMostSignificantBits()); buf.putLong(uuid.getLeastSignificantBits()); return buf.array(); } private byte[] encodePhoneVersion2x(byte os) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_PHONEVERSION); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_PHONEVERSION); buf.putShort(ENDPOINT_PHONEVERSION); buf.put((byte) 0x01); buf.putInt(-1); //0xffffffff if (os == PHONEVERSION_REMOTE_OS_ANDROID) { buf.putInt(PHONEVERSION_SESSION_CAPS_GAMMARAY); } else { buf.putInt(0); } buf.putInt(PHONEVERSION_REMOTE_CAPS_SMS | PHONEVERSION_REMOTE_CAPS_TELEPHONY | os); buf.put(PHONEVERSION_APPVERSION_MAGIC); buf.put(PHONEVERSION_APPVERSION_MAJOR); buf.put(PHONEVERSION_APPVERSION_MINOR); buf.put(PHONEVERSION_APPVERSION_PATCH); return buf.array(); } private byte[] encodePhoneVersion3x(byte os) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + 25); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) 25); buf.putShort(ENDPOINT_PHONEVERSION); buf.put((byte) 0x01); buf.putInt(-1); //0xffffffff buf.putInt(0); buf.putInt(os); buf.put(PHONEVERSION_APPVERSION_MAGIC); buf.put((byte) 3); // major? buf.put((byte) 0); // minor? buf.put((byte) 1); // patch? 
buf.put((byte) 3); // ??? buf.put((byte) 0); // ??? buf.put((byte) 0); // ??? buf.put((byte) 0); // ??? buf.putInt(0); // ??? return buf.array(); } @Override public byte[] encodePhoneVersion(byte os) { return encodePhoneVersion3x(os); } @Override public byte[] encodeReboot() { return encodeSimpleMessage(ENDPOINT_RESET, RESET_REBOOT); } @Override public byte[] encodeScreenshotReq() { return encodeSimpleMessage(ENDPOINT_SCREENSHOT, SCREENSHOT_TAKE); } /* pebble specific install methods */ public byte[] encodeUploadStart(byte type, int app_id, int size) { short length; if (isFw3x) { length = LENGTH_UPLOADSTART_3X; type |= 0b10000000; } else { length = LENGTH_UPLOADSTART_2X; } ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + length); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(length); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_INIT); buf.putInt(size); buf.put(type); if (isFw3x) { buf.putInt(app_id); } else { // slot buf.put((byte) app_id); } return buf.array(); } public byte[] encodeUploadChunk(int token, byte[] buffer, int size) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_UPLOADCHUNK + size); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) (LENGTH_UPLOADCHUNK + size)); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_SEND); buf.putInt(token); buf.putInt(size); buf.put(buffer, 0, size); return buf.array(); } public byte[] encodeUploadCommit(int token, int crc) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_UPLOADCOMMIT); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_UPLOADCOMMIT); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_COMMIT); buf.putInt(token); buf.putInt(crc); return buf.array(); } public byte[] encodeUploadComplete(int token) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_UPLOADCOMPLETE); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_UPLOADCOMPLETE); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_COMPLETE); buf.putInt(token); return buf.array(); } public byte[] encodeUploadCancel(int token) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_UPLOADCANCEL); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_UPLOADCANCEL); buf.putShort(ENDPOINT_PUTBYTES); buf.put(PUTBYTES_ABORT); buf.putInt(token); return buf.array(); } private byte[] encodeSystemMessage(byte systemMessage) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_SYSTEMMESSAGE); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_SYSTEMMESSAGE); buf.putShort(ENDPOINT_SYSTEMMESSAGE); buf.put((byte) 0); buf.put(systemMessage); return buf.array(); } public byte[] encodeInstallFirmwareStart() { return encodeSystemMessage(SYSTEMMESSAGE_FIRMWARESTART); } public byte[] encodeInstallFirmwareComplete() { return encodeSystemMessage(SYSTEMMESSAGE_FIRMWARECOMPLETE); } public byte[] encodeInstallFirmwareError() { return encodeSystemMessage(SYSTEMMESSAGE_FIRMWAREFAIL); } public byte[] encodeAppRefresh(int index) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_REFRESHAPP); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_REFRESHAPP); buf.putShort(ENDPOINT_APPMANAGER); buf.put(APPMANAGER_REFRESHAPP); buf.putInt(index); return buf.array(); } public byte[] encodeDatalog(byte handle, byte reply) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + 2); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) 2); buf.putShort(ENDPOINT_DATALOG); buf.put(reply); buf.put(handle); return buf.array(); } byte[] encodeApplicationMessageAck(UUID uuid, byte id) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + 18); // 
+ACK buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) 18); buf.putShort(ENDPOINT_APPLICATIONMESSAGE); buf.put(APPLICATIONMESSAGE_ACK); buf.put(id); buf.putLong(uuid.getMostSignificantBits()); buf.putLong(uuid.getMostSignificantBits()); return buf.array(); } private static byte[] encodePing(byte command, int cookie) { ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + LENGTH_PING); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort(LENGTH_PING); buf.putShort(ENDPOINT_PING); buf.put(command); buf.putInt(cookie); return buf.array(); } private ArrayList<Pair<Integer, Object>> decodeDict(ByteBuffer buf) { ArrayList<Pair<Integer, Object>> dict = new ArrayList<Pair<Integer, Object>>(); buf.order(ByteOrder.LITTLE_ENDIAN); byte dictSize = buf.get(); while (dictSize-- > 0) { Integer key = buf.getInt(); byte type = buf.get(); short length = buf.getShort(); // length switch (type) { case TYPE_INT32: case TYPE_UINT32: dict.add(new Pair<Integer, Object>(key, buf.getInt())); break; case TYPE_CSTRING: case TYPE_BYTEARRAY: byte[] bytes = new byte[length]; buf.get(bytes); if (type == TYPE_BYTEARRAY) { dict.add(new Pair<Integer, Object>(key, bytes)); } else { dict.add(new Pair<Integer, Object>(key, Arrays.toString(bytes))); } break; default: } } return dict; } byte[] encodeApplicationMessagePush(short endpoint, UUID uuid, ArrayList<Pair<Integer, Object>> pairs) { int length = LENGTH_UUID + 3; // UUID + (PUSH + id + length of dict) for (Pair<Integer, Object> pair : pairs) { length += 7; // key + type + length if (pair.second instanceof Integer) { length += 4; } else if (pair.second instanceof String) { length += ((String) pair.second).length() + 1; } } ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + length); buf.order(ByteOrder.BIG_ENDIAN); buf.putShort((short) length); buf.putShort(endpoint); // 48 or 49 buf.put(APPLICATIONMESSAGE_PUSH); buf.put(++last_id); buf.putLong(uuid.getMostSignificantBits()); buf.putLong(uuid.getLeastSignificantBits()); buf.put((byte) pairs.size()); buf.order(ByteOrder.LITTLE_ENDIAN); // Um, yes, really for (Pair<Integer, Object> pair : pairs) { buf.putInt(pair.first); if (pair.second instanceof Integer) { buf.put(TYPE_INT32); buf.putShort((short) 4); // length of int buf.putInt((int) pair.second); } else if (pair.second instanceof String) { buf.put(TYPE_CSTRING); buf.putShort((short) (((String) pair.second).length() + 1)); buf.put(((String) pair.second).getBytes()); buf.put((byte) 0); } } return buf.array(); } private static byte reverseBits(byte in) { byte out = 0; for (int i = 0; i < 8; i++) { byte bit = (byte) (in & 1); out = (byte) ((out << 1) | bit); in = (byte) (in >> 1); } return out; } private GBDeviceEventScreenshot decodeResponseScreenshot(ByteBuffer buf, int length) { if (mDevEventScreenshot == null) { byte result = buf.get(); mDevEventScreenshot = new GBDeviceEventScreenshot(); int version = buf.getInt(); if (result != 0 || version != 1) { // pebble time not yet return null; } mDevEventScreenshot.width = buf.getInt(); mDevEventScreenshot.height = buf.getInt(); mDevEventScreenshot.bpp = 1; mDevEventScreenshot.clut = new byte[]{ 0x00, 0x00, 0x00, 0x00, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00 }; mScreenshotRemaining = (mDevEventScreenshot.width * mDevEventScreenshot.height) / 8; if (mScreenshotRemaining > 50000) { mScreenshotRemaining = -1; // ignore too big values return null; } mDevEventScreenshot.data = new byte[mScreenshotRemaining]; length -= 13; } if (mScreenshotRemaining == -1) { return null; } for (int i = 0; i < length; i++) { byte corrected = 
reverseBits(buf.get()); mDevEventScreenshot.data[mDevEventScreenshot.data.length - mScreenshotRemaining + i] = corrected; } mScreenshotRemaining -= length; LOG.info("Screenshot remaining bytes " + mScreenshotRemaining); if (mScreenshotRemaining == 0) { mScreenshotRemaining = -1; LOG.info("Got screenshot : " + mDevEventScreenshot.width + "x" + mDevEventScreenshot.height + " " + "pixels"); GBDeviceEventScreenshot devEventScreenshot = mDevEventScreenshot; mDevEventScreenshot = null; return devEventScreenshot; } return null; } private GBDeviceEventDismissNotification decodeResponseNotificationAction(ByteBuffer buf) { buf.order(ByteOrder.LITTLE_ENDIAN); byte command = buf.get(); if (command == 0x02) { // dismiss notification ? if (isFw3x) { buf.getLong(); // skip 8 bytes of UUID buf.getInt(); // skip 4 bytes of UUID } int id = buf.getInt(); short action = buf.getShort(); // at least the low byte should be the action - or not? if (action == 0x0001) { GBDeviceEventDismissNotification devEvtDismissNotification = new GBDeviceEventDismissNotification(); devEvtDismissNotification.notificationID = id; return devEvtDismissNotification; } LOG.info("unexpected paramerter in dismiss action: " + action); } return null; } private GBDeviceEventSendBytes decodeResponsePing(ByteBuffer buf) { byte command = buf.get(); if (command == PING_PING) { int cookie = buf.getInt(); LOG.info("Received PING - will reply"); GBDeviceEventSendBytes sendBytes = new GBDeviceEventSendBytes(); sendBytes.encodedBytes = encodePing(PING_PONG, cookie); return sendBytes; } return null; } private GBDeviceEventAppManagement decodeAppFetch(ByteBuffer buf) { byte command = buf.get(); if (command == 0x01) { long uuid_high = buf.getLong(); long uuid_low = buf.getLong(); UUID uuid = new UUID(uuid_high, uuid_low); buf.order(ByteOrder.LITTLE_ENDIAN); int app_id = buf.getInt(); GBDeviceEventAppManagement fetchRequest = new GBDeviceEventAppManagement(); fetchRequest.type = GBDeviceEventAppManagement.EventType.INSTALL; fetchRequest.event = GBDeviceEventAppManagement.Event.REQUEST; fetchRequest.token = app_id; fetchRequest.uuid = uuid; return fetchRequest; } return null; } @Override public GBDeviceEvent decodeResponse(byte[] responseData) { ByteBuffer buf = ByteBuffer.wrap(responseData); buf.order(ByteOrder.BIG_ENDIAN); short length = buf.getShort(); short endpoint = buf.getShort(); GBDeviceEvent devEvt = null; byte pebbleCmd = -1; switch (endpoint) { case ENDPOINT_MUSICCONTROL: pebbleCmd = buf.get(); GBDeviceEventMusicControl musicCmd = new GBDeviceEventMusicControl(); switch (pebbleCmd) { case MUSICCONTROL_NEXT: musicCmd.event = GBDeviceEventMusicControl.Event.NEXT; break; case MUSICCONTROL_PREVIOUS: musicCmd.event = GBDeviceEventMusicControl.Event.PREVIOUS; break; case MUSICCONTROL_PLAY: musicCmd.event = GBDeviceEventMusicControl.Event.PLAY; break; case MUSICCONTROL_PAUSE: musicCmd.event = GBDeviceEventMusicControl.Event.PAUSE; break; case MUSICCONTROL_PLAYPAUSE: musicCmd.event = GBDeviceEventMusicControl.Event.PLAYPAUSE; break; case MUSICCONTROL_VOLUMEUP: musicCmd.event = GBDeviceEventMusicControl.Event.VOLUMEUP; break; case MUSICCONTROL_VOLUMEDOWN: musicCmd.event = GBDeviceEventMusicControl.Event.VOLUMEDOWN; break; default: break; } devEvt = musicCmd; break; case ENDPOINT_PHONECONTROL: pebbleCmd = buf.get(); GBDeviceEventCallControl callCmd = new GBDeviceEventCallControl(); switch (pebbleCmd) { case PHONECONTROL_HANGUP: callCmd.event = GBDeviceEventCallControl.Event.END; break; default: LOG.info("Unknown PHONECONTROL event" + pebbleCmd); 
break; } devEvt = callCmd; break; case ENDPOINT_FIRMWAREVERSION: pebbleCmd = buf.get(); GBDeviceEventVersionInfo versionCmd = new GBDeviceEventVersionInfo(); buf.getInt(); // skip byte[] tmp = new byte[32]; buf.get(tmp, 0, 32); versionCmd.fwVersion = new String(tmp).trim(); if (versionCmd.fwVersion.startsWith("v3")) { isFw3x = true; } buf.get(tmp, 0, 9); Byte hwRev = buf.get(); if (hwRev > 0 && hwRev < hwRevisions.length) { versionCmd.hwVersion = hwRevisions[hwRev]; } else if (hwRev == -3) { // basalt emulator versionCmd.hwVersion = "dvt"; } devEvt = versionCmd; break; case ENDPOINT_APPMANAGER: pebbleCmd = buf.get(); switch (pebbleCmd) { case APPMANAGER_GETAPPBANKSTATUS: GBDeviceEventAppInfo appInfoCmd = new GBDeviceEventAppInfo(); int slotCount = buf.getInt(); int slotsUsed = buf.getInt(); byte[] appName = new byte[32]; byte[] appCreator = new byte[32]; appInfoCmd.apps = new GBDeviceApp[slotsUsed]; boolean[] slotInUse = new boolean[slotCount]; for (int i = 0; i < slotsUsed; i++) { int id = buf.getInt(); int index = buf.getInt(); slotInUse[index] = true; buf.get(appName, 0, 32); buf.get(appCreator, 0, 32); int flags = buf.getInt(); GBDeviceApp.Type appType; if ((flags & 16) == 16) { // FIXME: verify this assumption appType = GBDeviceApp.Type.APP_ACTIVITYTRACKER; } else if ((flags & 1) == 1) { // FIXME: verify this assumption appType = GBDeviceApp.Type.WATCHFACE; } else { appType = GBDeviceApp.Type.APP_GENERIC; } Short appVersion = buf.getShort(); appInfoCmd.apps[i] = new GBDeviceApp(tmpUUIDS.get(i), new String(appName).trim(), new String(appCreator).trim(), appVersion.toString(), appType); } for (int i = 0; i < slotCount; i++) { if (!slotInUse[i]) { appInfoCmd.freeSlot = (byte) i; LOG.info("found free slot " + i); break; } } devEvt = appInfoCmd; break; case APPMANAGER_GETUUIDS: GBDeviceEventSendBytes sendBytes = new GBDeviceEventSendBytes(); sendBytes.encodedBytes = encodeSimpleMessage(ENDPOINT_APPMANAGER, APPMANAGER_GETAPPBANKSTATUS); devEvt = sendBytes; tmpUUIDS.clear(); slotsUsed = buf.getInt(); for (int i = 0; i < slotsUsed; i++) { long uuid_high = buf.getLong(); long uuid_low = buf.getLong(); UUID uuid = new UUID(uuid_high, uuid_low); LOG.info("found uuid: " + uuid); tmpUUIDS.add(uuid); } break; case APPMANAGER_REMOVEAPP: GBDeviceEventAppManagement deleteRes = new GBDeviceEventAppManagement(); deleteRes.type = GBDeviceEventAppManagement.EventType.DELETE; int result = buf.getInt(); switch (result) { case APPMANAGER_RES_SUCCESS: deleteRes.event = GBDeviceEventAppManagement.Event.SUCCESS; break; default: deleteRes.event = GBDeviceEventAppManagement.Event.FAILURE; break; } devEvt = deleteRes; break; default: LOG.info("Unknown APPMANAGER event" + pebbleCmd); break; } break; case ENDPOINT_PUTBYTES: pebbleCmd = buf.get(); GBDeviceEventAppManagement installRes = new GBDeviceEventAppManagement(); installRes.type = GBDeviceEventAppManagement.EventType.INSTALL; switch (pebbleCmd) { case PUTBYTES_INIT: installRes.token = buf.getInt(); installRes.event = GBDeviceEventAppManagement.Event.SUCCESS; break; default: installRes.token = buf.getInt(); installRes.event = GBDeviceEventAppManagement.Event.FAILURE; break; } devEvt = installRes; break; case ENDPOINT_APPLICATIONMESSAGE: pebbleCmd = buf.get(); last_id = buf.get(); long uuid_high = buf.getLong(); long uuid_low = buf.getLong(); switch (pebbleCmd) { case APPLICATIONMESSAGE_PUSH: UUID uuid = new UUID(uuid_high, uuid_low); LOG.info("got APPLICATIONMESSAGE PUSH from UUID " + uuid); if (WeatherNeatSupport.uuid.equals(uuid)) { ArrayList<Pair<Integer, 
Object>> dict = decodeDict(buf); devEvt = mWeatherNeatSupport.handleMessage(dict); } else if (MorpheuzSupport.uuid.equals(uuid)) { ArrayList<Pair<Integer, Object>> dict = decodeDict(buf); devEvt = mMorpheuzSupport.handleMessage(dict); } else if (GadgetbridgePblSupport.uuid.equals(uuid)) { ArrayList<Pair<Integer, Object>> dict = decodeDict(buf); devEvt = mGadgetbridgePblSupport.handleMessage(dict); } break; case APPLICATIONMESSAGE_ACK: LOG.info("got APPLICATIONMESSAGE ACK"); break; case APPLICATIONMESSAGE_NACK: LOG.info("got APPLICATIONMESSAGE NACK"); break; case APPLICATIONMESSAGE_REQUEST: LOG.info("got APPLICATIONMESSAGE REQUEST"); break; default: break; } break; case ENDPOINT_DATALOG: pebbleCmd = buf.get(); if (pebbleCmd != DATALOG_TIMEOUT) { byte id = buf.get(); LOG.info("DATALOG id " + id + " - sending ACK (0x85)"); GBDeviceEventSendBytes sendBytes = new GBDeviceEventSendBytes(); sendBytes.encodedBytes = encodeDatalog(id, DATALOG_ACK); devEvt = sendBytes; } else { LOG.info("DATALOG TIMEOUT - ignoring"); } break; case ENDPOINT_PHONEVERSION: pebbleCmd = buf.get(); switch (pebbleCmd) { case PHONEVERSION_REQUEST: LOG.info("Pebble asked for Phone/App Version - repLYING!"); GBDeviceEventSendBytes sendBytes = new GBDeviceEventSendBytes(); sendBytes.encodedBytes = encodePhoneVersion(PHONEVERSION_REMOTE_OS_ANDROID); devEvt = sendBytes; break; default: break; } break; case ENDPOINT_SCREENSHOT: devEvt = decodeResponseScreenshot(buf, length); break; case ENDPOINT_EXTENSIBLENOTIFS: case ENDPOINT_NOTIFICATIONACTION: devEvt = decodeResponseNotificationAction(buf); break; case ENDPOINT_PING: devEvt = decodeResponsePing(buf); break; case ENDPOINT_APPFETCH: devEvt = decodeAppFetch(buf); break; default: break; } return devEvt; } public void setForceProtocol(boolean force) { LOG.info("setting force protocol to " + force); mForceProtocol = force; } }
Pebble: set UTC time and offset on FW 3.x
app/src/main/java/nodomain/freeyourgadget/gadgetbridge/service/devices/pebble/PebbleProtocol.java
Pebble: set UTC time and offset on FW 3.x
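The record above is a commit titled "Pebble: set UTC time and offset on FW 3.x", but the reworked time-setting code itself falls outside this excerpt; only the 2.x-style encodeSetTime() (which folds the local zone offset into the timestamp) is visible in old_contents. As a rough illustration only, the sketch below shows one plausible shape for a 3.x message that carries UTC time plus an explicit offset, reusing the framing conventions visible in the record (big-endian length prefix, ENDPOINT_TIME, then a command byte, with Pascal-style strings). The command value TIME_SETTIME_UTC = 3, the minute-resolution offset field, and the trailing timezone name are assumptions for illustration, not taken from the record.

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.TimeZone;

public class PebbleUtcTimeSketch {
    static final short LENGTH_PREFIX = 4;       // from the record's old_contents
    static final short ENDPOINT_TIME = 11;      // from the record's old_contents
    static final byte TIME_SETTIME_UTC = 3;     // assumed 3.x command value, not in the record

    /** Hypothetical 3.x time message: UTC seconds + offset in minutes + timezone name. */
    public static byte[] encodeSetTimeUtc(long millis) {
        TimeZone tz = TimeZone.getDefault();
        byte[] tzName = tz.getID().getBytes();                    // e.g. "Europe/Berlin"
        short offsetMinutes = (short) (tz.getOffset(millis) / 60000);

        // payload: command(1) + timestamp(4) + offset(2) + name length(1) + name
        short payloadLength = (short) (1 + 4 + 2 + 1 + tzName.length);
        ByteBuffer buf = ByteBuffer.allocate(LENGTH_PREFIX + payloadLength);
        buf.order(ByteOrder.BIG_ENDIAN);
        buf.putShort(payloadLength);
        buf.putShort(ENDPOINT_TIME);
        buf.put(TIME_SETTIME_UTC);
        buf.putInt((int) (millis / 1000));      // timestamp stays in UTC, no local offset added
        buf.putShort(offsetMinutes);            // zone offset shipped separately
        buf.put((byte) tzName.length);          // Pascal-style string, as elsewhere in the protocol
        buf.put(tzName);
        return buf.array();
    }
}
```

Unlike the 2.x path shown in old_contents, this variant keeps the wall-clock timestamp in UTC and transmits the offset and zone name as separate fields, which is what the commit subject suggests the 3.x change does.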
Java
agpl-3.0
4e4b1895d1279748fef5091d6a87f960cbedd80f
0
pivotal-nathan-sentjens/tigase-xmpp-java,nate-sentjens/tigase-xmpp-java,sourcebits-praveenkh/Tagase,f24-ag/tigase,amikey/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,pivotal-nathan-sentjens/tigase-xmpp-java,pivotal-nathan-sentjens/tigase-xmpp-java,cgvarela/tigase-server,caiyingyuan/tigase71,fanout/tigase-server,fanout/tigase-server,caiyingyuan/tigase71,wangningbo/tigase-server,nate-sentjens/tigase-xmpp-java,wangningbo/tigase-server,f24-ag/tigase,nate-sentjens/tigase-xmpp-java,cgvarela/tigase-server,f24-ag/tigase,amikey/tigase-server,f24-ag/tigase,caiyingyuan/tigase71,caiyingyuan/tigase71,nate-sentjens/tigase-xmpp-java,caiyingyuan/tigase71,nate-sentjens/tigase-xmpp-java,pivotal-nathan-sentjens/tigase-xmpp-java,wangningbo/tigase-server,cgvarela/tigase-server,amikey/tigase-server,fanout/tigase-server,sourcebits-praveenkh/Tagase,fanout/tigase-server,f24-ag/tigase,amikey/tigase-server,wangningbo/tigase-server,caiyingyuan/tigase71,wangningbo/tigase-server,cgvarela/tigase-server,sourcebits-praveenkh/Tagase,fanout/tigase-server,sourcebits-praveenkh/Tagase,sourcebits-praveenkh/Tagase,cgvarela/tigase-server,wangningbo/tigase-server,cgvarela/tigase-server,wangningbo/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,f24-ag/tigase,nate-sentjens/tigase-xmpp-java,amikey/tigase-server,amikey/tigase-server,fanout/tigase-server,sourcebits-praveenkh/Tagase
/* * WebSocketXMPPIOService.java * * Tigase Jabber/XMPP Server * Copyright (C) 2004-2012 "Artur Hefczyc" <artur.hefczyc@tigase.org> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. Look for COPYING file in the top folder. * If not, see http://www.gnu.org/licenses/. * */ package tigase.server.websocket; //~--- non-JDK imports -------------------------------------------------------- import tigase.util.Base64; import tigase.xmpp.XMPPIOService; //~--- JDK imports ------------------------------------------------------------ import java.io.IOException; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.CharacterCodingException; import java.nio.charset.CoderResult; import java.nio.charset.MalformedInputException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; /** * Class implements basic support for WebSocket protocol. It extends * XMPPIOService so it can be used instead of XMPPIOService in * ClientConnectionManager to allow web clients to connect to it without using * BOSH extension. * * @param <RefObject> */ public class WebSocketXMPPIOService<RefObject> extends XMPPIOService<RefObject> { private static final String BAD_REQUEST = "HTTP/1.0 400 Bad request\r\n\r\n"; private static final String CONNECTION_KEY = "Connection"; private static final String GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"; private static final String HOST_KEY = "Host"; private static final Logger log = Logger.getLogger(WebSocketXMPPIOService.class.getCanonicalName()); private static final String ORIGIN_KEY = "Origin"; /* static variables used by WebSocket protocol */ private static final String RESPONSE_HEADER = "HTTP/1.1 101 Switching Protocols\r\n" + "Upgrade: websocket\r\n" + "Connection: Upgrade\r\n" + "Access-Control-Allow-Origin: *\r\n" + "Access-Control-Allow-Methods: GET, POST, OPTIONS\r\n" + "Access-Control-Allow-Headers: Content-Type\r\n" + "Access-Control-Max-Age: 86400\r\n"; private static final String WS_ACCEPT_KEY = "Sec-WebSocket-Accept"; private static final String WS_KEY_KEY = "Sec-WebSocket-Key"; private static final String WS_PROTOCOL_KEY = "Sec-WebSocket-Protocol"; private static final String WS_VERSION_KEY = "Sec-WebSocket-Version"; //~--- fields --------------------------------------------------------------- private byte[] buf = null; private long frameLength = -1; private byte[] maskingKey = null; private int pos = 0; private int version = 0; private byte[] partialFrame = null; // internal properties private boolean websocket = false; private boolean started = false; //~--- methods -------------------------------------------------------------- /** * Custom implementation of readData function which decodes WebSocket * protocol frames * * @return * @throws IOException */ @Override protected char[] readData() throws IOException { ByteBuffer cb = super.readBytes(); if (cb == 
null) { return null; } if (websocket) { // handling partialy decoded frame if (partialFrame != null) { ByteBuffer oldtmp = cb; cb = ByteBuffer.allocate(partialFrame.length + oldtmp.remaining()); cb.order(oldtmp.order()); cb.put(partialFrame); cb.put(oldtmp); cb.flip(); oldtmp.clear(); partialFrame = null; } // data needs to be decoded fully not just first frame!! ByteBuffer tmp = ByteBuffer.allocate(cb.remaining()); // here we got buffer overflow ByteBuffer decoded = null; while (cb.hasRemaining() && (decoded = decodeFrame(cb)) != null) { //decoded = decodeFrame(cb); if (decoded != null && decoded.hasRemaining()) { tmp.put(decoded); } } // handling data which were not decoded - not complete data if (cb.hasRemaining()) { partialFrame = new byte[cb.remaining()]; cb.get(partialFrame); } // compact buffer after reading all frames cb.compact(); if (tmp.capacity() > 0) { tmp.flip(); } cb = tmp; } if (started) { return decode(cb); } if (!cb.hasRemaining()) { return null; } if ((pos == 0) && (cb.get(0) != (byte) 'G')) { started = true; return decode(cb); } if (buf == null) { buf = new byte[1024]; } try { int read = cb.remaining(); cb.get(buf, pos, read); pos += read; cb.compact(); if ((pos > 100) && (((buf[pos - 1] == '\n') && (buf[pos - 1] == buf[pos - 3])) || ((buf[pos - 9] == '\n') && (buf[pos - 9] == buf[pos - 11])))) { started = true; processWebSocketHandshake(); websocket = true; buf = null; } } catch (Exception ex) { if (log.isLoggable(Level.FINE)) { log.log(Level.FINE, "exception processing websocket handshake", ex); } this.forceStop(); } return null; } /** * Custom implementation of writeData function which encodes data * in WebSocket protocol frames * * @param data */ @Override protected void writeData(final String data) { // Try to lock the data writing method // If cannot lock and nothing to send, just leave boolean locked = writeInProgress.tryLock(); // Otherwise wait..... 
if (!locked) { if (data == null) { return; } writeInProgress.lock(); } try { if (websocket) { try { if (data != null) { if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "sending data = {0}", data); } ByteBuffer buf = encode(data); int size = buf.remaining(); // set type as finally part (0x80) of message of type text (0x01) if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "sending encoded data size = {0}", size); } ByteBuffer bbuf = createFrameHeader((byte) 0x81, size); // send frame header writeBytes(bbuf); // send frame content writeBytes(buf); buf.compact(); } else { writeBytes(null); } } catch (Exception ex) { if (log.isLoggable(Level.FINE)) { log.log(Level.FINE, "exception writing data", ex); } forceStop(); } } else { super.writeData(data); } } finally { writeInProgress.unlock(); } } /** * Process data from internal temporary buffer used to decode HTTP request * used by WebSocket protocol to switch protocol to WebSocket protocol * * @throws NoSuchAlgorithmException * @throws IOException */ private void processWebSocketHandshake() throws NoSuchAlgorithmException, IOException { HashMap<String, String> headers = new HashMap<String, String>(); int i = 0; while (buf[i] != '\n') { i++; } i++; if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "parsing request = \n{0}", new String(buf)); } StringBuilder builder = new StringBuilder(64); String key = null; for (; i < pos; i++) { switch (buf[i]) { case ':' : if (key == null) { key = builder.toString(); builder = new StringBuilder(64); i++; } else { builder.append((char) buf[i]); } break; case '\r' : headers.put(key, builder.toString()); key = null; builder = new StringBuilder(64); if (buf[i + 2] == '\r') { i += 3; } else { i++; } break; default : builder.append((char) buf[i]); } } if (!headers.containsKey(CONNECTION_KEY) || !headers.get(CONNECTION_KEY).contains("Upgrade")) { writeRawData(BAD_REQUEST); dumpHeaders(headers); return; } if (!headers.containsKey(WS_PROTOCOL_KEY) || !headers.get(WS_PROTOCOL_KEY).contains("xmpp")) { writeRawData(BAD_REQUEST); dumpHeaders(headers); return; } StringBuilder response = new StringBuilder(RESPONSE_HEADER.length() * 2); response.append(RESPONSE_HEADER); if (headers.containsKey(WS_VERSION_KEY)) { version = Integer.parseInt(headers.get(WS_VERSION_KEY)); key = headers.get(WS_KEY_KEY) + GUID; MessageDigest md = MessageDigest.getInstance("SHA1"); byte[] resp = md.digest(key.getBytes()); response.append(WS_PROTOCOL_KEY); response.append(": xmpp\r\n"); response.append(WS_ACCEPT_KEY + ": "); response.append(Base64.encode(resp)); response.append("\r\n"); response.append("\r\n"); maskingKey = new byte[4]; writeRawData(response.toString()); } } /** * Decode data encoded in WebSocket frames from buffer * * @param buf * @return */ private ByteBuffer decodeFrame(ByteBuffer buf) { if (!buf.hasRemaining()) { if (log.isLoggable(Level.FINEST)) { log.finest("no content remainging to process"); } return null; } boolean masked = false; byte type = 0x00; int position = buf.position(); if (frameLength == -1) { type = buf.get(); if ((type & 0x08) == 0x08) { // close request if (log.isLoggable(Level.FINEST)) { log.finest("closing connection due to client request"); } forceStop(); return null; } byte b2 = buf.get(); // check if content is masked masked = (b2 & 0x80) == 0x80; // ignore sign bit frameLength = (b2 & 0x7F); if (frameLength > 125) { // if frame length is bigger than 125 then // if is 126 - size is short // is is 127 - size is long frameLength = (frameLength == 126) ? 
buf.getShort() : buf.getLong(); } if (masked) { // if content is masked get masking key buf.get(maskingKey); } } ByteBuffer unmasked = null; if (buf.remaining() >= frameLength) { byte[] data = new byte[(int) frameLength]; buf.get(data); // if content is masked then unmask content if (masked) { for (int i = 0; i < data.length; i++) { data[i] = (byte) (data[i] ^ maskingKey[i % 4]); } } unmasked = ByteBuffer.wrap(data); frameLength = -1; } else { // not enought data so reset buffer position buf.position(position); frameLength = -1; return null; } if (frameLength == -1) { // we need to ignore pong frame if ((type & 0x0A) == 0x0A) { if (log.isLoggable(Level.FINEST)) { log.finest("ignoring pong frame"); } unmasked = null; } // if it ping request send pong response else if ((type & 0x09) == 0x09) { if (log.isLoggable(Level.FINEST)) { log.finest("sending response on ping frame"); } type = (byte) (((byte) (type ^ 0x09)) | 0x0A); try { ByteBuffer header = createFrameHeader(type, unmasked.remaining()); writeInProgress.lock(); writeBytes(header); writeBytes(unmasked); } finally { writeInProgress.unlock(); } unmasked = null; } } return unmasked; } /** * Create WebSocket frame header with specific type and size * * @param type * @param size * @return */ private ByteBuffer createFrameHeader(byte type, int size) { ByteBuffer bbuf = ByteBuffer.allocate(9); bbuf.put(type); if (size <= 125) { bbuf.put((byte) size); } else if (size <= 0xFFFF) { bbuf.put((byte) 0x7E); bbuf.putShort((short) size); } else { bbuf.put((byte) 0x7F); bbuf.putLong((long) size); } bbuf.flip(); return bbuf; } /** * Decode data from buffer to chars array * * @param tmpBuffer * @return * @throws MalformedInputException */ private char[] decode(ByteBuffer tmpBuffer) throws MalformedInputException { if (tmpBuffer == null) { return null; } char[] result = null; // Restore the partial bytes for multibyte UTF8 characters if (partialCharacterBytes != null) { ByteBuffer oldTmpBuffer = tmpBuffer; tmpBuffer = ByteBuffer.allocate(partialCharacterBytes.length + oldTmpBuffer.remaining() + 2); tmpBuffer.put(partialCharacterBytes); tmpBuffer.put(oldTmpBuffer); tmpBuffer.flip(); oldTmpBuffer.clear(); partialCharacterBytes = null; } if (cb.capacity() < tmpBuffer.remaining() * 4) { cb = CharBuffer.allocate(tmpBuffer.remaining() * 4); } CoderResult cr = decoder.decode(tmpBuffer, cb, false); if (cr.isMalformed()) { throw new MalformedInputException(tmpBuffer.remaining()); } if (cb.remaining() > 0) { cb.flip(); result = new char[cb.remaining()]; cb.get(result); } if (cr.isUnderflow() && (tmpBuffer.remaining() > 0)) { // Save the partial bytes of a multibyte character such that they // can be restored on the next read. 
partialCharacterBytes = new byte[tmpBuffer.remaining()]; tmpBuffer.get(partialCharacterBytes); } tmpBuffer.clear(); cb.clear(); return result; } /** * Encode string into buffer * * @param data * @return * @throws CharacterCodingException */ private ByteBuffer encode(String data) throws CharacterCodingException { ByteBuffer dataBuffer = null; encoder.reset(); // dataBuffer = encoder.encode(CharBuffer.wrap(data, idx_start, // idx_offset)); dataBuffer = encoder.encode(CharBuffer.wrap(data)); encoder.flush(dataBuffer); // dataBuffer.flip(); return dataBuffer; } public void dumpHeaders(Map<String,String> headers) { if (log.isLoggable(Level.FINEST)) { StringBuilder builder = new StringBuilder(1000); for(Map.Entry<String,String> entry : headers.entrySet()) { builder.append("KEY = "); builder.append(entry.getKey()); builder.append("VALUE = "); builder.append(entry.getValue()); builder.append('\n'); } log.log(Level.FINEST, "received headers = \n{0}", builder.toString()); } } } //~ Formatted in Tigase Code Convention on 13/02/19
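For readers unfamiliar with the framing rules that createFrameHeader and decodeFrame above rely on, the following is a minimal, self-contained sketch of the RFC 6455 length encoding only. It is not part of the Tigase codebase; the class and method names are hypothetical, and the masked client-to-server case (the extra 4 masking-key bytes handled in decodeFrame) is deliberately left out.

import java.nio.ByteBuffer;

// Illustrative helper mirroring the length-encoding branches used above:
// payloads up to 125 bytes fit in the second header byte, 126 signals a
// 16-bit length field, 127 signals a 64-bit length field.
public class FrameHeaderSketch {

    static ByteBuffer header(byte finAndOpcode, int payloadSize) {
        ByteBuffer buf = ByteBuffer.allocate(10); // worst case: 1 + 1 + 8 bytes
        buf.put(finAndOpcode);                    // e.g. (byte) 0x81 = FIN + text frame
        if (payloadSize <= 125) {
            buf.put((byte) payloadSize);          // length fits in the low 7 bits
        } else if (payloadSize <= 0xFFFF) {
            buf.put((byte) 126);                  // next 2 bytes carry the length
            buf.putShort((short) payloadSize);
        } else {
            buf.put((byte) 127);                  // next 8 bytes carry the length
            buf.putLong(payloadSize);
        }
        buf.flip();
        return buf;
    }

    public static void main(String[] args) {
        System.out.println(header((byte) 0x81, 125).remaining());   // 2
        System.out.println(header((byte) 0x81, 1000).remaining());  // 4
        System.out.println(header((byte) 0x81, 70000).remaining()); // 10
    }
}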
src/main/java/tigase/server/websocket/WebSocketXMPPIOService.java
/* * WebSocketXMPPIOService.java * * Tigase Jabber/XMPP Server * Copyright (C) 2004-2012 "Artur Hefczyc" <artur.hefczyc@tigase.org> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. Look for COPYING file in the top folder. * If not, see http://www.gnu.org/licenses/. * */ package tigase.server.websocket; //~--- non-JDK imports -------------------------------------------------------- import tigase.util.Base64; import tigase.xmpp.XMPPIOService; //~--- JDK imports ------------------------------------------------------------ import java.io.IOException; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.CharacterCodingException; import java.nio.charset.CoderResult; import java.nio.charset.MalformedInputException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.logging.Level; import java.util.logging.Logger; /** * Class implements basic support for WebSocket protocol. It extends * XMPPIOService so it can be used instead of XMPPIOService in * ClientConnectionManager to allow web clients to connect to it without using * BOSH extension. * * @param <RefObject> */ public class WebSocketXMPPIOService<RefObject> extends XMPPIOService<RefObject> { private static final String BAD_REQUEST = "HTTP/1.0 400 Bad request\r\n\r\n"; private static final String CONNECTION_KEY = "Connection"; private static final String GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"; private static final String HOST_KEY = "Host"; private static final Logger log = Logger.getLogger(WebSocketXMPPIOService.class.getCanonicalName()); private static final String ORIGIN_KEY = "Origin"; /* static variables used by WebSocket protocol */ private static final String RESPONSE_HEADER = "HTTP/1.1 101 Switching Protocols\r\n" + "Upgrade: websocket\r\n" + "Connection: Upgrade\r\n" + "Access-Control-Allow-Origin: *\r\n" + "Access-Control-Allow-Methods: GET, POST, OPTIONS\r\n" + "Access-Control-Allow-Headers: Content-Type\r\n" + "Access-Control-Max-Age: 86400\r\n"; private static final String WS_ACCEPT_KEY = "Sec-WebSocket-Accept"; private static final String WS_KEY_KEY = "Sec-WebSocket-Key"; private static final String WS_PROTOCOL_KEY = "Sec-WebSocket-Protocol"; private static final String WS_VERSION_KEY = "Sec-WebSocket-Version"; //~--- fields --------------------------------------------------------------- private byte[] buf = null; private long frameLength = -1; private byte[] maskingKey = null; private int pos = 0; private int version = 0; private byte[] partialFrame = null; // internal properties private boolean websocket = false; private boolean started = false; //~--- methods -------------------------------------------------------------- /** * Custom implementation of readData function which decodes WebSocket * protocol frames * * @return * @throws IOException */ @Override protected char[] readData() throws IOException { ByteBuffer cb = super.readBytes(); if (cb == null) { return null; } 
if (websocket) { // handling partialy decoded frame if (partialFrame != null) { ByteBuffer oldtmp = cb; cb = ByteBuffer.allocate(partialFrame.length + oldtmp.remaining()); cb.order(oldtmp.order()); cb.put(partialFrame); cb.put(oldtmp); cb.flip(); oldtmp.clear(); partialFrame = null; } // data needs to be decoded fully not just first frame!! ByteBuffer tmp = ByteBuffer.allocate(cb.remaining()); // here we got buffer overflow ByteBuffer decoded = null; while (cb.hasRemaining() && (decoded = decodeFrame(cb)) != null) { //decoded = decodeFrame(cb); if (decoded != null && decoded.hasRemaining()) { tmp.put(decoded); } } // handling data which were not decoded - not complete data if (cb.hasRemaining()) { partialFrame = new byte[cb.remaining()]; cb.get(partialFrame); } // compact buffer after reading all frames cb.compact(); if (tmp.capacity() > 0) { tmp.flip(); } cb = tmp; } if (started) { return decode(cb); } if ((pos == 0) && (cb.get(0) != (byte) 'G')) { started = true; return decode(cb); } if (buf == null) { buf = new byte[1024]; } try { int read = cb.remaining(); cb.get(buf, pos, read); pos += read; cb.compact(); if ((pos > 100) && (((buf[pos - 1] == '\n') && (buf[pos - 1] == buf[pos - 3])) || ((buf[pos - 9] == '\n') && (buf[pos - 9] == buf[pos - 11])))) { started = true; processWebSocketHandshake(); websocket = true; buf = null; } } catch (Exception ex) { if (log.isLoggable(Level.FINE)) { log.log(Level.FINE, "exception processing websocket handshake", ex); } this.forceStop(); } return null; } /** * Custom implementation of writeData function which encodes data * in WebSocket protocol frames * * @param data */ @Override protected void writeData(final String data) { // Try to lock the data writing method // If cannot lock and nothing to send, just leave boolean locked = writeInProgress.tryLock(); // Otherwise wait..... 
if (!locked) { if (data == null) { return; } writeInProgress.lock(); } try { if (websocket) { try { if (data != null) { if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "sending data = {0}", data); } ByteBuffer buf = encode(data); int size = buf.remaining(); // set type as finally part (0x80) of message of type text (0x01) if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "sending encoded data size = {0}", size); } ByteBuffer bbuf = createFrameHeader((byte) 0x81, size); // send frame header writeBytes(bbuf); // send frame content writeBytes(buf); buf.compact(); } else { writeBytes(null); } } catch (Exception ex) { if (log.isLoggable(Level.FINE)) { log.log(Level.FINE, "exception writing data", ex); } forceStop(); } } else { super.writeData(data); } } finally { writeInProgress.unlock(); } } /** * Process data from internal temporary buffer used to decode HTTP request * used by WebSocket protocol to switch protocol to WebSocket protocol * * @throws NoSuchAlgorithmException * @throws IOException */ private void processWebSocketHandshake() throws NoSuchAlgorithmException, IOException { HashMap<String, String> headers = new HashMap<String, String>(); int i = 0; while (buf[i] != '\n') { i++; } i++; if (log.isLoggable(Level.FINEST)) { log.log(Level.FINEST, "parsing request = \n{0}", new String(buf)); } StringBuilder builder = new StringBuilder(64); String key = null; for (; i < pos; i++) { switch (buf[i]) { case ':' : if (key == null) { key = builder.toString(); builder = new StringBuilder(64); i++; } else { builder.append((char) buf[i]); } break; case '\r' : headers.put(key, builder.toString()); key = null; builder = new StringBuilder(64); if (buf[i + 2] == '\r') { i += 3; } else { i++; } break; default : builder.append((char) buf[i]); } } if (!headers.containsKey(CONNECTION_KEY) || !headers.get(CONNECTION_KEY).contains("Upgrade")) { writeRawData(BAD_REQUEST); return; } if (!headers.containsKey(WS_PROTOCOL_KEY) || !headers.get(WS_PROTOCOL_KEY).contains("xmpp")) { writeRawData(BAD_REQUEST); return; } StringBuilder response = new StringBuilder(RESPONSE_HEADER.length() * 2); response.append(RESPONSE_HEADER); if (headers.containsKey(WS_VERSION_KEY)) { version = Integer.parseInt(headers.get(WS_VERSION_KEY)); key = headers.get(WS_KEY_KEY) + GUID; MessageDigest md = MessageDigest.getInstance("SHA1"); byte[] resp = md.digest(key.getBytes()); response.append(WS_PROTOCOL_KEY); response.append(": xmpp\r\n"); response.append(WS_ACCEPT_KEY + ": "); response.append(Base64.encode(resp)); response.append("\r\n"); response.append("\r\n"); maskingKey = new byte[4]; writeRawData(response.toString()); } } /** * Decode data encoded in WebSocket frames from buffer * * @param buf * @return */ private ByteBuffer decodeFrame(ByteBuffer buf) { if (!buf.hasRemaining()) { if (log.isLoggable(Level.FINEST)) { log.finest("no content remainging to process"); } return null; } boolean masked = false; byte type = 0x00; int position = buf.position(); if (frameLength == -1) { type = buf.get(); if ((type & 0x08) == 0x08) { // close request if (log.isLoggable(Level.FINEST)) { log.finest("closing connection due to client request"); } forceStop(); return null; } byte b2 = buf.get(); // check if content is masked masked = (b2 & 0x80) == 0x80; // ignore sign bit frameLength = (b2 & 0x7F); if (frameLength > 125) { // if frame length is bigger than 125 then // if is 126 - size is short // is is 127 - size is long frameLength = (frameLength == 126) ? 
buf.getShort() : buf.getLong(); } if (masked) { // if content is masked get masking key buf.get(maskingKey); } } ByteBuffer unmasked = null; if (buf.remaining() >= frameLength) { byte[] data = new byte[(int) frameLength]; buf.get(data); // if content is masked then unmask content if (masked) { for (int i = 0; i < data.length; i++) { data[i] = (byte) (data[i] ^ maskingKey[i % 4]); } } unmasked = ByteBuffer.wrap(data); frameLength = -1; } else { // not enought data so reset buffer position buf.position(position); frameLength = -1; return null; } if (frameLength == -1) { // we need to ignore pong frame if ((type & 0x0A) == 0x0A) { if (log.isLoggable(Level.FINEST)) { log.finest("ignoring pong frame"); } unmasked = null; } // if it ping request send pong response else if ((type & 0x09) == 0x09) { if (log.isLoggable(Level.FINEST)) { log.finest("sending response on ping frame"); } type = (byte) (((byte) (type ^ 0x09)) | 0x0A); try { ByteBuffer header = createFrameHeader(type, unmasked.remaining()); writeInProgress.lock(); writeBytes(header); writeBytes(unmasked); } finally { writeInProgress.unlock(); } unmasked = null; } } return unmasked; } /** * Create WebSocket frame header with specific type and size * * @param type * @param size * @return */ private ByteBuffer createFrameHeader(byte type, int size) { ByteBuffer bbuf = ByteBuffer.allocate(9); bbuf.put(type); if (size <= 125) { bbuf.put((byte) size); } else if (size <= 0xFFFF) { bbuf.put((byte) 0x7E); bbuf.putShort((short) size); } else { bbuf.put((byte) 0x7F); bbuf.putLong((long) size); } bbuf.flip(); return bbuf; } /** * Decode data from buffer to chars array * * @param tmpBuffer * @return * @throws MalformedInputException */ private char[] decode(ByteBuffer tmpBuffer) throws MalformedInputException { if (tmpBuffer == null) { return null; } char[] result = null; // Restore the partial bytes for multibyte UTF8 characters if (partialCharacterBytes != null) { ByteBuffer oldTmpBuffer = tmpBuffer; tmpBuffer = ByteBuffer.allocate(partialCharacterBytes.length + oldTmpBuffer.remaining() + 2); tmpBuffer.put(partialCharacterBytes); tmpBuffer.put(oldTmpBuffer); tmpBuffer.flip(); oldTmpBuffer.clear(); partialCharacterBytes = null; } if (cb.capacity() < tmpBuffer.remaining() * 4) { cb = CharBuffer.allocate(tmpBuffer.remaining() * 4); } CoderResult cr = decoder.decode(tmpBuffer, cb, false); if (cr.isMalformed()) { throw new MalformedInputException(tmpBuffer.remaining()); } if (cb.remaining() > 0) { cb.flip(); result = new char[cb.remaining()]; cb.get(result); } if (cr.isUnderflow() && (tmpBuffer.remaining() > 0)) { // Save the partial bytes of a multibyte character such that they // can be restored on the next read. partialCharacterBytes = new byte[tmpBuffer.remaining()]; tmpBuffer.get(partialCharacterBytes); } tmpBuffer.clear(); cb.clear(); return result; } /** * Encode string into buffer * * @param data * @return * @throws CharacterCodingException */ private ByteBuffer encode(String data) throws CharacterCodingException { ByteBuffer dataBuffer = null; encoder.reset(); // dataBuffer = encoder.encode(CharBuffer.wrap(data, idx_start, // idx_offset)); dataBuffer = encoder.encode(CharBuffer.wrap(data)); encoder.flush(dataBuffer); // dataBuffer.flip(); return dataBuffer; } } //~ Formatted in Tigase Code Convention on 13/02/19
Fixes after initial tests of WebSocket over SSL
src/main/java/tigase/server/websocket/WebSocketXMPPIOService.java
Fixes after initial tests of WebSocket over SSL
Java
lgpl-2.1
32ae13dc5104bda358109868193a67ee6e2ad33b
0
CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine
/* * jETeL/CloverETL - Java based ETL application framework. * Copyright (c) Javlin, a.s. (info@cloveretl.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.jetel.util.spreadsheet; import org.jetel.util.string.StringUtils; /** * CLO-6995: * We want to share this enum in spreadsheet engine and gui * but the plugin hierarchy is messed up so the enum has to be here * instead of in cloveretl.spreadsheet.commercial plugin. * * @author salamonp (info@cloveretl.com) * (c) Javlin, a.s. (www.cloveretl.com) * * @created 3. 8. 2015 */ public enum HyperlinkType { NONE("No hyperlink"), DOCUMENT("Document"), EMAIL("E-mail"), FILE("File"), URL("URL"); private String label; private HyperlinkType(String label) { this.label = label; } @Override public String toString() { return label; } public static HyperlinkType getDefault() { return NONE; } public static HyperlinkType valueOfIgnoreCase(String string) { for (HyperlinkType type : values()) { if (type.toString().equalsIgnoreCase(string)) { return type; } } throw new IllegalArgumentException(StringUtils.quote(string) + " is not a valid Hyperlink type"); } }
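A minimal usage sketch of the enum above; the caller class and the input strings are purely illustrative.

import org.jetel.util.spreadsheet.HyperlinkType;

// Hypothetical caller showing how labels round-trip through valueOfIgnoreCase.
public class HyperlinkTypeDemo {
    public static void main(String[] args) {
        HyperlinkType byLabel = HyperlinkType.valueOfIgnoreCase("e-mail"); // matches the "E-mail" label
        HyperlinkType fallback = HyperlinkType.getDefault();               // NONE, labelled "No hyperlink"
        System.out.println(byLabel + " / " + fallback);                    // prints: E-mail / No hyperlink
        // An unrecognised label such as "ftp" would throw IllegalArgumentException.
    }
}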
cloveretl.engine/src/org/jetel/util/spreadsheet/HyperlinkType.java
/* * jETeL/CloverETL - Java based ETL application framework. * Copyright (c) Javlin, a.s. (info@cloveretl.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.jetel.util.spreadsheet; import org.jetel.util.string.StringUtils; /** * CLO-6995: * We want to share this enum in spreadsheet engine and gui * but the plugin hierarchy is messed up so the enum has to be here * instead of in cloveretl.spreadsheet.commercial plugin. * * @author salamonp (info@cloveretl.com) * (c) Javlin, a.s. (www.cloveretl.com) * * @created 3. 8. 2015 */ public enum HyperlinkType { NONE("None"), DOCUMENT("Document"), EMAIL("E-mail"), FILE("File"), URL("URL"); private String label; private HyperlinkType(String label) { this.label = label; } @Override public String toString() { return label; } public static HyperlinkType getDefault() { return NONE; } public static HyperlinkType valueOfIgnoreCase(String string) { for (HyperlinkType type : values()) { if (type.toString().equalsIgnoreCase(string)) { return type; } } throw new IllegalArgumentException(StringUtils.quote(string) + " is not a valid Hyperlink type"); } }
NEW: CLO-6980 - SpreadsheetWriter - add hyperlink support git-svn-id: b06cb3fadab8b8643c3bd65b40fa9dca09734dc4@17932 a09ad3ba-1a0f-0410-b1b9-c67202f10d70
cloveretl.engine/src/org/jetel/util/spreadsheet/HyperlinkType.java
NEW: CLO-6980 - SpreadsheetWriter - add hyperlink support
Java
lgpl-2.1
91b763853147eaf5596632c8439139bc0b2d8ecc
0
it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor,it-innovation/EXPERImonitor
///////////////////////////////////////////////////////////////////////// // // © University of Southampton IT Innovation Centre, 2014 // // Copyright in this software belongs to University of Southampton // IT Innovation Centre of Gamma House, Enterprise Road, // Chilworth Science Park, Southampton, SO16 7NS, UK. // // This software may not be used, sold, licensed, transferred, copied // or reproduced in whole or in part in any manner or form or in or // on any media by any person other than in accordance with the terms // of the Licence Agreement supplied with the software, or otherwise // without the prior written consent of the copyright owners. // // This software is distributed WITHOUT ANY WARRANTY, without even the // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR // PURPOSE, except where stated in the Licence Agreement supplied with // the software. // // Created By : Simon Crowle // Created Date : 09-Apr-2014 // Created for Project : EXPERIMEDIA // ///////////////////////////////////////////////////////////////////////// package uk.ac.soton.itinnovation.experimedia.ecc.service.test.experiment; import java.util.Date; import org.junit.*; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.experiment.Experiment; import uk.co.soton.itinnovation.ecc.service.Application; import uk.co.soton.itinnovation.ecc.service.domain.EccConfiguration; import uk.co.soton.itinnovation.ecc.service.services.ConfigurationService; import uk.co.soton.itinnovation.ecc.service.services.ExperimentService; @RunWith(SpringJUnit4ClassRunner.class) @SpringApplicationConfiguration(classes = Application.class) @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD) // will reset everything after each test, comment this out if you want to test as singleton public class ExperimentServiceTest { private EccConfiguration eccConfig; @Autowired ExperimentService expService; @Autowired ConfigurationService configurationService; public ExperimentServiceTest() { } @Before public void setUp() { Assert.assertTrue(configurationService.isInitialised()); // Safer as doesn't depend on availability of http://config.experimedia.eu // Online can be a separate test eccConfig = configurationService.getLocalConfiguration(); Assert.assertNotNull(eccConfig); configurationService.selectEccConfiguration(eccConfig); Assert.assertTrue(configurationService.isConfigurationSet()); Assert.assertTrue(configurationService.startExperimentService()); } @Test public void testStartStopExperiment() { try { Date expDate = new Date(); String expName = "Test experiment " + expDate.toString(); Assert.assertNull(expService.getActiveExperiment()); expService.startExperiment("DefaultTest", expName, "JUnit test"); // Check experiment meta-data Experiment activeExp = expService.getActiveExperiment(); Assert.assertNotNull(activeExp); Assert.assertNotNull(activeExp.getUUID()); Assert.assertNotNull(activeExp.getExperimentID()); Assert.assertNotNull(activeExp.getName()); Assert.assertNotNull(activeExp.getDescription()); Assert.assertNotNull(activeExp.getStartTime()); Assert.assertNull(activeExp.getEndTime()); expService.stopExperiment(); Assert.assertNotNull(activeExp.getEndTime()); // Local copy 
Assert.assertNull(expService.getActiveExperiment()); // Service copy } catch (Exception ex) { Assert.fail(ex.getMessage()); } } }
eccService/src/test/java/uk/ac/soton/itinnovation/experimedia/ecc/service/test/experiment/ExperimentServiceTest.java
///////////////////////////////////////////////////////////////////////// // // © University of Southampton IT Innovation Centre, 2014 // // Copyright in this software belongs to University of Southampton // IT Innovation Centre of Gamma House, Enterprise Road, // Chilworth Science Park, Southampton, SO16 7NS, UK. // // This software may not be used, sold, licensed, transferred, copied // or reproduced in whole or in part in any manner or form or in or // on any media by any person other than in accordance with the terms // of the Licence Agreement supplied with the software, or otherwise // without the prior written consent of the copyright owners. // // This software is distributed WITHOUT ANY WARRANTY, without even the // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR // PURPOSE, except where stated in the Licence Agreement supplied with // the software. // // Created By : Simon Crowle // Created Date : 09-Apr-2014 // Created for Project : EXPERIMEDIA // ///////////////////////////////////////////////////////////////////////// package uk.ac.soton.itinnovation.experimedia.ecc.service.test.experiment; import java.util.Date; import org.junit.*; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.experiment.Experiment; import uk.co.soton.itinnovation.ecc.service.Application; import uk.co.soton.itinnovation.ecc.service.domain.EccConfiguration; import uk.co.soton.itinnovation.ecc.service.services.ConfigurationService; import uk.co.soton.itinnovation.ecc.service.services.ExperimentService; @RunWith(SpringJUnit4ClassRunner.class) @SpringApplicationConfiguration(classes = Application.class) @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD) // will reset everything after each test, comment this out if you want to test as singleton public class ExperimentServiceTest { private EccConfiguration eccConfig; @Autowired ExperimentService expService; @Autowired ConfigurationService configurationService; public ExperimentServiceTest() { } @Before public void setUp() { Assert.assertTrue(configurationService.isInitialised()); // Safer as doesn't depend on availability of http://config.experimedia.eu // Online can be a separate test eccConfig = configurationService.getLocalConfiguration(); Assert.assertNotNull(eccConfig); configurationService.selectEccConfiguration(eccConfig); Assert.assertTrue(configurationService.isConfigurationSet()); configurationService.startExperimentService(); Assert.assertTrue(expService.isStarted()); } @Test public void testStartStopExperiment() { try { Date expDate = new Date(); String expName = "Test experiment " + expDate.toString(); Assert.assertNull(expService.getActiveExperiment()); expService.startExperiment("DefaultTest", expName, "JUnit test"); // Check experiment meta-data Experiment activeExp = expService.getActiveExperiment(); Assert.assertNotNull(activeExp); Assert.assertNotNull(activeExp.getUUID()); Assert.assertNotNull(activeExp.getExperimentID()); Assert.assertNotNull(activeExp.getName()); Assert.assertNotNull(activeExp.getDescription()); Assert.assertNotNull(activeExp.getStartTime()); Assert.assertNull(activeExp.getEndTime()); expService.stopExperiment(); Assert.assertNotNull(activeExp.getEndTime()); // Local copy 
Assert.assertNull(expService.getActiveExperiment()); // Service copy } catch (Exception ex) { Assert.fail(ex.getMessage()); } } }
Simplifies setup phase of experiment tests
eccService/src/test/java/uk/ac/soton/itinnovation/experimedia/ecc/service/test/experiment/ExperimentServiceTest.java
Simplifies setup phase of experiment tests
Java
apache-2.0
6cfee2dbc52cc9ce9f8e6c7304346c9801385f16
0
googleinterns/step92-2020,googleinterns/step92-2020,googleinterns/step92-2020
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.servlets; import com.google.appengine.api.blobstore.BlobInfo; import com.google.appengine.api.blobstore.BlobInfoFactory; import com.google.appengine.api.blobstore.BlobstoreService; import com.google.appengine.api.blobstore.BlobstoreServiceFactory; import com.google.appengine.api.blobstore.BlobKey; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; import com.google.appengine.api.datastore.Entity; import com.google.appengine.api.datastore.PreparedQuery; import com.google.appengine.api.datastore.Query; import com.google.appengine.api.datastore.Query.SortDirection; import com.google.appengine.api.images.ImagesService; import com.google.appengine.api.images.ImagesServiceFactory; import com.google.appengine.api.images.ServingUrlOptions; import com.google.appengine.api.users.UserService; import com.google.appengine.api.users.UserServiceFactory; import com.google.gson.Gson; import com.google.sps.data.BlogMessage; import com.google.sps.data.BlogHashMap; import java.io.IOException; import java.io.PrintWriter; import java.io.*; import java.net.MalformedURLException; import java.net.URL; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Map; /** Servlet that returns some example content. TODO: modify this file to handle comments data */ @WebServlet("/data") public class DataServlet extends HttpServlet { int numberOfCommentsToDisplay = 0; @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { List<String> allTags = new ArrayList<>(); allTags.add("#general"); allTags.add("#music"); List<BlogMessage> messages = new ArrayList<>(); Query query = new Query("blogMessage").addSort("time", SortDirection.DESCENDING); DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); PreparedQuery results = datastore.prepare(query); UserService userService = UserServiceFactory.getUserService(); for (Entity entity : results.asIterable()) { long messageId = entity.getKey().getId(); long timestamp = (long) entity.getProperty("time"); String tag = (String) entity.getProperty("tag"); String comment = (String) entity.getProperty("text"); String nickname = (String) entity.getProperty("nickname"); String email = (String) userService.getCurrentUser().getEmail(); String image = (String) entity.getProperty("imgUrl"); ArrayList<String> messageReplies = (ArrayList) entity.getProperty("replies"); BlogMessage message = new BlogMessage(messageId, tag, comment, image, nickname, email, messageReplies, timestamp); messages.add(message); } // Create BlogHashMap Object and put BlogMessages in the map. 
BlogHashMap blogMap = new BlogHashMap(); blogMap.putInMap(messages); // If (user loads all BlogMessages) LinkedList<BlogMessage> allBlogMessages = blogMap.getMessages(allTags, messages.size()); if (numberOfCommentsToDisplay < 1 || numberOfCommentsToDisplay >= allBlogMessages.size()) { response.setContentType("text/html"); response.getWriter().println("Please enter an integer between 1 and "+allBlogMessages.size()+"."); return; } Gson gson = new Gson(); response.setContentType("application/json;"); if(numberOfCommentsToDisplay == 0){ response.getWriter().println(gson.toJson(allBlogMessages)); // set a default amount later. return; } else { List<BlogMessage> limitedBlogMessages = new ArrayList<>(); for (int i = 0; i < allBlogMessages.size(); i++) { limitedBlogMessages.add(allBlogMessages.get(i)); } response.getWriter().println(gson.toJson(limitedBlogMessages)); return; } /** TODO: add functionality for next cases => 1. user specifies amount for all messages 2. user specifies amount for messages under a tag 3. user specifies amount for all messages under a list of tags */ } /** * Converts a ServerStats instance into a JSON string using the Gson library */ @Override public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException { String message = request.getParameter("text-input"); String sender = getParameter(request, "sender", "Steven"); // TODO: // Get default tag from the InternalTags class and use that below. // Get type of comment. String commentType = getParameter(request, "tags", "Default"); //String imageUrl = getUploadedFileUrl(request, "image"); String messageRepliesString = getParameter(request, "replies", ""); String messageRepliesArray[] = messageRepliesString.split(","); List<String> messageReplies = new ArrayList<String>(); messageReplies = Arrays.asList(messageRepliesArray); long timestamp = System.currentTimeMillis(); Entity blogMessageEntity = new Entity("blogMessage"); blogMessageEntity.setProperty("nickname", sender); blogMessageEntity.setProperty("text", message); //blogMessageEntity.setProperty("imgUrl", imageUrl); blogMessageEntity.setProperty("time", timestamp); blogMessageEntity.setProperty("tag", commentType); blogMessageEntity.setProperty("replies", messageReplies); DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); datastore.put(blogMessageEntity); response.sendRedirect("/index.html"); } /** * @return the request parameter, or the default value if the parameter * was not specified by the client. */ private String getParameter(HttpServletRequest request, String name, String defaultValue) { String value = request.getParameter(name); if (value == null) { return defaultValue; } return value; } /* Returns number of comments to display */ private int getNumberOfCommentsToDisplay(HttpServletRequest request) { String numberOfCommentsString = getParameter(request, "comments-choice", "0"); int numberOfComments; try { numberOfComments = Integer.parseInt(numberOfCommentsString); } catch (NumberFormatException e) { System.err.println("Could not convert to int: " + numberOfCommentsString); return 1; } return numberOfComments; } /** Returns a URL that points to the uploaded file, or null if the user didn't upload a file. 
*/ private String getUploadedFileUrl(HttpServletRequest request, String formInputElementName) { BlobstoreService blobstoreService = BlobstoreServiceFactory.getBlobstoreService(); Map<String, List<BlobKey>> blobs = blobstoreService.getUploads(request); List<BlobKey> blobKeys = blobs.get(formInputElementName); if (blobKeys == null || blobKeys.isEmpty()) { return null; } // Our form only contains a single file input, so get the first index. BlobKey blobKey = blobKeys.get(0); // User submitted form without selecting a file, so we can't get a URL. (live server) BlobInfo blobInfo = new BlobInfoFactory().loadBlobInfo(blobKey); if (blobInfo.getSize() == 0) { blobstoreService.delete(blobKey); return null; } // We could check the validity of the file here, e.g. to make sure it's an image file // https://stackoverflow.com/q/10779564/873165 // Use ImagesService to get a URL that points to the uploaded file. ImagesService imagesService = ImagesServiceFactory.getImagesService(); ServingUrlOptions options = ServingUrlOptions.Builder.withBlobKey(blobKey); // To support running in Google Cloud Shell with AppEngine's dev server, we must use the relative // path to the image, rather than the path returned by imagesService which contains a host. try { URL url = new URL(imagesService.getServingUrl(options)); return url.getPath(); } catch (MalformedURLException e) { return imagesService.getServingUrl(options); } } }
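The getUploadedFileUrl helper above only sees uploaded blobs when the browser form was posted through a Blobstore upload URL. The sketch below shows the usual App Engine pattern for generating that URL; it is an assumption about how the client side is wired (the "/blobstore-upload-url" path is hypothetical), not code from this repository. The "/data" success path and the "image" form field name match the servlet above.

import java.io.IOException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.appengine.api.blobstore.BlobstoreService;
import com.google.appengine.api.blobstore.BlobstoreServiceFactory;

// Hypothetical companion servlet: returns a Blobstore upload URL that the page's
// multipart/form-data form (containing <input type="file" name="image">) posts to.
// Blobstore stores the file and then forwards the request to /data above.
@WebServlet("/blobstore-upload-url")
public class BlobstoreUploadUrlServlet extends HttpServlet {
  @Override
  public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
    BlobstoreService blobstoreService = BlobstoreServiceFactory.getBlobstoreService();
    String uploadUrl = blobstoreService.createUploadUrl("/data");
    response.setContentType("text/plain");
    response.getWriter().println(uploadUrl);
  }
}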
portfolio/src/main/java/com/google/sps/servlets/DataServlet.java
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.sps.servlets; import com.google.appengine.api.blobstore.BlobInfo; import com.google.appengine.api.blobstore.BlobInfoFactory; import com.google.appengine.api.blobstore.BlobstoreService; import com.google.appengine.api.blobstore.BlobstoreServiceFactory; import com.google.appengine.api.blobstore.BlobKey; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; import com.google.appengine.api.datastore.Entity; import com.google.appengine.api.datastore.PreparedQuery; import com.google.appengine.api.datastore.Query; import com.google.appengine.api.datastore.Query.SortDirection; import com.google.appengine.api.images.ImagesService; import com.google.appengine.api.images.ImagesServiceFactory; import com.google.appengine.api.images.ServingUrlOptions; import com.google.appengine.api.users.UserService; import com.google.appengine.api.users.UserServiceFactory; import com.google.gson.Gson; import com.google.sps.data.BlogMessage; import com.google.sps.data.BlogHashMap; import java.io.IOException; import java.io.PrintWriter; import java.io.*; import java.net.MalformedURLException; import java.net.URL; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Map; /** Servlet that returns some example content. 
TODO: modify this file to handle comments data */ @WebServlet("/data") public class DataServlet extends HttpServlet { int numberOfCommentsToDisplay = 0; @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { System.out.println(numberOfCommentsToDisplay); List<BlogMessage> messages = new ArrayList<>(); Query query = new Query("blogMessage").addSort("time", SortDirection.DESCENDING); DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); PreparedQuery results = datastore.prepare(query); UserService userService = UserServiceFactory.getUserService(); for (Entity entity : results.asIterable()) { long messageId = entity.getKey().getId(); long timestamp = (long) entity.getProperty("time"); String tag = (String) entity.getProperty("tag"); String comment = (String) entity.getProperty("text"); String nickname = (String) entity.getProperty("nickname"); String email = (String) userService.getCurrentUser().getEmail(); //String image = (String) entity.getProperty("imgUrl"); ArrayList<String> messageReplies = (ArrayList) entity.getProperty("replies"); //BlogMessage message = new BlogMessage(messageId, tag, comment, image, nickname, email, messageReplies, timestamp); BlogMessage message = new BlogMessage(messageId, tag, comment, image, nickname, messageReplies, timestamp); messages.add(message); } // Create BlogHashMap Object and put BlogMessages in the map. BlogHashMap blogMap = new BlogHashMap(); blogMap.putInMap(messages); // If (user loads all BlogMessages) LinkedList<BlogMessage> allBlogMessages = blogMap.getMessages(); if (numberOfCommentsToDisplay < 1 || numberOfCommentsToDisplay >= allBlogMessages.size()) { response.setContentType("text/html"); response.getWriter().println("Please enter an integer between 1 and "+allBlogMessages.size()+"."); return; } // If (user loads BlogMessages for a specific tag) String tagToSearch = ""; // we'll get the input later. LinkedList<BlogMessage> BlogMessagesForTag = blogMap.getMessages(tagToSearch); // If (user loads all BlogMessages for a list of tags) List<String> tagsToSearch = new ArrayList<String>(); tagsToSearch.add(""); // we'll get inputs later. LinkedList<BlogMessage> loadedBlogMessages = blogMap.getMessages(tagsToSearch, numberOfCommentsToDisplay); Gson gson = new Gson(); response.setContentType("application/json;"); if(numberOfCommentsToDisplay == 0){ response.getWriter().println(gson.toJson(allBlogMessages)); // set a default amount later. return; } else { List<BlogMessage> limitedBlogMessages = new ArrayList<>(); for (int i = 0; i < allBlogMessages.size(); i++) { limitedBlogMessages.add(allBlogMessages.get(i)); } response.getWriter().println(gson.toJson(limitedBlogMessages)); return; } /** TODO: add functionality for next cases => 1. user specifies amount for all messages 2. user specifies amount for messages under a tag 3. user specifies amount for all messages under a list of tags */ } /** * Converts a ServerStats instance into a JSON string using the Gson library */ @Override public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException { String message = request.getParameter("text-input"); String sender = getParameter(request, "sender", "Steven"); // TODO: // Get default tag from the InternalTags class and use that below. // Get type of comment. 
String commentType = getParameter(request, "tags", "Default"); //String imageUrl = getUploadedFileUrl(request, "image"); String messageRepliesString = getParameter(request, "replies", ""); String messageRepliesArray[] = messageRepliesString.split(","); List<String> messageReplies = new ArrayList<String>(); messageReplies = Arrays.asList(messageRepliesArray); long timestamp = System.currentTimeMillis(); Entity blogMessageEntity = new Entity("blogMessage"); blogMessageEntity.setProperty("nickname", sender); blogMessageEntity.setProperty("text", message); //blogMessageEntity.setProperty("imgUrl", imageUrl); blogMessageEntity.setProperty("time", timestamp); blogMessageEntity.setProperty("tag", commentType); blogMessageEntity.setProperty("replies", messageReplies); DatastoreService datastore = DatastoreServiceFactory.getDatastoreService(); datastore.put(blogMessageEntity); response.sendRedirect("/index.html"); } /** * @return the request parameter, or the default value if the parameter * was not specified by the client. */ private String getParameter(HttpServletRequest request, String name, String defaultValue) { String value = request.getParameter(name); if (value == null) { return defaultValue; } return value; } /* Returns number of comments to display */ private int getNumberOfCommentsToDisplay(HttpServletRequest request) { String numberOfCommentsString = getParameter(request, "comments-choice", "0"); int numberOfComments; try { numberOfComments = Integer.parseInt(numberOfCommentsString); } catch (NumberFormatException e) { System.err.println("Could not convert to int: " + numberOfCommentsString); return 1; } return numberOfComments; } /** Returns a URL that points to the uploaded file, or null if the user didn't upload a file. */ private String getUploadedFileUrl(HttpServletRequest request, String formInputElementName) { BlobstoreService blobstoreService = BlobstoreServiceFactory.getBlobstoreService(); Map<String, List<BlobKey>> blobs = blobstoreService.getUploads(request); List<BlobKey> blobKeys = blobs.get(formInputElementName); if (blobKeys == null || blobKeys.isEmpty()) { return null; } // Our form only contains a single file input, so get the first index. BlobKey blobKey = blobKeys.get(0); // User submitted form without selecting a file, so we can't get a URL. (live server) BlobInfo blobInfo = new BlobInfoFactory().loadBlobInfo(blobKey); if (blobInfo.getSize() == 0) { blobstoreService.delete(blobKey); return null; } // We could check the validity of the file here, e.g. to make sure it's an image file // https://stackoverflow.com/q/10779564/873165 // Use ImagesService to get a URL that points to the uploaded file. ImagesService imagesService = ImagesServiceFactory.getImagesService(); ServingUrlOptions options = ServingUrlOptions.Builder.withBlobKey(blobKey); // To support running in Google Cloud Shell with AppEngine's dev server, we must use the relative // path to the image, rather than the path returned by imagesService which contains a host. try { URL url = new URL(imagesService.getServingUrl(options)); return url.getPath(); } catch (MalformedURLException e) { return imagesService.getServingUrl(options); } } }
Not done yet but working on getting comments to display
portfolio/src/main/java/com/google/sps/servlets/DataServlet.java
Not done yet but working on getting comments to display
Java
apache-2.0
4a759791b5334a03ede3232d02018b3ed8f74c0a
0
introproventures/graphql-jpa-query,introproventures/graphql-jpa-query,introproventures/graphql-jpa-query
/* * Copyright 2017 IntroPro Ventures Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.introproventures.graphql.jpa.query.schema.impl; import java.util.Collection; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; import javax.persistence.EntityGraph; import javax.persistence.EntityManager; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.From; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import javax.persistence.metamodel.EntityType; import javax.persistence.metamodel.SingularAttribute; import graphql.language.Argument; import graphql.language.BooleanValue; import graphql.language.Field; import graphql.schema.DataFetchingEnvironment; import graphql.schema.DataFetchingEnvironmentBuilder; import graphql.schema.GraphQLObjectType; /** * JPA Query DataFetcher implementation that fetches entities with page and where criteria expressions * * @author Igor Dianov * */ class GraphQLJpaQueryDataFetcher extends QraphQLJpaBaseDataFetcher { private boolean defaultDistinct = false; private static final String HIBERNATE_QUERY_PASS_DISTINCT_THROUGH = "hibernate.query.passDistinctThrough"; private static final String ORG_HIBERNATE_CACHEABLE = "org.hibernate.cacheable"; private static final String ORG_HIBERNATE_FETCH_SIZE = "org.hibernate.fetchSize"; private static final String ORG_HIBERNATE_READ_ONLY = "org.hibernate.readOnly"; private static final String JAVAX_PERSISTENCE_FETCHGRAPH = "javax.persistence.fetchgraph"; public GraphQLJpaQueryDataFetcher(EntityManager entityManager, EntityType<?> entityType) { super(entityManager, entityType); } public GraphQLJpaQueryDataFetcher(EntityManager entityManager, EntityType<?> entityType, boolean defaultDistinct) { super(entityManager, entityType); this.defaultDistinct = defaultDistinct; } public boolean isDefaultDistinct() { return defaultDistinct; } public void setDefaultDistinct(boolean defaultDistinct) { this.defaultDistinct = defaultDistinct; } @Override public Object get(DataFetchingEnvironment environment) { Field field = environment.getFields().iterator().next(); Map<String, Object> result = new LinkedHashMap<>(); // See which fields we're requesting Optional<Field> pagesSelection = getSelectionField(field, GraphQLJpaSchemaBuilder.PAGE_PAGES_PARAM_NAME); Optional<Field> totalSelection = getSelectionField(field, GraphQLJpaSchemaBuilder.PAGE_TOTAL_PARAM_NAME); Optional<Field> recordsSelection = getSelectionField(field, GraphQLJpaSchemaBuilder.QUERY_SELECT_PARAM_NAME); Optional<Argument> pageArgument = getPageArgument(field); Page page = extractPageArgument(environment, field); Argument distinctArg = extractArgument(environment, field, GraphQLJpaSchemaBuilder.SELECT_DISTINCT_PARAM_NAME, new BooleanValue(defaultDistinct)); boolean isDistinct = 
((BooleanValue) distinctArg.getValue()).isValue(); DataFetchingEnvironment queryEnvironment = environment; Field queryField = field; if (recordsSelection.isPresent()) { // Override query environment String fieldName = recordsSelection.get().getName(); queryEnvironment = Optional.of(getFieldDef(environment.getGraphQLSchema(), (GraphQLObjectType)environment.getParentType(), field)) .map(it -> (GraphQLObjectType) it.getType()) .map(it -> it.getFieldDefinition(GraphQLJpaSchemaBuilder.QUERY_SELECT_PARAM_NAME)) .map(it -> DataFetchingEnvironmentBuilder.newDataFetchingEnvironment(environment) .fieldType(it.getType()) .build() ).orElse(environment); queryField = new Field(fieldName, field.getArguments(), recordsSelection.get().getSelectionSet()); // Let's clear session persistent context to avoid getting stale objects cached in the same session // between requests with different search criteria. This looks like a Hibernate bug... entityManager.clear(); TypedQuery<?> query = getQuery(queryEnvironment, queryField, isDistinct); // Let's apply page only if present if(pageArgument.isPresent()) { query .setMaxResults(page.size) .setFirstResult((page.page - 1) * page.size); } // Let's create entity graph from selection // When using fetchgraph all relationships are considered to be lazy regardless of annotation, // and only the elements of the provided graph are loaded. This particularly useful when running // reports on certain objects and you don't want a lot of the stuff that's normally flagged to // load via eager annotations. EntityGraph<?> graph = buildEntityGraph(queryField); query.setHint(JAVAX_PERSISTENCE_FETCHGRAPH, graph); // Let' try reduce overhead and disable all caching query.setHint(ORG_HIBERNATE_READ_ONLY, true); query.setHint(ORG_HIBERNATE_FETCH_SIZE, 1000); query.setHint(ORG_HIBERNATE_CACHEABLE, false); // Let's not pass distinct if enabled to have better performance if(isDistinct) { query.setHint(HIBERNATE_QUERY_PASS_DISTINCT_THROUGH, false); } // Let's remove any duplicate references for root entities Collection<?> resultList = isDistinct ? 
new LinkedHashSet<Object>(query.getResultList()) : query.getResultList(); result.put(GraphQLJpaSchemaBuilder.QUERY_SELECT_PARAM_NAME, resultList); } if (totalSelection.isPresent() || pagesSelection.isPresent()) { final DataFetchingEnvironment countQueryEnvironment = queryEnvironment; final Field countQueryField = queryField; final Long total = recordsSelection .map(contentField -> getCountQuery(countQueryEnvironment, countQueryField).getSingleResult()) // if no "content" was selected an empty Field can be used .orElseGet(() -> getCountQuery(environment, new Field("count")).getSingleResult()); result.put(GraphQLJpaSchemaBuilder.PAGE_TOTAL_PARAM_NAME, total); result.put(GraphQLJpaSchemaBuilder.PAGE_PAGES_PARAM_NAME, ((Double) Math.ceil(total / (double) page.size)).longValue()); } return result; } @Override protected Predicate getPredicate(CriteriaBuilder cb, Root<?> root, From<?,?> path, DataFetchingEnvironment environment, Argument argument) { if(isLogicalArgument(argument) || isDistinctArgument(argument)) return null; if(isWhereArgument(argument)) return getWherePredicate(cb, root, path, argumentEnvironment(environment, argument.getName()), argument); return super.getPredicate(cb, root, path, environment, argument); } private TypedQuery<Long> getCountQuery(DataFetchingEnvironment environment, Field field) { CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery<Long> query = cb.createQuery(Long.class); Root<?> root = query.from(entityType); SingularAttribute<?,?> idAttribute = entityType.getId(Object.class); query.select(cb.count(root.get(idAttribute.getName()))); List<Predicate> predicates = field.getArguments().stream() .map(it -> getPredicate(cb, root, null, environment, it)) .filter(it -> it != null) .collect(Collectors.toList()); query.where(predicates.toArray(new Predicate[predicates.size()])); return entityManager.createQuery(query); } private Optional<Argument> getPageArgument(Field field) { return field.getArguments() .stream() .filter(it -> GraphQLJpaSchemaBuilder.PAGE_PARAM_NAME.equals(it.getName())) .findFirst(); } private Page extractPageArgument(DataFetchingEnvironment environment, Field field) { Optional<Argument> paginationRequest = getPageArgument(field); if (paginationRequest.isPresent()) { field.getArguments() .remove(paginationRequest.get()); Map<String, Integer> pagex = environment.getArgument(GraphQLJpaSchemaBuilder.PAGE_PARAM_NAME); Integer start = pagex.get(GraphQLJpaSchemaBuilder.PAGE_START_PARAM_NAME); Integer limit = pagex.get(GraphQLJpaSchemaBuilder.PAGE_LIMIT_PARAM_NAME); return new Page(start, limit); } return new Page(1, Integer.MAX_VALUE); } private Boolean isWhereArgument(Argument argument) { return GraphQLJpaSchemaBuilder.QUERY_WHERE_PARAM_NAME.equals(argument.getName()); } private Boolean isLogicalArgument(Argument argument) { return GraphQLJpaSchemaBuilder.QUERY_LOGICAL_PARAM_NAME.equals(argument.getName()); } private Boolean isDistinctArgument(Argument argument) { return GraphQLJpaSchemaBuilder.SELECT_DISTINCT_PARAM_NAME.equals(argument.getName()); } private static final class Page { public Integer page; public Integer size; public Page(Integer page, Integer size) { this.page = page; this.size = size; } } }
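To make the fetch-graph comment above concrete, here is a minimal, generic JPA 2.1 sketch of building an EntityGraph and passing it through the javax.persistence.fetchgraph hint, so that only the attributes named in the graph are loaded eagerly. The Book and Author entities are placeholders invented for illustration and have nothing to do with this project's model.

import java.util.List;
import javax.persistence.Entity;
import javax.persistence.EntityGraph;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.TypedQuery;

// Placeholder entities, purely for illustration.
@Entity
class Author {
    @Id Long id;
    String name;
}

@Entity
class Book {
    @Id Long id;
    String title;
    @ManyToOne Author author;
}

public class FetchGraphSketch {
    public static List<Book> booksWithAuthors(EntityManager em) {
        // With a fetch graph, only the attributes named in the graph are fetched eagerly;
        // every other association is treated as lazy, regardless of its mapping annotations.
        EntityGraph<Book> graph = em.createEntityGraph(Book.class);
        graph.addAttributeNodes("title", "author");

        TypedQuery<Book> query = em.createQuery("select b from Book b", Book.class);
        query.setHint("javax.persistence.fetchgraph", graph);
        return query.getResultList();
    }
}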
graphql-jpa-query-schema/src/main/java/com/introproventures/graphql/jpa/query/schema/impl/GraphQLJpaQueryDataFetcher.java
/* * Copyright 2017 IntroPro Ventures Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.introproventures.graphql.jpa.query.schema.impl; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; import javax.persistence.EntityGraph; import javax.persistence.EntityManager; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.From; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import javax.persistence.metamodel.EntityType; import javax.persistence.metamodel.SingularAttribute; import graphql.language.Argument; import graphql.language.BooleanValue; import graphql.language.Field; import graphql.schema.DataFetchingEnvironment; import graphql.schema.DataFetchingEnvironmentBuilder; import graphql.schema.GraphQLObjectType; /** * JPA Query DataFetcher implementation that fetches entities with page and where criteria expressions * * @author Igor Dianov * */ class GraphQLJpaQueryDataFetcher extends QraphQLJpaBaseDataFetcher { private boolean defaultDistinct = false; private static final String HIBERNATE_QUERY_PASS_DISTINCT_THROUGH = "hibernate.query.passDistinctThrough"; private static final String ORG_HIBERNATE_CACHEABLE = "org.hibernate.cacheable"; private static final String ORG_HIBERNATE_FETCH_SIZE = "org.hibernate.fetchSize"; private static final String ORG_HIBERNATE_READ_ONLY = "org.hibernate.readOnly"; private static final String JAVAX_PERSISTENCE_FETCHGRAPH = "javax.persistence.fetchgraph"; public GraphQLJpaQueryDataFetcher(EntityManager entityManager, EntityType<?> entityType) { super(entityManager, entityType); } public GraphQLJpaQueryDataFetcher(EntityManager entityManager, EntityType<?> entityType, boolean defaultDistinct) { super(entityManager, entityType); this.defaultDistinct = defaultDistinct; } public boolean isDefaultDistinct() { return defaultDistinct; } public void setDefaultDistinct(boolean defaultDistinct) { this.defaultDistinct = defaultDistinct; } @Override public Object get(DataFetchingEnvironment environment) { Field field = environment.getFields().iterator().next(); Map<String, Object> result = new LinkedHashMap<>(); // See which fields we're requesting Optional<Field> pagesSelection = getSelectionField(field, GraphQLJpaSchemaBuilder.PAGE_PAGES_PARAM_NAME); Optional<Field> totalSelection = getSelectionField(field, GraphQLJpaSchemaBuilder.PAGE_TOTAL_PARAM_NAME); Optional<Field> recordsSelection = getSelectionField(field, GraphQLJpaSchemaBuilder.QUERY_SELECT_PARAM_NAME); Optional<Argument> pageArgument = getPageArgument(field); Page page = extractPageArgument(environment, field); Argument distinctArg = extractArgument(environment, field, GraphQLJpaSchemaBuilder.SELECT_DISTINCT_PARAM_NAME, new BooleanValue(defaultDistinct)); boolean isDistinct = ((BooleanValue) distinctArg.getValue()).isValue(); DataFetchingEnvironment 
queryEnvironment = environment; Field queryField = field; if (recordsSelection.isPresent()) { // Override query environment String fieldName = recordsSelection.get().getName(); queryEnvironment = Optional.of(getFieldDef(environment.getGraphQLSchema(), (GraphQLObjectType)environment.getParentType(), field)) .map(it -> (GraphQLObjectType) it.getType()) .map(it -> it.getFieldDefinition(GraphQLJpaSchemaBuilder.QUERY_SELECT_PARAM_NAME)) .map(it -> DataFetchingEnvironmentBuilder.newDataFetchingEnvironment(environment) .fieldType(it.getType()) .build() ).orElse(environment); queryField = new Field(fieldName, field.getArguments(), recordsSelection.get().getSelectionSet()); // Let's clear session persistent context to avoid getting stale objects cached in the same session // between requests with different search criteria. This looks like a Hibernate bug... entityManager.clear(); TypedQuery<?> query = getQuery(queryEnvironment, queryField, isDistinct); // Let's apply page only if present if(pageArgument.isPresent()) { query .setMaxResults(page.size) .setFirstResult((page.page - 1) * page.size); } // Let's create entity graph from selection // When using fetchgraph all relationships are considered to be lazy regardless of annotation, // and only the elements of the provided graph are loaded. This particularly useful when running // reports on certain objects and you don't want a lot of the stuff that's normally flagged to // load via eager annotations. EntityGraph<?> graph = buildEntityGraph(queryField); query.setHint(JAVAX_PERSISTENCE_FETCHGRAPH, graph); // Let' try reduce overhead and disable all caching query.setHint(ORG_HIBERNATE_READ_ONLY, true); query.setHint(ORG_HIBERNATE_FETCH_SIZE, 1000); query.setHint(ORG_HIBERNATE_CACHEABLE, false); // Let's pass distinct if enabled if(isDistinct) { query.setHint(HIBERNATE_QUERY_PASS_DISTINCT_THROUGH, true); } result.put(GraphQLJpaSchemaBuilder.QUERY_SELECT_PARAM_NAME, query.getResultList()); } if (totalSelection.isPresent() || pagesSelection.isPresent()) { final DataFetchingEnvironment countQueryEnvironment = queryEnvironment; final Field countQueryField = queryField; final Long total = recordsSelection .map(contentField -> getCountQuery(countQueryEnvironment, countQueryField).getSingleResult()) // if no "content" was selected an empty Field can be used .orElseGet(() -> getCountQuery(environment, new Field("count")).getSingleResult()); result.put(GraphQLJpaSchemaBuilder.PAGE_TOTAL_PARAM_NAME, total); result.put(GraphQLJpaSchemaBuilder.PAGE_PAGES_PARAM_NAME, ((Double) Math.ceil(total / (double) page.size)).longValue()); } return result; } @Override protected Predicate getPredicate(CriteriaBuilder cb, Root<?> root, From<?,?> path, DataFetchingEnvironment environment, Argument argument) { if(isLogicalArgument(argument) || isDistinctArgument(argument)) return null; if(isWhereArgument(argument)) return getWherePredicate(cb, root, path, argumentEnvironment(environment, argument.getName()), argument); return super.getPredicate(cb, root, path, environment, argument); } private TypedQuery<Long> getCountQuery(DataFetchingEnvironment environment, Field field) { CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery<Long> query = cb.createQuery(Long.class); Root<?> root = query.from(entityType); SingularAttribute<?,?> idAttribute = entityType.getId(Object.class); query.select(cb.count(root.get(idAttribute.getName()))); List<Predicate> predicates = field.getArguments().stream() .map(it -> getPredicate(cb, root, null, environment, it)) .filter(it -> it 
!= null) .collect(Collectors.toList()); query.where(predicates.toArray(new Predicate[predicates.size()])); return entityManager.createQuery(query); } private Optional<Argument> getPageArgument(Field field) { return field.getArguments() .stream() .filter(it -> GraphQLJpaSchemaBuilder.PAGE_PARAM_NAME.equals(it.getName())) .findFirst(); } private Page extractPageArgument(DataFetchingEnvironment environment, Field field) { Optional<Argument> paginationRequest = getPageArgument(field); if (paginationRequest.isPresent()) { field.getArguments() .remove(paginationRequest.get()); Map<String, Integer> pagex = environment.getArgument(GraphQLJpaSchemaBuilder.PAGE_PARAM_NAME); Integer start = pagex.get(GraphQLJpaSchemaBuilder.PAGE_START_PARAM_NAME); Integer limit = pagex.get(GraphQLJpaSchemaBuilder.PAGE_LIMIT_PARAM_NAME); return new Page(start, limit); } return new Page(1, Integer.MAX_VALUE); } private Boolean isWhereArgument(Argument argument) { return GraphQLJpaSchemaBuilder.QUERY_WHERE_PARAM_NAME.equals(argument.getName()); } private Boolean isLogicalArgument(Argument argument) { return GraphQLJpaSchemaBuilder.QUERY_LOGICAL_PARAM_NAME.equals(argument.getName()); } private Boolean isDistinctArgument(Argument argument) { return GraphQLJpaSchemaBuilder.SELECT_DISTINCT_PARAM_NAME.equals(argument.getName()); } private static final class Page { public Integer page; public Integer size; public Page(Integer page, Integer size) { this.page = page; this.size = size; } } }
fix: Let's not pass DISTINCT in JPQL for better performance
graphql-jpa-query-schema/src/main/java/com/introproventures/graphql/jpa/query/schema/impl/GraphQLJpaQueryDataFetcher.java
fix: Let's not pass DISTINCT in JPQL for better performance
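The commit recorded above stops Hibernate from copying DISTINCT into the generated SQL and removes duplicate root entities in memory instead. A minimal, standalone sketch of that pattern follows; it is illustrative only: the fetchDistinct helper and the example JPQL are hypothetical, and the hibernate.query.passDistinctThrough hint assumes Hibernate 5.2.2 or later.

import java.util.Collection;
import java.util.LinkedHashSet;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;

public class DistinctFetchSketch {

    // Hibernate-specific query hint (assumed available in Hibernate 5.2.2+).
    private static final String HIBERNATE_QUERY_PASS_DISTINCT_THROUGH = "hibernate.query.passDistinctThrough";

    // Hypothetical helper: run a JPQL query that join-fetches collections and
    // return each root entity only once, without emitting DISTINCT in the SQL statement.
    public static <T> Collection<T> fetchDistinct(EntityManager em, Class<T> type, String jpql) {
        TypedQuery<T> query = em.createQuery(jpql, type);

        // Keep a JPQL-level DISTINCT from being copied into the SQL, where it
        // would only add sorting/deduplication work for the database.
        query.setHint(HIBERNATE_QUERY_PASS_DISTINCT_THROUGH, false);

        // Deduplicate root entities in memory while preserving result order,
        // mirroring the LinkedHashSet used in the data fetcher above.
        return new LinkedHashSet<>(query.getResultList());
    }
}

A call such as fetchDistinct(em, Book.class, "select distinct b from Book b join fetch b.authors") would then return each Book once; Book is a made-up entity used only for this example.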
Java
apache-2.0
3ffbdb6eab604153a76ff244547cbfe8fbce88e5
0
google/j2objc,groschovskiy/j2objc,mirego/j2objc,mirego/j2objc,groschovskiy/j2objc,groschovskiy/j2objc,mirego/j2objc,google/j2objc,mirego/j2objc,groschovskiy/j2objc,lukhnos/j2objc,groschovskiy/j2objc,lukhnos/j2objc,groschovskiy/j2objc,google/j2objc,lukhnos/j2objc,mirego/j2objc,google/j2objc,google/j2objc,lukhnos/j2objc,mirego/j2objc,lukhnos/j2objc,lukhnos/j2objc,google/j2objc
/* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.tests.java.nio.charset; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.CharacterCodingException; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.CharsetEncoder; import java.nio.charset.CoderResult; import java.nio.charset.CodingErrorAction; import java.nio.charset.MalformedInputException; import java.nio.charset.UnmappableCharacterException; import java.nio.charset.UnsupportedCharsetException; import java.util.Arrays; import junit.framework.TestCase; /** * API unit test for java.nio.charset.CharsetEncoder */ public class CharsetEncoderTest extends TestCase { static final int MAX_BYTES = 3; static final float AVER_BYTES = 0.5f; // charset for mock class private static final Charset MOCKCS = new MockCharset("CharsetEncoderTest_mock", new String[0]); Charset cs = MOCKCS; // default encoder CharsetEncoder encoder; // default for Charset abstract class byte[] defaultReplacement = new byte[] { 63 }; // specific for Charset implementation subclass byte[] specifiedReplacement = new byte[] { 63 }; static final String unistr = " buffer";// \u8000\u8001\u00a5\u3000\r\n"; byte[] unibytes = new byte[] { 32, 98, 117, 102, 102, 101, 114 }; byte[] unibytesWithRep = null; byte[] surrogate = new byte[0]; protected void setUp() throws Exception { super.setUp(); encoder = cs.newEncoder(); if (null == unibytesWithRep) { byte[] replacement = encoder.replacement(); unibytesWithRep = new byte[replacement.length + unibytes.length]; System.arraycopy(replacement, 0, unibytesWithRep, 0, replacement.length); System.arraycopy(unibytes, 0, unibytesWithRep, replacement.length, unibytes.length); } } /* * @see TestCase#tearDown() */ protected void tearDown() throws Exception { super.tearDown(); } public void testSpecificDefaultValue() { assertTrue(encoder.averageBytesPerChar() == AVER_BYTES); assertTrue(encoder.maxBytesPerChar() == MAX_BYTES); } public void testDefaultValue() { assertEquals(CodingErrorAction.REPORT, encoder.malformedInputAction()); assertEquals(CodingErrorAction.REPORT, encoder.unmappableCharacterAction()); assertSame(encoder, encoder.onMalformedInput(CodingErrorAction.IGNORE)); assertSame(encoder, encoder.onUnmappableCharacter(CodingErrorAction.IGNORE)); if (encoder instanceof MockCharsetEncoder) { assertTrue(Arrays.equals(encoder.replacement(), defaultReplacement)); } else { assertTrue(Arrays.equals(encoder.replacement(), specifiedReplacement)); } } /* * Class under test for constructor CharsetEncoder(Charset, float, float) */ public void testCharsetEncoderCharsetfloatfloat() { // default value encoder = new MockCharsetEncoder(cs, (float) AVER_BYTES, MAX_BYTES); assertSame(encoder.charset(), cs); 
assertTrue(encoder.averageBytesPerChar() == AVER_BYTES); assertTrue(encoder.maxBytesPerChar() == MAX_BYTES); assertEquals(CodingErrorAction.REPORT, encoder.malformedInputAction()); assertEquals(CodingErrorAction.REPORT, encoder .unmappableCharacterAction()); assertEquals(new String(encoder.replacement()), new String( defaultReplacement)); assertSame(encoder, encoder.onMalformedInput(CodingErrorAction.IGNORE)); assertSame(encoder, encoder .onUnmappableCharacter(CodingErrorAction.IGNORE)); // normal case CharsetEncoder ec = new MockCharsetEncoder(cs, 1, MAX_BYTES); assertSame(ec.charset(), cs); assertEquals(1.0, ec.averageBytesPerChar(), 0); assertTrue(ec.maxBytesPerChar() == MAX_BYTES); /* * ------------------------ Exceptional cases ------------------------- */ // NullPointerException: null charset try { ec = new MockCharsetEncoder(null, 1, MAX_BYTES); fail("should throw null pointer exception"); } catch (NullPointerException e) { } ec = new MockCharsetEncoder(new MockCharset("mock", new String[0]), 1, MAX_BYTES); // Commented out since the comment is wrong since MAX_BYTES > 1 // // OK: average length less than max length // ec = new MockCharsetEncoder(cs, MAX_BYTES, 1); // assertTrue(ec.averageBytesPerChar() == MAX_BYTES); // assertTrue(ec.maxBytesPerChar() == 1); // Illegal Argument: zero length try { ec = new MockCharsetEncoder(cs, 0, MAX_BYTES); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, 0); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: negative length try { ec = new MockCharsetEncoder(cs, -1, MAX_BYTES); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, -1); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } } /* * Class under test for constructor CharsetEncoder(Charset, float, float, * byte[]) */ public void testCharsetEncoderCharsetfloatfloatbyteArray() { byte[] ba = getLegalByteArray(); // normal case CharsetEncoder ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, ba); assertSame(ec.charset(), cs); assertEquals(1.0, ec.averageBytesPerChar(), 0.0); assertTrue(ec.maxBytesPerChar() == MAX_BYTES); assertSame(ba, ec.replacement()); /* * ------------------------ Exceptional cases ------------------------- */ // NullPointerException: null charset try { ec = new MockCharsetEncoder(null, 1, MAX_BYTES, ba); fail("should throw null pointer exception"); } catch (NullPointerException e) { } // Illegal Argument: null byte array try { ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, null); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: empty byte array try { ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, new byte[0]); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: byte array is longer than max length try { ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, new byte[] { 1, 2, MAX_BYTES, 4 }); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Commented out since the comment is wrong since MAX_BYTES > 1 // This test throws IllegalArgumentException on Harmony and RI // // OK: average length less than max length // ec = new MockCharsetEncoder(cs, MAX_BYTES, ba.length, ba); // assertTrue(ec.averageBytesPerChar() == MAX_BYTES); // assertTrue(ec.maxBytesPerChar() == ba.length); // Illegal 
Argument: zero length try { ec = new MockCharsetEncoder(cs, 0, MAX_BYTES, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, 0, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: negative length try { ec = new MockCharsetEncoder(cs, -1, MAX_BYTES, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, -1, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } } /* * Class under test for boolean canEncode(char) */ public void testCanEncodechar() throws CharacterCodingException { // for non-mapped char assertTrue(encoder.canEncode('\uc2c0')); // surrogate char for unicode // 1st byte: d800-dbff // 2nd byte: dc00-dfff assertTrue(encoder.canEncode('\ud800')); // valid surrogate pair assertTrue(encoder.canEncode('\udc00')); } /*----------------------------------------- * Class under test for illegal state case * methods which can change internal states are two encode, flush, two canEncode, reset * ----------------------------------------- */ // Normal case: just after reset, and it also means reset can be done // anywhere public void testResetIllegalState() throws CharacterCodingException { assertSame(encoder, encoder.reset()); encoder.canEncode('\ud901'); assertSame(encoder, encoder.reset()); encoder.canEncode("\ud901\udc00"); assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("aaa")); assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("aaa"), ByteBuffer.allocate(3), false); assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("aaa"), ByteBuffer.allocate(3), true); assertSame(encoder, encoder.reset()); } public void testFlushIllegalState() throws CharacterCodingException { CharBuffer in = CharBuffer.wrap("aaa"); ByteBuffer out = ByteBuffer.allocate(5); // Illegal state: after reset. 
encoder.reset(); try { encoder.flush(out); fail(); } catch (IllegalStateException expected) { } // Normal case: after encode with endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(in, out, true); out.rewind(); CoderResult result = encoder.flush(out); // Good state: flush twice encoder.flush(out); // Illegal state: flush after encode with endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(in, out, false); try { encoder.flush(out); fail(); } catch (IllegalStateException expected) { } } public void testFlushAfterConstructing() { ByteBuffer out = ByteBuffer.allocate(5); //Illegal state: flush after instance created try { encoder.flush(out); fail("should throw IllegalStateException"); } catch (IllegalStateException e) { // Expected } } // test illegal states for encode facade public void testEncodeFacadeIllegalState() throws CharacterCodingException { // encode facade can be execute in anywhere CharBuffer in = CharBuffer.wrap("aaa"); // Normal case: just created encoder.encode(in); in.rewind(); // Normal case: just after encode facade encoder.encode(in); in.rewind(); // Normal case: just after canEncode assertSame(encoder, encoder.reset()); encoder.canEncode("\ud902\udc00"); encoder.encode(in); in.rewind(); assertSame(encoder, encoder.reset()); encoder.canEncode('\ud902'); encoder.encode(in); in.rewind(); // Normal case: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); encoder.encode(in); in.rewind(); // Normal case:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); encoder.encode(in); in.rewind(); // Normal case: just after flush assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); encoder.encode(in); in.rewind(); } // test illegal states for two encode method with endOfInput is true public void testEncodeTrueIllegalState() throws CharacterCodingException { CharBuffer in = CharBuffer.wrap("aaa"); ByteBuffer out = ByteBuffer.allocate(5); // Normal case: just created encoder.encode(in, out, true); in.rewind(); out.rewind(); in.rewind(); out.rewind(); // Normal case: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); encoder.encode(in, out, true); in.rewind(); out.rewind(); // Normal case:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); encoder.encode(in, out, true); in.rewind(); out.rewind(); // Illegal state: just after flush assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); try { encoder.encode(in, out, true); fail("should illegal state"); } catch (IllegalStateException e) { } // Normal case: after canEncode assertSame(encoder, encoder.reset()); encoder.canEncode("\ud906\udc00"); encoder.encode(in, out, true); in.rewind(); out.rewind(); assertSame(encoder, encoder.reset()); encoder.canEncode('\ud905'); encoder.encode(in, out, true); } // test illegal states for two encode method with endOfInput is false 
public void testEncodeFalseIllegalState() throws CharacterCodingException { CharBuffer in = CharBuffer.wrap("aaa"); ByteBuffer out = ByteBuffer.allocate(5); // Normal case: just created encoder.encode(in, out, false); in.rewind(); out.rewind(); // Illegal state: just after encode facade assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState1")); try { encoder.encode(in, out, false); fail("should illegal state"); } catch (IllegalStateException e) { } // Illegal state: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); try { encoder.encode(in, out, false); fail("should illegal state"); } catch (IllegalStateException e) { } // Normal case:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); encoder.encode(in, out, false); in.rewind(); out.rewind(); // Illegal state: just after flush assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); try { encoder.encode(in, out, false); fail("should illegal state"); } catch (IllegalStateException e) { } // Normal case: after canEncode assertSame(encoder, encoder.reset()); encoder.canEncode("\ud906\udc00"); encoder.encode(in, out, false); in.rewind(); out.rewind(); assertSame(encoder, encoder.reset()); encoder.canEncode('\ud905'); encoder.encode(in, out, false); } // test illegal states for two canEncode methods public void testCanEncodeIllegalState() throws CharacterCodingException { // Normal case: just created encoder.canEncode("\ud900\udc00"); encoder.canEncode('\ud900'); // Illegal state: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); try { encoder.canEncode("\ud903\udc00"); fail("should throw illegal state exception"); } catch (IllegalStateException e) { } // Illegal state:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); try { encoder.canEncode("\ud904\udc00"); fail("should throw illegal state exception"); } catch (IllegalStateException e) { } // Normal case: just after flush encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); encoder.canEncode("\ud905\udc00"); encoder.canEncode('\ud906'); // Normal case: after reset again assertSame(encoder, encoder.reset()); encoder.canEncode("\ud906\udc00"); encoder.canEncode('\ud905'); } /* * --------------------------------- illegal state test end * --------------------------------- */ /* * Class under test for boolean canEncode(CharSequence) */ public void testCanEncodeCharSequence() { // for non-mapped char assertTrue(encoder.canEncode("\uc2c0")); // surrogate char for unicode // 1st byte: d800-dbff // 2nd byte: dc00-dfff // valid surrogate pair assertTrue(encoder.canEncode("\ud800\udc00")); // invalid surrogate pair assertTrue(encoder.canEncode("\ud800\udb00")); } public void test_canEncode_empty() throws Exception { assertTrue(encoder.canEncode("")); } public void test_canEncode_null() throws Exception { try { encoder.canEncode(null); fail(); } catch 
(NullPointerException e) { } } /* * Class under test for Charset charset() */ public void testCharset() { try { encoder = new MockCharsetEncoder(Charset.forName("gbk"), 1, MAX_BYTES); // assertSame(encoder.charset(), Charset.forName("gbk")); } catch (UnsupportedCharsetException e) { System.err .println("Don't support GBK encoding, ignore current test"); } } /* * Class under test for ByteBuffer encode(CharBuffer) */ public void testEncodeCharBuffer() throws CharacterCodingException { // Null pointer try { encoder.encode(null); fail("should throw null pointer exception"); } catch (NullPointerException e) { } // empty input buffer ByteBuffer out = encoder.encode(CharBuffer.wrap("")); assertEquals(out.position(), 0); assertByteArray(out, new byte[0]); // assertByteArray(out, surrogate); // normal case out = encoder.encode(CharBuffer.wrap(unistr)); assertEquals(out.position(), 0); assertByteArray(out, addSurrogate(unibytes)); // Regression test for harmony-3378 Charset cs = Charset.forName("UTF-8"); CharsetEncoder encoder = cs.newEncoder(); encoder.onMalformedInput(CodingErrorAction.REPLACE); encoder = encoder.replaceWith(new byte[] { (byte) 0xef, (byte) 0xbf, (byte) 0xbd, }); CharBuffer in = CharBuffer.wrap("\ud800"); out = encoder.encode(in); assertNotNull(out); } private byte[] addSurrogate(byte[] expected) { if (surrogate.length > 0) { byte[] temp = new byte[surrogate.length + expected.length]; System.arraycopy(surrogate, 0, temp, 0, surrogate.length); System.arraycopy(expected, 0, temp, surrogate.length, expected.length); expected = temp; } return expected; } /** * @return */ protected byte[] getEmptyByteArray() { return new byte[0]; } CharBuffer getMalformedCharBuffer() { return CharBuffer.wrap("malform buffer"); } CharBuffer getUnmapCharBuffer() { return CharBuffer.wrap("unmap buffer"); } CharBuffer getExceptionCharBuffer() { return CharBuffer.wrap("runtime buffer"); } public void testEncodeCharBufferException() throws CharacterCodingException { ByteBuffer out; CharBuffer in; // MalformedException: in = getMalformedCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { try { // regression test for Harmony-1379 encoder.encode(in); fail("should throw MalformedInputException"); } catch (MalformedInputException e) { } encoder.reset(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.IGNORE); out = encoder.encode(in); assertByteArray(out, addSurrogate(unibytes)); encoder.reset(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.REPLACE); out = encoder.encode(in); assertByteArray(out, addSurrogate(unibytesWithRep)); } // Unmapped Exception: in = getUnmapCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { encoder.reset(); try { encoder.encode(in); fail("should throw UnmappableCharacterException"); } catch (UnmappableCharacterException e) { } encoder.reset(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.IGNORE); out = encoder.encode(in); assertByteArray(out, unibytes); encoder.reset(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.REPLACE); out = encoder.encode(in); assertByteArray(out, unibytesWithRep); } // RuntimeException try { encoder.encode(getExceptionCharBuffer()); fail("should throw runtime exception"); } catch (RuntimeException e) { } } /* * utility method, extract given bytebuffer to a string and compare with * give string */ void assertByteArray(ByteBuffer out, byte[] 
expected) { out = out.duplicate(); if (out.position() != 0) { out.flip(); } byte[] ba = new byte[out.limit() - out.position()]; out.get(ba); // byte[] ba = out.array(); assertTrue(Arrays.equals(ba, expected)); } /* * Class under test for CoderResult encode(CharBuffer, ByteBuffer, boolean) */ public void testEncodeCharBufferByteBufferboolean() throws CharacterCodingException { ByteBuffer out = ByteBuffer.allocate(200); CharBuffer in = CharBuffer.wrap(unistr); // Null pointer try { encoder.encode(null, out, true); fail("should throw null pointer exception"); } catch (NullPointerException e) { } try { encoder.encode(in, null, true); fail("should throw null pointer exception"); } catch (NullPointerException e) { } // normal case, one complete operation assertSame(encoder, encoder.reset()); in.rewind(); out.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); in.rewind(); encoder.flush(out); // normal case, one complete operation, but call twice, first time set // endOfInput to false assertSame(encoder, encoder.reset()); in.rewind(); out = ByteBuffer.allocate(200); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false)); in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(duplicateByteArray(unibytes, 3))); // overflow out = ByteBuffer.allocate(4); assertSame(encoder, encoder.reset()); in.rewind(); out.rewind(); assertSame(CoderResult.OVERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 4); assertEquals(out.position(), 4); assertEquals(out.remaining(), 0); assertEquals(out.capacity(), 4); ByteBuffer temp = ByteBuffer.allocate(200); out.flip(); temp.put(out); out = temp; assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); assertSame(encoder, encoder.reset()); in.rewind(); out = ByteBuffer.allocate(4); assertSame(CoderResult.OVERFLOW, encoder.encode(in, out, false)); assertEquals(out.limit(), 4); assertEquals(out.position(), 4); assertEquals(out.remaining(), 0); assertEquals(out.capacity(), 4); temp = ByteBuffer.allocate(200); out.flip(); temp.put(out); out = temp; assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); } // void printByteBuffer(ByteBuffer buffer) { // System.out.println("print buffer"); // if (buffer.position() != 0) { // buffer.flip(); // } // byte[] ba = buffer.array(); // for (int i = 0; i < ba.length; i++) { // System.out.println(Integer.toHexString(ba[i])); // } // } public void testEncodeCharBufferByteBufferbooleanExceptionFalse() throws CharacterCodingException { implTestEncodeCharBufferByteBufferbooleanException(false); } public void 
testEncodeCharBufferByteBufferbooleanExceptionTrue() throws CharacterCodingException { implTestEncodeCharBufferByteBufferbooleanException(true); } private byte[] duplicateByteArray(byte[] ba, int times) { byte[] result = new byte[ba.length * times]; for (int i = 0; i < times; i++) { System.arraycopy(ba, 0, result, i * ba.length, ba.length); } return result; } protected void implTestEncodeCharBufferByteBufferbooleanException( boolean endOfInput) throws CharacterCodingException { ByteBuffer out = ByteBuffer.allocate(100); // MalformedException: CharBuffer in = getMalformedCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { encoder.reset(); CoderResult r = encoder.encode(in, out, endOfInput); assertTrue(r.isMalformed()); encoder.reset(); out.clear(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.IGNORE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytes); encoder.reset(); out.clear(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.REPLACE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytesWithRep); // } else { // System.out.println("Cannot find malformed char buffer for " // + cs.name()); } // Unmapped Exception: in = getUnmapCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { encoder.reset(); out.clear(); assertTrue(encoder.encode(in, out, endOfInput).isUnmappable()); encoder.reset(); out.clear(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.IGNORE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytes); encoder.reset(); out.clear(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.REPLACE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytesWithRep); // } else { // System.out.println("Cannot find unmapped char buffer for " // + cs.name()); } // RuntimeException try { encoder.encode(getExceptionCharBuffer()); fail("should throw runtime exception"); } catch (RuntimeException e) { } } private void assertCodingErrorAction(boolean endOfInput, ByteBuffer out, CharBuffer in, byte[] expect) { if (endOfInput) { assertByteArray(out, addSurrogate(expect)); } else { in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertByteArray(out, addSurrogate(duplicateByteArray(expect, 3))); } } /* * Class under test for CoderResult flush(ByteBuffer) */ public void testFlush() throws CharacterCodingException { ByteBuffer out = ByteBuffer.allocate(6); CharBuffer in = CharBuffer.wrap("aaa"); assertEquals(in.remaining(), 3); // by encode facade, so that internal state will be wrong encoder.encode(CharBuffer.wrap("testFlush"), ByteBuffer.allocate(20), true); assertSame(CoderResult.UNDERFLOW, encoder .flush(ByteBuffer.allocate(50))); } /* * test isLegalReplacement(byte[]) */ public void test_isLegalReplacement_null() { try { encoder.isLegalReplacement(null); fail("should throw null pointer exception"); } catch (NullPointerException e) { } } public void test_isLegalReplacement_good() { assertTrue(encoder.isLegalReplacement(specifiedReplacement)); } public void test_isLegalReplacement_bad() { 
assertTrue(encoder.isLegalReplacement(new byte[200])); byte[] ba = getIllegalByteArray(); if (ba != null) { assertFalse(encoder.isLegalReplacement(ba)); } } public void test_isLegalReplacement_empty_array() { // ISO, ASC, GB, UTF8 encoder will throw exception in RI // others will pass assertTrue(encoder.isLegalReplacement(new byte[0])); } public void testOnMalformedInput() { assertSame(CodingErrorAction.REPORT, encoder.malformedInputAction()); try { encoder.onMalformedInput(null); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } encoder.onMalformedInput(CodingErrorAction.IGNORE); assertSame(CodingErrorAction.IGNORE, encoder.malformedInputAction()); } public void testOnUnmappableCharacter() { assertSame(CodingErrorAction.REPORT, encoder .unmappableCharacterAction()); try { encoder.onUnmappableCharacter(null); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } encoder.onUnmappableCharacter(CodingErrorAction.IGNORE); assertSame(CodingErrorAction.IGNORE, encoder .unmappableCharacterAction()); } public void testReplacement() { try { encoder.replaceWith(null); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } try { encoder.replaceWith(new byte[0]); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } try { encoder.replaceWith(new byte[100]); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } byte[] nr = getLegalByteArray(); assertSame(encoder, encoder.replaceWith(nr)); assertSame(nr, encoder.replacement()); nr = getIllegalByteArray(); try { encoder.replaceWith(new byte[100]); fail(); } catch (IllegalArgumentException e) { } } protected byte[] getLegalByteArray() { return new byte[] { 'a' }; } protected byte[] getIllegalByteArray() { return new byte[155]; } /* * Mock subclass of CharsetEncoder For protected method test */ public static class MockCharsetEncoder extends CharsetEncoder { boolean flushed = false; public boolean isFlushed() { boolean result = flushed; flushed = false; return result; } public boolean isLegalReplacement(byte[] ba) { if (ba.length == 155) {// specified magic number, return false return false; } return super.isLegalReplacement(ba); } public MockCharsetEncoder(Charset cs, float aver, float max) { super(cs, aver, max); } public MockCharsetEncoder(Charset cs, float aver, float max, byte[] replacement) { super(cs, aver, max, replacement); } protected CoderResult encodeLoop(CharBuffer in, ByteBuffer out) { int inPosition = in.position(); char[] input = new char[in.remaining()]; in.get(input); String result = new String(input); if (result.startsWith("malform")) { // reset the cursor to the error position in.position(inPosition); // in.position(0); // set the error length return CoderResult.malformedForLength("malform".length()); } else if (result.startsWith("unmap")) { // reset the cursor to the error position in.position(inPosition); // in.position(0); // set the error length return CoderResult.unmappableForLength("unmap".length()); } else if (result.startsWith("runtime")) { // reset the cursor to the error position in.position(0); // set the error length throw new RuntimeException("runtime"); } int inLeft = input.length; int outLeft = out.remaining(); CoderResult r = CoderResult.UNDERFLOW; int length = inLeft; if (outLeft < inLeft) { r = CoderResult.OVERFLOW; length = outLeft; in.position(inPosition + outLeft); } for (int i = 0; i < length; i++) { out.put((byte) input[i]); } return r; } protected 
CoderResult implFlush(ByteBuffer out) { CoderResult result = super.implFlush(out); int length = 0; if (out.remaining() >= 5) { length = 5; result = CoderResult.UNDERFLOW; flushed = true; // for (int i = 0; i < length; i++) { // out.put((byte)'f'); // } } else { length = out.remaining(); result = CoderResult.OVERFLOW; } return result; } protected void implReplaceWith(byte[] ba) { assertSame(ba, replacement()); } } /* * mock charset for test encoder initialization */ public static class MockCharset extends Charset { protected MockCharset(String arg0, String[] arg1) { super(arg0, arg1); } public boolean contains(Charset arg0) { return false; } public CharsetDecoder newDecoder() { return new CharsetDecoderTest.MockCharsetDecoder(this, (float) AVER_BYTES, MAX_BYTES); } public CharsetEncoder newEncoder() { return new MockCharsetEncoder(this, (float) AVER_BYTES, MAX_BYTES); } } }
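The encoder tests above revolve around the CharsetEncoder call sequence (reset, zero or more encode calls with endOfInput false, a final encode with endOfInput true, then flush) and the configurable error actions. A small usage sketch against the standard java.nio.charset API; the charset, input text, and buffer size are arbitrary choices for illustration.

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.StandardCharsets;

public class CharsetEncoderSketch {
    public static void main(String[] args) throws CharacterCodingException {
        CharsetEncoder encoder = StandardCharsets.UTF_8.newEncoder()
                .onMalformedInput(CodingErrorAction.REPLACE)
                .onUnmappableCharacter(CodingErrorAction.REPLACE)
                .replaceWith(new byte[] { (byte) '?' });

        // The trailing lone high surrogate is malformed input for UTF-8 and will be replaced.
        CharBuffer in = CharBuffer.wrap("caf\u00e9 \ud800");
        ByteBuffer out = ByteBuffer.allocate(32);

        // Legal call sequence: reset, encode with endOfInput=true, then flush.
        encoder.reset();
        CoderResult rc = encoder.encode(in, out, true);
        if (rc.isError()) {
            rc.throwException();
        }
        encoder.flush(out);

        out.flip();
        System.out.println("encoded " + out.remaining() + " bytes");
    }
}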
jre_emul/android/platform/libcore/harmony-tests/src/test/java/org/apache/harmony/tests/java/nio/charset/CharsetEncoderTest.java
/* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.tests.java.nio.charset; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.CharacterCodingException; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.CharsetEncoder; import java.nio.charset.CoderResult; import java.nio.charset.CodingErrorAction; import java.nio.charset.MalformedInputException; import java.nio.charset.UnmappableCharacterException; import java.nio.charset.UnsupportedCharsetException; import java.util.Arrays; import junit.framework.TestCase; /** * API unit test for java.nio.charset.CharsetEncoder */ public class CharsetEncoderTest extends TestCase { static final int MAX_BYTES = 3; static final float AVER_BYTES = 0.5f; // charset for mock class private static final Charset MOCKCS = new MockCharset("CharsetEncoderTest_mock", new String[0]); Charset cs = MOCKCS; // default encoder CharsetEncoder encoder; // default for Charset abstract class byte[] defaultReplacement = new byte[] { 63 }; // specific for Charset implementation subclass byte[] specifiedReplacement = new byte[] { 63 }; static final String unistr = " buffer";// \u8000\u8001\u00a5\u3000\r\n"; byte[] unibytes = new byte[] { 32, 98, 117, 102, 102, 101, 114 }; byte[] unibytesWithRep = null; byte[] surrogate = new byte[0]; protected void setUp() throws Exception { super.setUp(); encoder = cs.newEncoder(); if (null == unibytesWithRep) { byte[] replacement = encoder.replacement(); unibytesWithRep = new byte[replacement.length + unibytes.length]; System.arraycopy(replacement, 0, unibytesWithRep, 0, replacement.length); System.arraycopy(unibytes, 0, unibytesWithRep, replacement.length, unibytes.length); } } /* * @see TestCase#tearDown() */ protected void tearDown() throws Exception { super.tearDown(); } public void testSpecificDefaultValue() { assertTrue(encoder.averageBytesPerChar() == AVER_BYTES); assertTrue(encoder.maxBytesPerChar() == MAX_BYTES); } public void testDefaultValue() { assertEquals(CodingErrorAction.REPORT, encoder.malformedInputAction()); assertEquals(CodingErrorAction.REPORT, encoder.unmappableCharacterAction()); assertSame(encoder, encoder.onMalformedInput(CodingErrorAction.IGNORE)); assertSame(encoder, encoder.onUnmappableCharacter(CodingErrorAction.IGNORE)); if (encoder instanceof MockCharsetEncoder) { assertTrue(Arrays.equals(encoder.replacement(), defaultReplacement)); } else { assertTrue(Arrays.equals(encoder.replacement(), specifiedReplacement)); } } /* * Class under test for constructor CharsetEncoder(Charset, float, float) */ public void testCharsetEncoderCharsetfloatfloat() { // default value encoder = new MockCharsetEncoder(cs, (float) AVER_BYTES, MAX_BYTES); assertSame(encoder.charset(), cs); 
assertTrue(encoder.averageBytesPerChar() == AVER_BYTES); assertTrue(encoder.maxBytesPerChar() == MAX_BYTES); assertEquals(CodingErrorAction.REPORT, encoder.malformedInputAction()); assertEquals(CodingErrorAction.REPORT, encoder .unmappableCharacterAction()); assertEquals(new String(encoder.replacement()), new String( defaultReplacement)); assertSame(encoder, encoder.onMalformedInput(CodingErrorAction.IGNORE)); assertSame(encoder, encoder .onUnmappableCharacter(CodingErrorAction.IGNORE)); // normal case CharsetEncoder ec = new MockCharsetEncoder(cs, 1, MAX_BYTES); assertSame(ec.charset(), cs); assertEquals(1.0, ec.averageBytesPerChar(), 0); assertTrue(ec.maxBytesPerChar() == MAX_BYTES); /* * ------------------------ Exceptional cases ------------------------- */ // NullPointerException: null charset try { ec = new MockCharsetEncoder(null, 1, MAX_BYTES); fail("should throw null pointer exception"); } catch (NullPointerException e) { } ec = new MockCharsetEncoder(new MockCharset("mock", new String[0]), 1, MAX_BYTES); // Commented out since the comment is wrong since MAX_BYTES > 1 // // OK: average length less than max length // ec = new MockCharsetEncoder(cs, MAX_BYTES, 1); // assertTrue(ec.averageBytesPerChar() == MAX_BYTES); // assertTrue(ec.maxBytesPerChar() == 1); // Illegal Argument: zero length try { ec = new MockCharsetEncoder(cs, 0, MAX_BYTES); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, 0); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: negative length try { ec = new MockCharsetEncoder(cs, -1, MAX_BYTES); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, -1); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } } /* * Class under test for constructor CharsetEncoder(Charset, float, float, * byte[]) */ public void testCharsetEncoderCharsetfloatfloatbyteArray() { byte[] ba = getLegalByteArray(); // normal case CharsetEncoder ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, ba); assertSame(ec.charset(), cs); assertEquals(1.0, ec.averageBytesPerChar(), 0.0); assertTrue(ec.maxBytesPerChar() == MAX_BYTES); assertSame(ba, ec.replacement()); /* * ------------------------ Exceptional cases ------------------------- */ // NullPointerException: null charset try { ec = new MockCharsetEncoder(null, 1, MAX_BYTES, ba); fail("should throw null pointer exception"); } catch (NullPointerException e) { } // Illegal Argument: null byte array try { ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, null); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: empty byte array try { ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, new byte[0]); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: byte array is longer than max length try { ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, new byte[] { 1, 2, MAX_BYTES, 4 }); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Commented out since the comment is wrong since MAX_BYTES > 1 // This test throws IllegalArgumentException on Harmony and RI // // OK: average length less than max length // ec = new MockCharsetEncoder(cs, MAX_BYTES, ba.length, ba); // assertTrue(ec.averageBytesPerChar() == MAX_BYTES); // assertTrue(ec.maxBytesPerChar() == ba.length); // Illegal 
Argument: zero length try { ec = new MockCharsetEncoder(cs, 0, MAX_BYTES, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, 0, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: negative length try { ec = new MockCharsetEncoder(cs, -1, MAX_BYTES, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, -1, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } } /* * Class under test for boolean canEncode(char) */ public void testCanEncodechar() throws CharacterCodingException { // for non-mapped char assertTrue(encoder.canEncode('\uc2c0')); // surrogate char for unicode // 1st byte: d800-dbff // 2nd byte: dc00-dfff assertTrue(encoder.canEncode('\ud800')); // valid surrogate pair assertTrue(encoder.canEncode('\udc00')); } /*----------------------------------------- * Class under test for illegal state case * methods which can change internal states are two encode, flush, two canEncode, reset * ----------------------------------------- */ // Normal case: just after reset, and it also means reset can be done // anywhere public void testResetIllegalState() throws CharacterCodingException { assertSame(encoder, encoder.reset()); encoder.canEncode('\ud901'); assertSame(encoder, encoder.reset()); encoder.canEncode("\ud901\udc00"); assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("aaa")); assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("aaa"), ByteBuffer.allocate(3), false); assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("aaa"), ByteBuffer.allocate(3), true); assertSame(encoder, encoder.reset()); } public void testFlushIllegalState() throws CharacterCodingException { CharBuffer in = CharBuffer.wrap("aaa"); ByteBuffer out = ByteBuffer.allocate(5); // Illegal state: after reset. 
encoder.reset(); try { encoder.flush(out); fail(); } catch (IllegalStateException expected) { } // Normal case: after encode with endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(in, out, true); out.rewind(); CoderResult result = encoder.flush(out); // Good state: flush twice encoder.flush(out); // Illegal state: flush after encode with endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(in, out, false); try { encoder.flush(out); fail(); } catch (IllegalStateException expected) { } } public void testFlushAfterConstructing() { ByteBuffer out = ByteBuffer.allocate(5); //Illegal state: flush after instance created try { encoder.flush(out); fail("should throw IllegalStateException"); } catch (IllegalStateException e) { // Expected } } // test illegal states for encode facade public void testEncodeFacadeIllegalState() throws CharacterCodingException { // encode facade can be execute in anywhere CharBuffer in = CharBuffer.wrap("aaa"); // Normal case: just created encoder.encode(in); in.rewind(); // Normal case: just after encode facade encoder.encode(in); in.rewind(); // Normal case: just after canEncode assertSame(encoder, encoder.reset()); encoder.canEncode("\ud902\udc00"); encoder.encode(in); in.rewind(); assertSame(encoder, encoder.reset()); encoder.canEncode('\ud902'); encoder.encode(in); in.rewind(); // Normal case: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); encoder.encode(in); in.rewind(); // Normal case:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); encoder.encode(in); in.rewind(); // Normal case: just after flush assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); encoder.encode(in); in.rewind(); } // test illegal states for two encode method with endOfInput is true public void testEncodeTrueIllegalState() throws CharacterCodingException { CharBuffer in = CharBuffer.wrap("aaa"); ByteBuffer out = ByteBuffer.allocate(5); // Normal case: just created encoder.encode(in, out, true); in.rewind(); out.rewind(); in.rewind(); out.rewind(); // Normal case: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); encoder.encode(in, out, true); in.rewind(); out.rewind(); // Normal case:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); encoder.encode(in, out, true); in.rewind(); out.rewind(); // Illegal state: just after flush assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); try { encoder.encode(in, out, true); fail("should illegal state"); } catch (IllegalStateException e) { } // Normal case: after canEncode assertSame(encoder, encoder.reset()); encoder.canEncode("\ud906\udc00"); encoder.encode(in, out, true); in.rewind(); out.rewind(); assertSame(encoder, encoder.reset()); encoder.canEncode('\ud905'); encoder.encode(in, out, true); } // test illegal states for two encode method with endOfInput is false 
public void testEncodeFalseIllegalState() throws CharacterCodingException { CharBuffer in = CharBuffer.wrap("aaa"); ByteBuffer out = ByteBuffer.allocate(5); // Normal case: just created encoder.encode(in, out, false); in.rewind(); out.rewind(); // Illegal state: just after encode facade assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState1")); try { encoder.encode(in, out, false); fail("should illegal state"); } catch (IllegalStateException e) { } // Illegal state: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); try { encoder.encode(in, out, false); fail("should illegal state"); } catch (IllegalStateException e) { } // Normal case:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); encoder.encode(in, out, false); in.rewind(); out.rewind(); // Illegal state: just after flush assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); try { encoder.encode(in, out, false); fail("should illegal state"); } catch (IllegalStateException e) { } // Normal case: after canEncode assertSame(encoder, encoder.reset()); encoder.canEncode("\ud906\udc00"); encoder.encode(in, out, false); in.rewind(); out.rewind(); assertSame(encoder, encoder.reset()); encoder.canEncode('\ud905'); encoder.encode(in, out, false); } // test illegal states for two canEncode methods public void testCanEncodeIllegalState() throws CharacterCodingException { // Normal case: just created encoder.canEncode("\ud900\udc00"); encoder.canEncode('\ud900'); // Illegal state: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); try { encoder.canEncode("\ud903\udc00"); fail("should throw illegal state exception"); } catch (IllegalStateException e) { } // Illegal state:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); try { encoder.canEncode("\ud904\udc00"); fail("should throw illegal state exception"); } catch (IllegalStateException e) { } // Normal case: just after flush encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); encoder.canEncode("\ud905\udc00"); encoder.canEncode('\ud906'); // Normal case: after reset again assertSame(encoder, encoder.reset()); encoder.canEncode("\ud906\udc00"); encoder.canEncode('\ud905'); } /* * --------------------------------- illegal state test end * --------------------------------- */ /* * Class under test for boolean canEncode(CharSequence) */ public void testCanEncodeCharSequence() { // for non-mapped char assertTrue(encoder.canEncode("\uc2c0")); // surrogate char for unicode // 1st byte: d800-dbff // 2nd byte: dc00-dfff // valid surrogate pair assertTrue(encoder.canEncode("\ud800\udc00")); // invalid surrogate pair assertTrue(encoder.canEncode("\ud800\udb00")); } public void test_canEncode_empty() throws Exception { assertTrue(encoder.canEncode("")); } public void test_canEncode_null() throws Exception { try { encoder.canEncode(null); fail(); } catch 
(NullPointerException e) { } } /* * Class under test for Charset charset() */ public void testCharset() { try { encoder = new MockCharsetEncoder(Charset.forName("gbk"), 1, MAX_BYTES); // assertSame(encoder.charset(), Charset.forName("gbk")); } catch (UnsupportedCharsetException e) { System.err .println("Don't support GBK encoding, ignore current test"); } } /* * Class under test for ByteBuffer encode(CharBuffer) */ public void testEncodeCharBuffer() throws CharacterCodingException { // Null pointer try { encoder.encode(null); fail("should throw null pointer exception"); } catch (NullPointerException e) { } // empty input buffer ByteBuffer out = encoder.encode(CharBuffer.wrap("")); assertEquals(out.position(), 0); assertByteArray(out, new byte[0]); // assertByteArray(out, surrogate); // normal case out = encoder.encode(CharBuffer.wrap(unistr)); assertEquals(out.position(), 0); assertByteArray(out, addSurrogate(unibytes)); // Regression test for harmony-3378 Charset cs = Charset.forName("UTF-8"); CharsetEncoder encoder = cs.newEncoder(); encoder.onMalformedInput(CodingErrorAction.REPLACE); encoder = encoder.replaceWith(new byte[] { (byte) 0xef, (byte) 0xbf, (byte) 0xbd, }); CharBuffer in = CharBuffer.wrap("\ud800"); out = encoder.encode(in); assertNotNull(out); } private byte[] addSurrogate(byte[] expected) { if (surrogate.length > 0) { byte[] temp = new byte[surrogate.length + expected.length]; System.arraycopy(surrogate, 0, temp, 0, surrogate.length); System.arraycopy(expected, 0, temp, surrogate.length, expected.length); expected = temp; } return expected; } /** * @return */ protected byte[] getEmptyByteArray() { return new byte[0]; } CharBuffer getMalformedCharBuffer() { return CharBuffer.wrap("malform buffer"); } CharBuffer getUnmapCharBuffer() { return CharBuffer.wrap("unmap buffer"); } CharBuffer getExceptionCharBuffer() { return CharBuffer.wrap("runtime buffer"); } public void testEncodeCharBufferException() throws CharacterCodingException { ByteBuffer out; CharBuffer in; // MalformedException: in = getMalformedCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { try { // regression test for Harmony-1379 encoder.encode(in); fail("should throw MalformedInputException"); } catch (MalformedInputException e) { } encoder.reset(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.IGNORE); out = encoder.encode(in); assertByteArray(out, addSurrogate(unibytes)); encoder.reset(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.REPLACE); out = encoder.encode(in); assertByteArray(out, addSurrogate(unibytesWithRep)); } // Unmapped Exception: in = getUnmapCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { encoder.reset(); try { encoder.encode(in); fail("should throw UnmappableCharacterException"); } catch (UnmappableCharacterException e) { } encoder.reset(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.IGNORE); out = encoder.encode(in); assertByteArray(out, unibytes); encoder.reset(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.REPLACE); out = encoder.encode(in); assertByteArray(out, unibytesWithRep); } // RuntimeException try { encoder.encode(getExceptionCharBuffer()); fail("should throw runtime exception"); } catch (RuntimeException e) { } } /* * utility method, extract given bytebuffer to a string and compare with * give string */ void assertByteArray(ByteBuffer out, byte[] 
expected) { out = out.duplicate(); if (out.position() != 0) { out.flip(); } byte[] ba = new byte[out.limit() - out.position()]; out.get(ba); // byte[] ba = out.array(); assertTrue(Arrays.equals(ba, expected)); } /* * Class under test for CoderResult encode(CharBuffer, ByteBuffer, boolean) */ public void testEncodeCharBufferByteBufferboolean() throws CharacterCodingException { ByteBuffer out = ByteBuffer.allocate(200); CharBuffer in = CharBuffer.wrap(unistr); // Null pointer try { encoder.encode(null, out, true); fail("should throw null pointer exception"); } catch (NullPointerException e) { } try { encoder.encode(in, null, true); fail("should throw null pointer exception"); } catch (NullPointerException e) { } // normal case, one complete operation assertSame(encoder, encoder.reset()); in.rewind(); out.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); in.rewind(); encoder.flush(out); // normal case, one complete operation, but call twice, first time set // endOfInput to false assertSame(encoder, encoder.reset()); in.rewind(); out = ByteBuffer.allocate(200); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false)); in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(duplicateByteArray(unibytes, 3))); // overflow out = ByteBuffer.allocate(4); assertSame(encoder, encoder.reset()); in.rewind(); out.rewind(); assertSame(CoderResult.OVERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 4); assertEquals(out.position(), 4); assertEquals(out.remaining(), 0); assertEquals(out.capacity(), 4); ByteBuffer temp = ByteBuffer.allocate(200); out.flip(); temp.put(out); out = temp; assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); assertSame(encoder, encoder.reset()); in.rewind(); out = ByteBuffer.allocate(4); assertSame(CoderResult.OVERFLOW, encoder.encode(in, out, false)); assertEquals(out.limit(), 4); assertEquals(out.position(), 4); assertEquals(out.remaining(), 0); assertEquals(out.capacity(), 4); temp = ByteBuffer.allocate(200); out.flip(); temp.put(out); out = temp; assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); } void printByteBuffer(ByteBuffer buffer) { System.out.println("print buffer"); if (buffer.position() != 0) { buffer.flip(); } byte[] ba = buffer.array(); for (int i = 0; i < ba.length; i++) { System.out.println(Integer.toHexString(ba[i])); } } public void testEncodeCharBufferByteBufferbooleanExceptionFalse() throws CharacterCodingException { implTestEncodeCharBufferByteBufferbooleanException(false); } public void 
testEncodeCharBufferByteBufferbooleanExceptionTrue() throws CharacterCodingException { implTestEncodeCharBufferByteBufferbooleanException(true); } private byte[] duplicateByteArray(byte[] ba, int times) { byte[] result = new byte[ba.length * times]; for (int i = 0; i < times; i++) { System.arraycopy(ba, 0, result, i * ba.length, ba.length); } return result; } protected void implTestEncodeCharBufferByteBufferbooleanException( boolean endOfInput) throws CharacterCodingException { ByteBuffer out = ByteBuffer.allocate(100); // MalformedException: CharBuffer in = getMalformedCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { encoder.reset(); CoderResult r = encoder.encode(in, out, endOfInput); assertTrue(r.isMalformed()); encoder.reset(); out.clear(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.IGNORE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytes); encoder.reset(); out.clear(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.REPLACE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytesWithRep); } else { System.out.println("Cannot find malformed char buffer for " + cs.name()); } // Unmapped Exception: in = getUnmapCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { encoder.reset(); out.clear(); assertTrue(encoder.encode(in, out, endOfInput).isUnmappable()); encoder.reset(); out.clear(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.IGNORE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytes); encoder.reset(); out.clear(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.REPLACE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytesWithRep); } else { System.out.println("Cannot find unmapped char buffer for " + cs.name()); } // RuntimeException try { encoder.encode(getExceptionCharBuffer()); fail("should throw runtime exception"); } catch (RuntimeException e) { } } private void assertCodingErrorAction(boolean endOfInput, ByteBuffer out, CharBuffer in, byte[] expect) { if (endOfInput) { assertByteArray(out, addSurrogate(expect)); } else { in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertByteArray(out, addSurrogate(duplicateByteArray(expect, 3))); } } /* * Class under test for CoderResult flush(ByteBuffer) */ public void testFlush() throws CharacterCodingException { ByteBuffer out = ByteBuffer.allocate(6); CharBuffer in = CharBuffer.wrap("aaa"); assertEquals(in.remaining(), 3); // by encode facade, so that internal state will be wrong encoder.encode(CharBuffer.wrap("testFlush"), ByteBuffer.allocate(20), true); assertSame(CoderResult.UNDERFLOW, encoder .flush(ByteBuffer.allocate(50))); } /* * test isLegalReplacement(byte[]) */ public void test_isLegalReplacement_null() { try { encoder.isLegalReplacement(null); fail("should throw null pointer exception"); } catch (NullPointerException e) { } } public void test_isLegalReplacement_good() { assertTrue(encoder.isLegalReplacement(specifiedReplacement)); } public void test_isLegalReplacement_bad() { 
assertTrue(encoder.isLegalReplacement(new byte[200])); byte[] ba = getIllegalByteArray(); if (ba != null) { assertFalse(encoder.isLegalReplacement(ba)); } } public void test_isLegalReplacement_empty_array() { // ISO, ASC, GB, UTF8 encoder will throw exception in RI // others will pass assertTrue(encoder.isLegalReplacement(new byte[0])); } public void testOnMalformedInput() { assertSame(CodingErrorAction.REPORT, encoder.malformedInputAction()); try { encoder.onMalformedInput(null); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } encoder.onMalformedInput(CodingErrorAction.IGNORE); assertSame(CodingErrorAction.IGNORE, encoder.malformedInputAction()); } public void testOnUnmappableCharacter() { assertSame(CodingErrorAction.REPORT, encoder .unmappableCharacterAction()); try { encoder.onUnmappableCharacter(null); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } encoder.onUnmappableCharacter(CodingErrorAction.IGNORE); assertSame(CodingErrorAction.IGNORE, encoder .unmappableCharacterAction()); } public void testReplacement() { try { encoder.replaceWith(null); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } try { encoder.replaceWith(new byte[0]); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } try { encoder.replaceWith(new byte[100]); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } byte[] nr = getLegalByteArray(); assertSame(encoder, encoder.replaceWith(nr)); assertSame(nr, encoder.replacement()); nr = getIllegalByteArray(); try { encoder.replaceWith(new byte[100]); fail(); } catch (IllegalArgumentException e) { } } protected byte[] getLegalByteArray() { return new byte[] { 'a' }; } protected byte[] getIllegalByteArray() { return new byte[155]; } /* * Mock subclass of CharsetEncoder For protected method test */ public static class MockCharsetEncoder extends CharsetEncoder { boolean flushed = false; public boolean isFlushed() { boolean result = flushed; flushed = false; return result; } public boolean isLegalReplacement(byte[] ba) { if (ba.length == 155) {// specified magic number, return false return false; } return super.isLegalReplacement(ba); } public MockCharsetEncoder(Charset cs, float aver, float max) { super(cs, aver, max); } public MockCharsetEncoder(Charset cs, float aver, float max, byte[] replacement) { super(cs, aver, max, replacement); } protected CoderResult encodeLoop(CharBuffer in, ByteBuffer out) { int inPosition = in.position(); char[] input = new char[in.remaining()]; in.get(input); String result = new String(input); if (result.startsWith("malform")) { // reset the cursor to the error position in.position(inPosition); // in.position(0); // set the error length return CoderResult.malformedForLength("malform".length()); } else if (result.startsWith("unmap")) { // reset the cursor to the error position in.position(inPosition); // in.position(0); // set the error length return CoderResult.unmappableForLength("unmap".length()); } else if (result.startsWith("runtime")) { // reset the cursor to the error position in.position(0); // set the error length throw new RuntimeException("runtime"); } int inLeft = input.length; int outLeft = out.remaining(); CoderResult r = CoderResult.UNDERFLOW; int length = inLeft; if (outLeft < inLeft) { r = CoderResult.OVERFLOW; length = outLeft; in.position(inPosition + outLeft); } for (int i = 0; i < length; i++) { out.put((byte) input[i]); } return r; } protected 
CoderResult implFlush(ByteBuffer out) { CoderResult result = super.implFlush(out); int length = 0; if (out.remaining() >= 5) { length = 5; result = CoderResult.UNDERFLOW; flushed = true; // for (int i = 0; i < length; i++) { // out.put((byte)'f'); // } } else { length = out.remaining(); result = CoderResult.OVERFLOW; } return result; } protected void implReplaceWith(byte[] ba) { assertSame(ba, replacement()); } } /* * mock charset for test encoder initialization */ public static class MockCharset extends Charset { protected MockCharset(String arg0, String[] arg1) { super(arg0, arg1); } public boolean contains(Charset arg0) { return false; } public CharsetDecoder newDecoder() { return new CharsetDecoderTest.MockCharsetDecoder(this, (float) AVER_BYTES, MAX_BYTES); } public CharsetEncoder newEncoder() { return new MockCharsetEncoder(this, (float) AVER_BYTES, MAX_BYTES); } } }
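// --- Editor's addendum: a minimal standalone sketch (hypothetical file/class name
// EncoderLifecycleSketch; it is NOT part of the original CharsetEncoderTest above).
// It illustrates, using the stock JDK UTF-8 encoder instead of the test's
// MockCharsetEncoder, the encode/flush lifecycle and the OVERFLOW / UNDERFLOW
// CoderResults that the tests above assert on. All API calls are standard
// java.nio.charset; only the class name and buffer sizes are chosen for illustration.
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.nio.charset.StandardCharsets;

public class EncoderLifecycleSketch {
    public static void main(String[] args) {
        CharsetEncoder encoder = StandardCharsets.UTF_8.newEncoder();
        CharBuffer in = CharBuffer.wrap("aaa");
        ByteBuffer small = ByteBuffer.allocate(2);   // too small for 3 bytes -> OVERFLOW
        ByteBuffer large = ByteBuffer.allocate(16);  // large enough -> UNDERFLOW

        // Encoding into an undersized output buffer reports OVERFLOW and leaves
        // unconsumed input behind.
        CoderResult r1 = encoder.encode(in, small, true);
        System.out.println(r1.isOverflow());         // true

        // After encode(..., true) the encoder is in its end state; reset() is required
        // before starting a new encoding operation (otherwise IllegalStateException,
        // as the illegal-state tests above verify).
        in.rewind();
        encoder.reset();
        CoderResult r2 = encoder.encode(in, large, true);
        System.out.println(r2.isUnderflow());        // true, all input consumed
        System.out.println(encoder.flush(large));    // UNDERFLOW, nothing left to flush
    }
}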
Commented out println noise in CharsetEncoderTest. PiperOrigin-RevId: 263622649
jre_emul/android/platform/libcore/harmony-tests/src/test/java/org/apache/harmony/tests/java/nio/charset/CharsetEncoderTest.java
Commented out println noise in CharsetEncoderTest.
Java
apache-2.0
12362d0a16c9d964c9d90ac1dd9e143b3083cb2c
0
HubSpot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity
package com.hubspot.singularity.scheduler; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; import com.google.inject.Inject; import com.hubspot.baragon.models.BaragonRequestState; import com.hubspot.mesos.Resources; import com.hubspot.mesos.SingularityContainerInfo; import com.hubspot.mesos.SingularityContainerType; import com.hubspot.mesos.SingularityDockerInfo; import com.hubspot.mesos.SingularityDockerNetworkType; import com.hubspot.mesos.SingularityDockerPortMapping; import com.hubspot.mesos.SingularityPortMappingType; import com.hubspot.mesos.SingularityVolume; import com.hubspot.mesos.protos.MesosTaskState; import com.hubspot.singularity.DeployState; import com.hubspot.singularity.ExtendedTaskState; import com.hubspot.singularity.LoadBalancerRequestType; import com.hubspot.singularity.MachineState; import com.hubspot.singularity.RequestCleanupType; import com.hubspot.singularity.RequestState; import com.hubspot.singularity.RequestType; import com.hubspot.singularity.ScheduleType; import com.hubspot.singularity.SingularityDeleteResult; import com.hubspot.singularity.SingularityDeploy; import com.hubspot.singularity.SingularityDeployBuilder; import com.hubspot.singularity.SingularityDeployMarker; import com.hubspot.singularity.SingularityDeployResult; import com.hubspot.singularity.SingularityDeployStatistics; import com.hubspot.singularity.SingularityKilledTaskIdRecord; import com.hubspot.singularity.SingularityLoadBalancerUpdate; import com.hubspot.singularity.SingularityPendingRequest; import com.hubspot.singularity.SingularityPendingRequest.PendingType; import com.hubspot.singularity.SingularityPendingTask; import com.hubspot.singularity.SingularityPendingTaskBuilder; import com.hubspot.singularity.SingularityPendingTaskId; import com.hubspot.singularity.SingularityPriorityFreezeParent; import com.hubspot.singularity.SingularityRequest; import com.hubspot.singularity.SingularityRequestBuilder; import com.hubspot.singularity.SingularityRequestCleanup; import com.hubspot.singularity.SingularityRequestHistory; import com.hubspot.singularity.SingularityRequestHistory.RequestHistoryType; import com.hubspot.singularity.SingularityRequestLbCleanup; import com.hubspot.singularity.SingularityRunNowRequestBuilder; import com.hubspot.singularity.SingularityShellCommand; import com.hubspot.singularity.SingularityTask; import com.hubspot.singularity.SingularityTaskCleanup; import com.hubspot.singularity.SingularityTaskHealthcheckResult; import com.hubspot.singularity.SingularityTaskHistoryUpdate; import com.hubspot.singularity.SingularityTaskId; import com.hubspot.singularity.SingularityTaskRequest; import com.hubspot.singularity.SlavePlacement; import com.hubspot.singularity.TaskCleanupType; import com.hubspot.singularity.api.SingularityBounceRequest; import com.hubspot.singularity.api.SingularityDeleteRequestRequest; import com.hubspot.singularity.api.SingularityDeployRequest; import com.hubspot.singularity.api.SingularityKillTaskRequest; import com.hubspot.singularity.api.SingularityPauseRequest; import com.hubspot.singularity.api.SingularityPriorityFreeze; import com.hubspot.singularity.api.SingularityRunNowRequest; import com.hubspot.singularity.api.SingularityScaleRequest; import com.hubspot.singularity.data.AbstractMachineManager.StateChangeResult; import com.hubspot.singularity.data.SingularityValidator; import com.hubspot.singularity.helpers.MesosProtosUtils; import com.hubspot.singularity.helpers.MesosUtils; import 
com.hubspot.singularity.mesos.OfferCache; import com.hubspot.singularity.mesos.SingularityMesosStatusUpdateHandler; import com.hubspot.singularity.mesos.SingularityMesosTaskPrioritizer; import com.hubspot.singularity.scheduler.SingularityDeployHealthHelper.DeployHealth; import com.hubspot.singularity.scheduler.SingularityTaskReconciliation.ReconciliationState; import com.jayway.awaitility.Awaitility; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import javax.ws.rs.WebApplicationException; import org.apache.mesos.v1.Protos.AgentID; import org.apache.mesos.v1.Protos.Offer; import org.apache.mesos.v1.Protos.TaskID; import org.apache.mesos.v1.Protos.TaskState; import org.apache.mesos.v1.Protos.TaskStatus; import org.apache.mesos.v1.Protos.TaskStatus.Reason; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.mockito.ArgumentMatchers; import org.mockito.Mockito; public class SingularitySchedulerTest extends SingularitySchedulerTestBase { @Inject private SingularityValidator validator; @Inject private SingularityDeployHealthHelper deployHealthHelper; @Inject private SingularityMesosTaskPrioritizer taskPrioritizer; @Inject private SingularitySchedulerPoller schedulerPoller; @Inject private OfferCache offerCache; @Inject private MesosProtosUtils mesosProtosUtils; @Inject SingularityMesosStatusUpdateHandler updateHandler; public SingularitySchedulerTest() { super(false); } private SingularityPendingTask pendingTask( String requestId, String deployId, PendingType pendingType ) { return new SingularityPendingTaskBuilder() .setPendingTaskId( new SingularityPendingTaskId( requestId, deployId, System.currentTimeMillis(), 1, pendingType, System.currentTimeMillis() ) ) .build(); } @Test public void testOfferCacheRescindOffers() { configuration.setCacheOffers(true); configuration.setOfferCacheSize(2); List<Offer> offers2 = resourceOffers(); // cached as well sms.rescind(offers2.get(0).getId()); sms.rescind(offers2.get(1).getId()); initRequest(); initFirstDeploy(); requestResource.postRequest( request .toBuilder() .setSlavePlacement(Optional.of(SlavePlacement.SEPARATE)) .setInstances(Optional.of(2)) .build(), singularityUser ); schedulerPoller.runActionOnPoll(); Assertions.assertEquals(0, taskManager.getActiveTasks().size()); resourceOffers(); int numTasks = taskManager.getActiveTasks().size(); Assertions.assertEquals(2, numTasks); startAndDeploySecondRequest(); schedulerPoller.runActionOnPoll(); Assertions.assertEquals(numTasks, taskManager.getActiveTasks().size()); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTasks().size() > numTasks); } @Test public void testSchedulerIsolatesPendingTasksBasedOnDeploy() { initRequest(); initFirstDeploy(); initSecondDeploy(); SingularityPendingTask p1 = pendingTask(requestId, firstDeployId, PendingType.ONEOFF); SingularityPendingTask p2 = pendingTask( requestId, firstDeployId, PendingType.TASK_DONE ); SingularityPendingTask p3 = pendingTask( requestId, secondDeployId, PendingType.TASK_DONE ); taskManager.savePendingTask(p1); taskManager.savePendingTask(p2); taskManager.savePendingTask(p3); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, secondDeployId, System.currentTimeMillis(), Optional.<String>empty(), 
PendingType.NEW_DEPLOY, Optional.<Boolean>empty(), Optional.<String>empty() ) ); scheduler.drainPendingQueue(); // we expect there to be 3 pending tasks : List<SingularityPendingTask> returnedScheduledTasks = taskManager.getPendingTasks(); Assertions.assertEquals(3, returnedScheduledTasks.size()); Assertions.assertTrue(returnedScheduledTasks.contains(p1)); Assertions.assertTrue(returnedScheduledTasks.contains(p2)); Assertions.assertTrue(!returnedScheduledTasks.contains(p3)); boolean found = false; for (SingularityPendingTask pendingTask : returnedScheduledTasks) { if (pendingTask.getPendingTaskId().getDeployId().equals(secondDeployId)) { found = true; Assertions.assertEquals( PendingType.NEW_DEPLOY, pendingTask.getPendingTaskId().getPendingType() ); } } Assertions.assertTrue(found); } @Test public void testCleanerLeavesPausedRequestTasksByDemand() { initScheduledRequest(); initFirstDeploy(); SingularityTask firstTask = launchTask( request, firstDeploy, 1, TaskState.TASK_RUNNING ); createAndSchedulePendingTask(firstDeployId); requestResource.pause( requestId, Optional.of( new SingularityPauseRequest( Optional.of(false), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); Assertions.assertTrue(requestManager.getCleanupRequests().isEmpty()); statusUpdate(firstTask, TaskState.TASK_FINISHED); // make sure something new isn't scheduled! Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); } @Test public void testTaskKill() { initRequest(); initFirstDeploy(); SingularityTask firstTask = startTask(firstDeploy); taskResource.killTask( firstTask.getTaskId().getId(), Optional.empty(), singularityUser ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(0, taskManager.getNumCleanupTasks()); Assertions.assertEquals(0, taskManager.getNumActiveTasks()); } @Test public void testTaskDestroy() { initRequest(); initFirstDeploy(); SingularityTask firstTask = startTask(firstDeploy, 1); SingularityTask secondTask = startTask(firstDeploy, 2); SingularityTask thirdTask = startTask(firstDeploy, 3); taskResource.killTask( secondTask.getTaskId().getId(), Optional.of( new SingularityKillTaskRequest( Optional.of(true), Optional.of("kill -9 bb"), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(2, taskManager.getNumActiveTasks()); System.out.println(requestManager.getCleanupRequests()); Assertions.assertEquals(0, requestManager.getCleanupRequests().size()); Assertions.assertEquals( RequestState.ACTIVE, requestManager.getRequest(requestId).get().getState() ); } @Test public void testTaskBounce() { initRequest(); initFirstDeploy(); SingularityTask firstTask = startTask(firstDeploy); taskResource.killTask( firstTask.getTaskId().getId(), Optional.of( new SingularityKillTaskRequest( Optional.empty(), Optional.of("msg"), Optional.empty(), Optional.of(true), Optional.empty() ) ), singularityUser ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); resourceOffers(); runLaunchedTasks(); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(2, 
taskManager.getNumActiveTasks()); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(0, taskManager.getNumCleanupTasks()); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); } @Test public void testBounceWithLoadBalancer() { initLoadBalancedRequest(); initFirstDeploy(); configuration.setNewTaskCheckerBaseDelaySeconds(1000000); SingularityTask taskOne = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); saveLoadBalancerState( BaragonRequestState.SUCCESS, taskOne.getTaskId(), LoadBalancerRequestType.ADD ); requestResource.bounce(requestId, Optional.empty(), singularityUser); cleaner.drainCleanupQueue(); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(2, taskManager.getNumActiveTasks()); List<SingularityTaskId> tasks = taskManager.getActiveTaskIds(); tasks.remove(taskOne.getTaskId()); SingularityTaskId taskTwo = tasks.get(0); cleaner.drainCleanupQueue(); runLaunchedTasks(); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(2, taskManager.getNumActiveTasks()); // add to LB: saveLoadBalancerState( BaragonRequestState.SUCCESS, taskTwo, LoadBalancerRequestType.ADD ); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(2, taskManager.getNumActiveTasks()); saveLoadBalancerState( BaragonRequestState.SUCCESS, taskOne.getTaskId(), LoadBalancerRequestType.REMOVE ); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getKilledTaskIdRecords().size()); killKilledTasks(); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); } @Test public void testKilledTaskIdRecords() { initScheduledRequest(); initFirstDeploy(); launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); requestResource.deleteRequest(requestId, Optional.empty(), singularityUser); Assertions.assertTrue(requestManager.getCleanupRequests().size() == 1); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getKilledTaskIdRecords().isEmpty()); killKilledTasks(); cleaner.drainCleanupQueue(); Assertions.assertTrue(requestManager.getCleanupRequests().isEmpty()); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); } @Test public void testLongRunningTaskKills() { initScheduledRequest(); initFirstDeploy(); launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); initSecondDeploy(); deployChecker.checkDeploys(); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(!taskManager.getCleanupTasks().isEmpty()); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(!taskManager.getCleanupTasks().isEmpty()); requestManager.activate( request .toBuilder() .setKillOldNonLongRunningTasksAfterMillis(Optional.<Long>of(0L)) .build(), RequestHistoryType.CREATED, System.currentTimeMillis(), Optional.<String>empty(), Optional.<String>empty() ); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(taskManager.getCleanupTasks().isEmpty()); } @Test public void testSchedulerCanBatchOnOffers() { initRequest(); initFirstDeploy(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(3)).build(), singularityUser ); scheduler.drainPendingQueue(); List<Offer> oneOffer = Arrays.asList(createOffer(12, 1024, 5000)); sms.resourceOffers(oneOffer).join(); Assertions.assertTrue(taskManager.getActiveTasks().size() == 
3); Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); Assertions.assertTrue(requestManager.getPendingRequests().isEmpty()); } @Test public void testSchedulerExhaustsOffers() { initRequest(); initFirstDeploy(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(10)).build(), singularityUser ); scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(2, 1024, 2048), createOffer(1, 1024, 2048)) ) .join(); Assertions.assertEquals(3, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(7, taskManager.getPendingTaskIds().size()); } @Test public void testSchedulerRandomizesOffers() { initRequest(); initFirstDeploy(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(15)).build(), singularityUser ); scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(20, 1024, 20000), createOffer(20, 1024, 20000)) ) .join(); Assertions.assertEquals(15, taskManager.getActiveTaskIds().size()); Set<String> offerIds = Sets.newHashSet(); for (SingularityTask activeTask : taskManager.getActiveTasks()) { offerIds.addAll( activeTask .getOffers() .stream() .map(o -> o.getId().getValue()) .collect(Collectors.toList()) ); } Assertions.assertEquals(2, offerIds.size()); } @Test public void testSchedulerHandlesFinishedTasks() { initScheduledRequest(); initFirstDeploy(); schedule = "*/1 * * * * ? 1995"; // cause it to be pending requestResource.postRequest( request.toBuilder().setQuartzSchedule(Optional.of(schedule)).build(), singularityUser ); scheduler.drainPendingQueue(); Assertions.assertTrue( requestResource .getActiveRequests( singularityUser, false, false, false, 10, Collections.emptyList() ) .isEmpty() ); Assertions.assertTrue( requestManager.getRequest(requestId).get().getState() == RequestState.FINISHED ); Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); schedule = "*/1 * * * * ?"; requestResource.postRequest( request.toBuilder().setQuartzSchedule(Optional.of(schedule)).build(), singularityUser ); scheduler.drainPendingQueue(); Assertions.assertTrue( !requestResource .getActiveRequests( singularityUser, false, false, false, 10, Collections.emptyList() ) .isEmpty() ); Assertions.assertTrue( requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE ); Assertions.assertTrue(!taskManager.getPendingTaskIds().isEmpty()); } @Test public void testNewlyDeployedScheduledTasksAreScheduledAfterStartup() { initScheduledRequest(); initFirstDeploy(); SingularityTask runningTask = launchTask( request, firstDeploy, 1, TaskState.TASK_RUNNING ); long now = System.currentTimeMillis(); initSecondDeploy(); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, secondDeployId, now, Optional.empty(), PendingType.STARTUP, Optional.empty(), Optional.empty() ) ); deployChecker.checkDeploys(); resourceOffers(); // There's an instance running, so we shouldn't schedule a pending task yet Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); statusUpdate(runningTask, TaskState.TASK_FINISHED); scheduler.drainPendingQueue(); // Now a pending task should be scheduled with the new deploy Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( PendingType.NEW_DEPLOY, taskManager.getPendingTaskIds().get(0).getPendingType() ); Assertions.assertEquals( secondDeployId, taskManager.getPendingTaskIds().get(0).getDeployId() ); } @Test public void testFinishedRequestCanBeDeployed() { initScheduledRequest(); initFirstDeploy(); schedule = 
"*/1 * * * * ? 1995"; // cause it to be pending requestResource.postRequest( request.toBuilder().setQuartzSchedule(Optional.of(schedule)).build(), singularityUser ); scheduler.drainPendingQueue(); Assertions.assertTrue( requestResource .getActiveRequests( singularityUser, false, false, false, 10, Collections.emptyList() ) .isEmpty() ); Assertions.assertTrue( requestManager.getRequest(requestId).get().getState() == RequestState.FINISHED ); SingularityDeployBuilder db = new SingularityDeployBuilder(requestId, secondDeployId); initDeploy(db, System.currentTimeMillis()); deployChecker.checkDeploys(); Assertions.assertEquals( RequestState.ACTIVE, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals(1, requestManager.getPendingRequests().size()); } @Test public void testOneOffsDontRunByThemselves() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); Assertions.assertTrue(requestManager.getPendingRequests().isEmpty()); deploy("d2"); Assertions.assertTrue(requestManager.getPendingRequests().isEmpty()); deployChecker.checkDeploys(); Assertions.assertTrue(requestManager.getPendingRequests().isEmpty()); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_FINISHED); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(0, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(0, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); } @Test public void testOneOffsDontMoveDuringDecomission() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); validateTaskDoesntMoveDuringDecommission(); } private void validateTaskDoesntMoveDuringDecommission() { scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(1, 129, 1025, "slave1", "host1", Optional.of("rack1"))) ) .join(); scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(1, 129, 1025, "slave2", "host2", Optional.of("rack1"))) ) .join(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); Assertions.assertEquals( "host1", taskManager.getActiveTaskIds().get(0).getSanitizedHost() ); Assertions.assertEquals( StateChangeResult.SUCCESS, slaveManager.changeState( "slave1", MachineState.STARTING_DECOMMISSION, Optional.<String>empty(), Optional.of("user1") ) ); scheduler.checkForDecomissions(); scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(1, 129, 1025, "slave2", "host2", Optional.of("rack1"))) ) .join(); cleaner.drainCleanupQueue(); scheduler.drainPendingQueue(); sms .resourceOffers( 
Arrays.asList(createOffer(1, 129, 1025, "slave2", "host2", Optional.of("rack1"))) ) .join(); cleaner.drainCleanupQueue(); // task should not move! Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); Assertions.assertEquals( "host1", taskManager.getActiveTaskIds().get(0).getSanitizedHost() ); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(taskManager.getCleanupTaskIds().size() == 1); } @Test public void testCustomResourcesWithRunNowRequest() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); SingularityRunNowRequest runNowRequest = new SingularityRunNowRequestBuilder() .setResources(new Resources(2, 2, 0)) .build(); requestResource.scheduleImmediately(singularityUser, requestId, runNowRequest); scheduler.drainPendingQueue(); SingularityPendingTask pendingTaskWithResourcs = taskManager.getPendingTasks().get(0); Assertions.assertTrue(pendingTaskWithResourcs.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResourcs.getResources().get().getCpus(), 2, 0.0 ); sms .resourceOffers( Arrays.asList(createOffer(5, 5, 5, "slave1", "host1", Optional.of("rack1"))) ) .join(); SingularityTask task = taskManager.getActiveTasks().get(0); Assertions.assertEquals( MesosUtils.getNumCpus( mesosProtosUtils.toResourceList(task.getMesosTask().getResources()), Optional.<String>empty() ), 2.0, 0.0 ); } @Test public void testRunOnceRunOnlyOnce() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.RUN_ONCE ); request = bldr.build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue( deployManager.getRequestDeployState(requestId).get().getActiveDeploy().isPresent() ); Assertions.assertTrue( !deployManager.getRequestDeployState(requestId).get().getPendingDeploy().isPresent() ); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().isEmpty()); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d2") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue( deployManager.getRequestDeployState(requestId).get().getActiveDeploy().isPresent() ); Assertions.assertTrue( !deployManager.getRequestDeployState(requestId).get().getPendingDeploy().isPresent() ); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_FINISHED); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().isEmpty()); } @Test public void testMultipleRunOnceTasks() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.RUN_ONCE ); request = bldr.build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") 
.setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); deployChecker.checkDeploys(); Assertions.assertEquals(1, requestManager.getSizeOfPendingQueue()); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d2") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); deployChecker.checkDeploys(); Assertions.assertEquals(2, requestManager.getSizeOfPendingQueue()); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(2, taskManager.getActiveTaskIds().size()); } @Test public void testRunOnceDontMoveDuringDecomission() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.RUN_ONCE ); request = bldr.build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); validateTaskDoesntMoveDuringDecommission(); } @Test public void testDecommissionDoesntKillPendingDeploy() { initRequest(); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); resourceOffers(); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); slaveResource.decommissionSlave( singularityUser, taskManager.getActiveTasks().get(0).getAgentId().getValue(), null ); scheduler.checkForDecomissions(); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); configuration.setPendingDeployHoldTaskDuringDecommissionMillis(1); try { Thread.sleep(2); } catch (InterruptedException e) {} cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(0, taskManager.getNumActiveTasks()); Assertions.assertEquals(0, taskManager.getNumCleanupTasks()); } @Test public void testRetries() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.RUN_ONCE ); request = bldr.setNumRetriesOnFailure(Optional.of(2)).build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().isEmpty()); } @Test public void testRetriesWithOverrides() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); request = 
bldr.setNumRetriesOnFailure(Optional.of(2)).build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder() .setCommandLineArgs(Collections.singletonList("extraFlag")) .setResources(new Resources(17, 1337, 0)) .build() ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); Resources resourcesForRunningTask = taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getResources() .get(); Assertions.assertEquals( Optional.of(Collections.singletonList("extraFlag")), taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getCmdLineArgsList() ); Assertions.assertEquals(17, resourcesForRunningTask.getCpus(), 0.01); Assertions.assertEquals(1337, resourcesForRunningTask.getMemoryMb(), 0.01); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); resourcesForRunningTask = taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getResources() .get(); Assertions.assertEquals( Optional.of(Collections.singletonList("extraFlag")), taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getCmdLineArgsList() ); Assertions.assertEquals(17, resourcesForRunningTask.getCpus(), 0.01); Assertions.assertEquals(1337, resourcesForRunningTask.getMemoryMb(), 0.01); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); resourcesForRunningTask = taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getResources() .get(); Assertions.assertEquals( Optional.of(Collections.singletonList("extraFlag")), taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getCmdLineArgsList() ); Assertions.assertEquals(17, resourcesForRunningTask.getCpus(), 0.01); Assertions.assertEquals(1337, resourcesForRunningTask.getMemoryMb(), 0.01); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().isEmpty()); } /* @Test public void testCooldownAfterSequentialFailures() { initRequest(); initFirstDeploy(); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE); configuration.setFastFailureCooldownCount(2); SingularityTask firstTask = startTask(firstDeploy); SingularityTask secondTask = startTask(firstDeploy); statusUpdate(firstTask, TaskState.TASK_FAILED); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE); statusUpdate(secondTask, TaskState.TASK_FAILED); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.SYSTEM_COOLDOWN); cooldownChecker.checkCooldowns(); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.SYSTEM_COOLDOWN); SingularityTask thirdTask = startTask(firstDeploy); statusUpdate(thirdTask, TaskState.TASK_FINISHED); 
Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE); } @Test public void testCooldownOnlyWhenTasksRapidlyFail() { initRequest(); initFirstDeploy(); configuration.setFastFailureCooldownCount(2); SingularityTask firstTask = startTask(firstDeploy); statusUpdate(firstTask, TaskState.TASK_FAILED, Optional.of(System.currentTimeMillis() - TimeUnit.HOURS.toMillis(5))); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE); SingularityTask secondTask = startTask(firstDeploy); statusUpdate(secondTask, TaskState.TASK_FAILED); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() != RequestState.SYSTEM_COOLDOWN); }*/ @Test public void testLBCleanup() { initLoadBalancedRequest(); initFirstDeploy(); configuration.setLoadBalancerRemovalGracePeriodMillis(10000); SingularityTask task = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); saveLoadBalancerState( BaragonRequestState.SUCCESS, task.getTaskId(), LoadBalancerRequestType.ADD ); statusUpdate(task, TaskState.TASK_FAILED); Assertions.assertTrue(!taskManager.getLBCleanupTasks().isEmpty()); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getLBCleanupTasks().isEmpty()); Optional<SingularityLoadBalancerUpdate> lbUpdate = taskManager.getLoadBalancerState( task.getTaskId(), LoadBalancerRequestType.REMOVE ); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.WAITING ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.FAILED); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getLBCleanupTasks().isEmpty()); lbUpdate = taskManager.getLoadBalancerState(task.getTaskId(), LoadBalancerRequestType.REMOVE); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.FAILED ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.SUCCESS); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getLBCleanupTasks().isEmpty()); configuration.setLoadBalancerRemovalGracePeriodMillis(0); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getLBCleanupTasks().isEmpty()); lbUpdate = taskManager.getLoadBalancerState(task.getTaskId(), LoadBalancerRequestType.REMOVE); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.SUCCESS ); Assertions.assertTrue( lbUpdate.get().getLoadBalancerRequestId().getAttemptNumber() == 2 ); } @Test public void testLbCleanupDoesNotRemoveBeforeAdd() { initLoadBalancedRequest(); initFirstDeploy(); SingularityTask taskOne = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); initSecondDeploy(); SingularityTask taskTwo = launchTask( request, secondDeploy, 1, TaskState.TASK_RUNNING ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); deployChecker.checkDeploys(); // First task from old deploy is still starting, never got added to LB so it should not have a removal request Assertions.assertFalse( taskManager .getLoadBalancerState(taskOne.getTaskId(), LoadBalancerRequestType.ADD) .isPresent() ); Assertions.assertFalse( taskManager .getLoadBalancerState(taskOne.getTaskId(), LoadBalancerRequestType.REMOVE) .isPresent() ); // Second task should have an add request Assertions.assertTrue( taskManager .getLoadBalancerState(taskTwo.getTaskId(), 
LoadBalancerRequestType.ADD) .isPresent() ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.SUCCESS); deployChecker.checkDeploys(); // First task from old deploy should still have no LB updates, but should have a cleanup Assertions.assertFalse( taskManager .getLoadBalancerState(taskOne.getTaskId(), LoadBalancerRequestType.ADD) .isPresent() ); Assertions.assertFalse( taskManager .getLoadBalancerState(taskOne.getTaskId(), LoadBalancerRequestType.REMOVE) .isPresent() ); Assertions.assertTrue(taskManager.getCleanupTaskIds().contains(taskOne.getTaskId())); } @Test public void testLbCleanupSkippedOnSkipRemoveFlag() { configuration.setDeleteRemovedRequestsFromLoadBalancer(true); initLoadBalancedRequest(); initLoadBalancedDeploy(); startTask(firstDeploy); boolean removeFromLoadBalancer = false; SingularityDeleteRequestRequest deleteRequest = new SingularityDeleteRequestRequest( Optional.empty(), Optional.empty(), Optional.of(removeFromLoadBalancer) ); requestResource.deleteRequest(requestId, Optional.of(deleteRequest), singularityUser); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); Assertions.assertFalse( requestManager.getCleanupRequests().isEmpty(), "Tasks should get cleaned up" ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertFalse( requestManager.getCleanupRequests().isEmpty(), "The request should get cleaned up" ); cleaner.drainCleanupQueue(); Assertions.assertTrue( requestManager.getLbCleanupRequestIds().isEmpty(), "The request should not be removed from the load balancer" ); } @Test public void testLbCleanupOccursOnRequestDelete() { configuration.setDeleteRemovedRequestsFromLoadBalancer(true); initLoadBalancedRequest(); initLoadBalancedDeploy(); startTask(firstDeploy); requestResource.deleteRequest(requestId, Optional.empty(), singularityUser); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); Assertions.assertFalse( requestManager.getCleanupRequests().isEmpty(), "Tasks should get cleaned up" ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertFalse( requestManager.getCleanupRequests().isEmpty(), "The request should get cleaned up" ); cleaner.drainCleanupQueue(); Assertions.assertFalse( requestManager.getLbCleanupRequestIds().isEmpty(), "The request should get removed from the load balancer" ); } @Test public void testReconciliation() { Assertions.assertTrue(!taskReconciliation.isReconciliationRunning()); configuration.setCheckReconcileWhenRunningEveryMillis(1); initRequest(); initFirstDeploy(); Assertions.assertTrue( taskReconciliation.startReconciliation() == ReconciliationState.STARTED ); Awaitility .await() .atMost(10, TimeUnit.SECONDS) .until(() -> !taskReconciliation.isReconciliationRunning()); SingularityTask taskOne = launchTask( request, firstDeploy, 1, TaskState.TASK_STARTING ); SingularityTask taskTwo = launchTask(request, firstDeploy, 2, TaskState.TASK_RUNNING); saveLastActiveTaskStatus(taskOne, Optional.empty(), -1000); Assertions.assertTrue( taskReconciliation.startReconciliation() == ReconciliationState.STARTED ); Assertions.assertTrue( taskReconciliation.startReconciliation() == ReconciliationState.ALREADY_RUNNING ); Awaitility .await() .atMost(10, TimeUnit.SECONDS) .until(() -> taskReconciliation.isReconciliationRunning()); saveLastActiveTaskStatus(taskOne, Optional.of(buildTaskStatus(taskOne)), +1000); Awaitility .await() .atMost(10, TimeUnit.SECONDS) .until(() -> taskReconciliation.isReconciliationRunning()); saveLastActiveTaskStatus(taskTwo, 
Optional.of(buildTaskStatus(taskTwo)), +1000); Awaitility .await() .atMost(10, TimeUnit.SECONDS) .until(() -> !taskReconciliation.isReconciliationRunning()); } @Test public void testSchedulerPriority() { final SingularityRequest lowPriorityRequest = new SingularityRequestBuilder( "lowPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.25)) .build(); saveRequest(lowPriorityRequest); final SingularityRequest mediumPriorityRequest = new SingularityRequestBuilder( "mediumPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.5)) .build(); saveRequest(mediumPriorityRequest); final SingularityRequest highPriorityRequest = new SingularityRequestBuilder( "highPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.75)) .build(); saveRequest(highPriorityRequest); final SingularityDeploy lowPriorityDeploy = initAndFinishDeploy( lowPriorityRequest, "lowPriorityDeploy" ); final SingularityDeploy mediumPriorityDeploy = initAndFinishDeploy( mediumPriorityRequest, "mediumPriorityDeploy" ); final SingularityDeploy highPriorityDeploy = initAndFinishDeploy( highPriorityRequest, "highPriorityDeploy" ); // Task requests launched at ~ the same time should be in priority order long now = System.currentTimeMillis(); List<SingularityTaskRequest> requestsByPriority = Arrays.asList( buildTaskRequest(lowPriorityRequest, lowPriorityDeploy, now), buildTaskRequest(mediumPriorityRequest, mediumPriorityDeploy, now), buildTaskRequest(highPriorityRequest, highPriorityDeploy, now) ); List<SingularityTaskRequest> sortedRequestsByPriority = taskPrioritizer.getSortedDueTasks( requestsByPriority ); Assertions.assertEquals( sortedRequestsByPriority.get(0).getRequest().getId(), highPriorityRequest.getId() ); Assertions.assertEquals( sortedRequestsByPriority.get(1).getRequest().getId(), mediumPriorityRequest.getId() ); Assertions.assertEquals( sortedRequestsByPriority.get(2).getRequest().getId(), lowPriorityRequest.getId() ); // A lower priority task that is long overdue should be run before a higher priority task now = System.currentTimeMillis(); List<SingularityTaskRequest> requestsByOverdueAndPriority = Arrays.asList( buildTaskRequest(lowPriorityRequest, lowPriorityDeploy, now - 120000), // 2 min overdue buildTaskRequest(mediumPriorityRequest, mediumPriorityDeploy, now - 30000), // 60s overdue buildTaskRequest(highPriorityRequest, highPriorityDeploy, now) ); // Not overdue List<SingularityTaskRequest> sortedRequestsByOverdueAndPriority = taskPrioritizer.getSortedDueTasks( requestsByOverdueAndPriority ); Assertions.assertEquals( sortedRequestsByOverdueAndPriority.get(0).getRequest().getId(), lowPriorityRequest.getId() ); Assertions.assertEquals( sortedRequestsByOverdueAndPriority.get(1).getRequest().getId(), mediumPriorityRequest.getId() ); Assertions.assertEquals( sortedRequestsByOverdueAndPriority.get(2).getRequest().getId(), highPriorityRequest.getId() ); } @Test public void badPauseExpires() { initRequest(); requestManager.createCleanupRequest( new SingularityRequestCleanup( Optional.<String>empty(), RequestCleanupType.PAUSING, System.currentTimeMillis(), Optional.<Boolean>empty(), Optional.empty(), requestId, Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty(), Optional.<SingularityShellCommand>empty() ) ); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.getCleanupRequests().isEmpty()); configuration.setCleanupEverySeconds(0); sleep(1); cleaner.drainCleanupQueue(); 
Assertions.assertTrue(requestManager.getCleanupRequests().isEmpty()); } @Test public void testPauseLbCleanup() { initLoadBalancedRequest(); initFirstDeploy(); requestManager.saveLbCleanupRequest( new SingularityRequestLbCleanup( requestId, Sets.newHashSet("test"), "/basepath", Collections.<String>emptyList(), Optional.<SingularityLoadBalancerUpdate>empty() ) ); requestManager.pause( request, System.currentTimeMillis(), Optional.<String>empty(), Optional.<String>empty() ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.getLbCleanupRequestIds().isEmpty()); Optional<SingularityLoadBalancerUpdate> lbUpdate = requestManager .getLbCleanupRequest(requestId) .get() .getLoadBalancerUpdate(); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.WAITING ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.FAILED); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.getLbCleanupRequestIds().isEmpty()); lbUpdate = requestManager.getLbCleanupRequest(requestId).get().getLoadBalancerUpdate(); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.FAILED ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.SUCCESS); cleaner.drainCleanupQueue(); Assertions.assertTrue(requestManager.getLbCleanupRequestIds().isEmpty()); } @Test public void testPause() { initRequest(); initFirstDeploy(); SingularityTask taskOne = startTask(firstDeploy); requestResource.pause(requestId, Optional.empty(), singularityUser); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getKilledTaskIdRecords().size()); statusUpdate(taskOne, TaskState.TASK_KILLED); resourceOffers(); Assertions.assertEquals(0, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(0, taskManager.getPendingTasks().size()); Assertions.assertEquals( RequestState.PAUSED, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals( requestId, requestManager.getPausedRequests(false).iterator().next().getRequest().getId() ); requestResource.unpause(requestId, Optional.empty(), singularityUser); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(0, taskManager.getPendingTasks().size()); Assertions.assertEquals( RequestState.ACTIVE, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals( requestId, requestManager.getActiveRequests(false).iterator().next().getRequest().getId() ); } @Test public void testBounce() { initRequest(); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(3), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); initFirstDeploy(); SingularityTask taskOne = startTask(firstDeploy, 1); SingularityTask taskTwo = startTask(firstDeploy, 2); SingularityTask taskThree = startTask(firstDeploy, 3); requestResource.bounce(requestId, Optional.empty(), singularityUser); Assertions.assertTrue(requestManager.cleanupRequestExists(requestId)); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.cleanupRequestExists(requestId)); Assertions.assertTrue(taskManager.getCleanupTaskIds().size() == 3); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.cleanupRequestExists(requestId)); 
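// Draining again should not create duplicate cleanups; the three original tasks stay queued for cleanup until their replacements are running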
Assertions.assertTrue(taskManager.getCleanupTaskIds().size() == 3); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().size() == 6); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getCleanupTaskIds().size() == 3); for (SingularityTask task : taskManager.getActiveTasks()) { if ( !task.getTaskId().equals(taskOne.getTaskId()) && !task.getTaskId().equals(taskTwo.getTaskId()) && !task.getTaskId().equals(taskThree.getTaskId()) ) { statusUpdate(task, TaskState.TASK_RUNNING, Optional.of(1L)); } } cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getCleanupTaskIds().isEmpty()); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().size() == 3); } @Test public void testIncrementalBounceShutsDownOldTasksPerNewHealthyTask() { initRequest(); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(3), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); initFirstDeploy(); startTask(firstDeploy, 1); startTask(firstDeploy, 2); startTask(firstDeploy, 3); requestResource.bounce( requestId, Optional.of( new SingularityBounceRequest( Optional.of(true), Optional.empty(), Optional.of(1L), Optional.empty(), Optional.of("msg"), Optional.empty() ) ), singularityUser ); Assertions.assertTrue(requestManager.cleanupRequestExists(requestId)); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.cleanupRequestExists(requestId)); Assertions.assertEquals(3, taskManager.getCleanupTaskIds().size()); SingularityTask newTask = launchTask( request, firstDeploy, 5, TaskState.TASK_STARTING ); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(4, taskManager.getActiveTaskIds().size()); statusUpdate(newTask, TaskState.TASK_RUNNING); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(4, taskManager.getActiveTaskIds().size()); } @Test public void testBounceOnPendingInstancesReleasesLock() { initRequest(); initFirstDeploy(); SingularityTask task = startTask(firstDeploy, 1); statusUpdate(task, TaskState.TASK_FAILED); killKilledTasks(); Assertions.assertEquals( 0, taskManager.getActiveTaskIds().size(), "Bounce starts when tasks have not yet been launched" ); requestResource.bounce( requestId, Optional.of( new SingularityBounceRequest( Optional.empty(), Optional.of(true), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); // It acquires a lock on the bounce Assertions.assertTrue( requestManager.getExpiringBounce(requestId).isPresent(), "Lock on bounce should be acquired during bounce" ); cleaner.drainCleanupQueue(); scheduler.drainPendingQueue(); resourceOffers(); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { taskManager.saveTaskHistoryUpdate( new SingularityTaskHistoryUpdate( singularityTaskId, System.currentTimeMillis(), ExtendedTaskState.TASK_RUNNING, Optional.empty(), Optional.empty(), Collections.emptySet() ) ); } cleaner.drainCleanupQueue(); killKilledTasks(); // It finishes with one task running and the bounce released Assertions.assertEquals( 1, taskManager.getActiveTaskIds().size(), "Should end bounce with target number of tasks" ); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { String statusMessage = taskManager .getTaskHistoryUpdates(singularityTaskId) .get(0) 
.getStatusMessage() .get(); Assertions.assertTrue( statusMessage.contains("BOUNCE"), "Task was started by bounce" ); } Assertions.assertFalse( requestManager.getExpiringBounce(requestId).isPresent(), "Lock on bounce should be released after bounce" ); } @Test public void testBounceOnRunningInstancesReleasesLock() { initRequest(); initFirstDeploy(); startTask(firstDeploy, 1); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); requestResource.bounce( requestId, Optional.of( new SingularityBounceRequest( Optional.empty(), Optional.of(true), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); Assertions.assertTrue(requestManager.isBouncing(requestId)); cleaner.drainCleanupQueue(); // It acquires a lock on the bounce Assertions.assertTrue( requestManager.getExpiringBounce(requestId).isPresent(), "Lock on bounce should be acquired during bounce" ); scheduler.drainPendingQueue(); resourceOffers(); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { taskManager.saveTaskHistoryUpdate( new SingularityTaskHistoryUpdate( singularityTaskId, System.currentTimeMillis(), ExtendedTaskState.TASK_RUNNING, Optional.empty(), Optional.empty(), Collections.emptySet() ) ); } Assertions.assertTrue( taskManager.getActiveTaskIds().size() >= 2, "Need to start at least 1 instance to begin killing old instances" ); Assertions.assertTrue(requestManager.isBouncing(requestId)); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertFalse(requestManager.isBouncing(requestId)); // It finishes with one task running and the bounce released Assertions.assertEquals( 1, taskManager.getActiveTaskIds().size(), "Should end bounce with target number of tasks" ); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { String statusMessage = taskManager .getTaskHistoryUpdates(singularityTaskId) .get(0) .getStatusMessage() .get(); Assertions.assertTrue( statusMessage.contains("BOUNCE"), "Task was started by bounce" ); } Assertions.assertFalse( requestManager.getExpiringBounce(requestId).isPresent(), "Lock on bounce should be released after bounce" ); } @Test public void testBounceReleasesLockWithAlternateCleanupType() { initRequest(); initFirstDeploy(); startTask(firstDeploy, 1); List<SingularityTaskId> activeTaskIds = taskManager.getActiveTaskIds(); Assertions.assertEquals(1, activeTaskIds.size()); SingularityTaskId firstTaskId = activeTaskIds.get(0); requestResource.bounce( requestId, Optional.of( new SingularityBounceRequest( Optional.empty(), Optional.of(true), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); Assertions.assertTrue(requestManager.isBouncing(requestId)); cleaner.drainCleanupQueue(); scheduler.drainPendingQueue(); resourceOffers(); // Save a new cleanup type over the old one, and make sure the bounce lock still releases taskManager.saveTaskCleanup( new SingularityTaskCleanup( Optional.empty(), TaskCleanupType.USER_REQUESTED, System.currentTimeMillis(), firstTaskId, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { taskManager.saveTaskHistoryUpdate( new SingularityTaskHistoryUpdate( singularityTaskId, System.currentTimeMillis(), ExtendedTaskState.TASK_RUNNING, Optional.empty(), Optional.empty(), Collections.emptySet() ) ); } Assertions.assertTrue(requestManager.isBouncing(requestId)); cleaner.drainCleanupQueue(); killKilledTasks(); 
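// The bounce lock should still be released even though the task's cleanup type was overwritten with USER_REQUESTED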
Assertions.assertFalse(requestManager.isBouncing(requestId)); } @Test public void testIncrementalBounce() { initRequest(); resourceOffers(2); // set up slaves so scale validate will pass SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); requestResource.postRequest( request .toBuilder() .setSlavePlacement(Optional.of(SlavePlacement.SEPARATE_BY_REQUEST)) .setInstances(Optional.of(2)) .build(), singularityUser ); initHCDeploy(); SingularityTask taskOne = startSeparatePlacementTask(firstDeploy, 1); SingularityTask taskTwo = startSeparatePlacementTask(firstDeploy, 2); requestManager.createCleanupRequest( new SingularityRequestCleanup( user, RequestCleanupType.INCREMENTAL_BOUNCE, System.currentTimeMillis(), Optional.<Boolean>empty(), Optional.empty(), requestId, Optional.of(firstDeployId), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty(), Optional.<SingularityShellCommand>empty() ) ); Assertions.assertTrue(requestManager.cleanupRequestExists(requestId)); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.cleanupRequestExists(requestId)); Assertions.assertEquals(2, taskManager.getCleanupTaskIds().size()); scheduler.drainPendingQueue(); resourceOffers(3); SingularityTask taskThree = null; for (SingularityTask task : taskManager.getActiveTasks()) { if ( !task.getTaskId().equals(taskOne.getTaskId()) && !task.getTaskId().equals(taskTwo.getTaskId()) ) { taskThree = task; } } statusUpdate(taskThree, TaskState.TASK_RUNNING, Optional.of(1L)); Assertions.assertEquals(3, taskManager.getActiveTaskIds().size()); cleaner.drainCleanupQueue(); // No old tasks should be killed before new ones pass healthchecks Assertions.assertEquals(2, taskManager.getCleanupTaskIds().size()); taskManager.saveHealthcheckResult( new SingularityTaskHealthcheckResult( Optional.of(200), Optional.of(1000L), System.currentTimeMillis(), Optional.<String>empty(), Optional.<String>empty(), taskThree.getTaskId(), Optional.<Boolean>empty() ) ); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getCleanupTaskIds().size()); statusUpdate(taskOne, TaskState.TASK_KILLED); resourceOffers(3); SingularityTask taskFour = null; for (SingularityTask task : taskManager.getActiveTasks()) { if ( !task.getTaskId().equals(taskOne.getTaskId()) && !task.getTaskId().equals(taskTwo.getTaskId()) && !task.getTaskId().equals(taskThree.getTaskId()) ) { taskFour = task; } } statusUpdate(taskFour, TaskState.TASK_RUNNING, Optional.of(1L)); taskManager.saveHealthcheckResult( new SingularityTaskHealthcheckResult( Optional.of(200), Optional.of(1000L), System.currentTimeMillis(), Optional.<String>empty(), Optional.<String>empty(), taskFour.getTaskId(), Optional.<Boolean>empty() ) ); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getCleanupTaskIds().isEmpty()); } @Test public void testScheduledNotification() { schedule = "0 0 * * * ?"; // run every hour initScheduledRequest(); initFirstDeploy(); configuration.setWarnIfScheduledJobIsRunningForAtLeastMillis(Long.MAX_VALUE); configuration.setWarnIfScheduledJobIsRunningPastNextRunPct(200); final long now = System.currentTimeMillis(); SingularityTask firstTask = launchTask( request, firstDeploy, now - TimeUnit.HOURS.toMillis(3), 1, TaskState.TASK_RUNNING ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(0)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), 
ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); configuration.setWarnIfScheduledJobIsRunningForAtLeastMillis( TimeUnit.HOURS.toMillis(1) ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(1)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(1)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); statusUpdate(firstTask, TaskState.TASK_FINISHED); Optional<SingularityDeployStatistics> deployStatistics = deployManager.getDeployStatistics( requestId, firstDeployId ); long oldAvg = deployStatistics.get().getAverageRuntimeMillis().get(); Assertions.assertTrue(deployStatistics.get().getNumTasks() == 1); Assertions.assertTrue( deployStatistics.get().getAverageRuntimeMillis().get() > 1 && deployStatistics.get().getAverageRuntimeMillis().get() < TimeUnit.DAYS.toMillis(1) ); configuration.setWarnIfScheduledJobIsRunningForAtLeastMillis(1); SingularityTask secondTask = launchTask( request, firstDeploy, now - 500, 1, TaskState.TASK_RUNNING ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(1)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); statusUpdate(secondTask, TaskState.TASK_FINISHED); deployStatistics = deployManager.getDeployStatistics(requestId, firstDeployId); Assertions.assertTrue(deployStatistics.get().getNumTasks() == 2); Assertions.assertTrue( deployStatistics.get().getAverageRuntimeMillis().get() > 1 && deployStatistics.get().getAverageRuntimeMillis().get() < oldAvg ); saveRequest( request.toBuilder().setScheduledExpectedRuntimeMillis(Optional.of(1L)).build() ); SingularityTask thirdTask = launchTask( request, firstDeploy, now - 502, 1, TaskState.TASK_RUNNING ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(2)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); taskManager.deleteTaskHistory(thirdTask.getTaskId()); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(3)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); } @Test public void testTaskOddities() { // test unparseable status update TaskStatus.Builder bldr = TaskStatus .newBuilder() .setTaskId(TaskID.newBuilder().setValue("task")) .setAgentId(AgentID.newBuilder().setValue("slave1")) .setState(TaskState.TASK_RUNNING); // should not throw exception: sms.statusUpdate(bldr.build()).join(); initRequest(); initFirstDeploy(); SingularityTask taskOne = launchTask( request, firstDeploy, 1, TaskState.TASK_STARTING ); taskManager.deleteTaskHistory(taskOne.getTaskId()); Assertions.assertTrue(taskManager.isActiveTask(taskOne.getTaskId())); statusUpdate(taskOne, TaskState.TASK_RUNNING); statusUpdate(taskOne, 
TaskState.TASK_FAILED); Assertions.assertTrue(!taskManager.isActiveTask(taskOne.getTaskId())); Assertions.assertEquals( 2, taskManager.getTaskHistoryUpdates(taskOne.getTaskId()).size() ); } @Test public void testOnDemandTasksPersist() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); deployChecker.checkDeploys(); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); resourceOffers(); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(2, taskManager.getActiveTaskIds().size()); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); Assertions.assertEquals(2, taskManager.getPendingTaskIds().size()); resourceOffers(); Assertions.assertEquals(4, taskManager.getActiveTaskIds().size()); } @Test public void testRunNowScheduledJobDoesNotRetry() { initScheduledRequest(); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); SingularityRequest newRequest = request .toBuilder() .setNumRetriesOnFailure(Optional.of(2)) .build(); requestResource.postRequest(newRequest, singularityUser); initFirstDeploy(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().build() ); scheduler.drainPendingQueue(); resourceOffers(); SingularityTask task = taskManager.getActiveTasks().get(0); statusUpdate(task, TaskState.TASK_FAILED); scheduler.drainPendingQueue(); SingularityDeployStatistics deployStatistics = deployManager .getDeployStatistics( task.getTaskId().getRequestId(), task.getTaskId().getDeployId() ) .get(); Assertions.assertEquals( MesosTaskState.TASK_FAILED, deployStatistics.getLastTaskState().get().toTaskState().get() ); Assertions.assertEquals( PendingType.TASK_DONE, taskManager.getPendingTaskIds().get(0).getPendingType() ); Assertions.assertEquals(1, deployStatistics.getNumFailures()); Assertions.assertEquals(0, deployStatistics.getNumSequentialRetries()); Assertions.assertEquals( Optional.<Long>empty(), deployStatistics.getAverageRuntimeMillis() ); } @Test public void testRunNowOnDemandJobDoesNotRetryAfterUserInitiatedPause() { initRequestWithType(RequestType.ON_DEMAND, false); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); SingularityRequest newRequest = request .toBuilder() .setNumRetriesOnFailure(Optional.of(2)) .build(); requestResource.postRequest(newRequest, singularityUser); initFirstDeploy(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setMessage("foo bar").build() ); scheduler.drainPendingQueue(); resourceOffers(); SingularityTask task = taskManager.getActiveTasks().get(0); taskManager.saveTaskHistoryUpdate( new SingularityTaskHistoryUpdate( task.getTaskId(), System.currentTimeMillis(), ExtendedTaskState.TASK_CLEANING, Optional.of("PAUSE"), Optional.empty(), Collections.emptySet() ) ); statusUpdate(task, TaskState.TASK_KILLED); scheduler.drainPendingQueue(); SingularityDeployStatistics deployStatistics = deployManager .getDeployStatistics( 
task.getTaskId().getRequestId(), task.getTaskId().getDeployId() ) .get(); Assertions.assertEquals( MesosTaskState.TASK_KILLED, deployStatistics.getLastTaskState().get().toTaskState().get() ); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); Assertions.assertEquals(0, deployStatistics.getNumFailures()); Assertions.assertEquals(0, deployStatistics.getNumSequentialRetries()); } @Test public void testRunNowOnDemandJobDoesNotRetryAfterUserInitiatedKill() { initRequestWithType(RequestType.ON_DEMAND, false); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); SingularityRequest newRequest = request .toBuilder() .setNumRetriesOnFailure(Optional.of(2)) .build(); requestResource.postRequest(newRequest, singularityUser); initFirstDeploy(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setMessage("foo bar").build() ); scheduler.drainPendingQueue(); resourceOffers(); SingularityTask task = taskManager.getActiveTasks().get(0); taskManager.saveTaskHistoryUpdate( new SingularityTaskHistoryUpdate( task.getTaskId(), System.currentTimeMillis(), ExtendedTaskState.TASK_CLEANING, Optional.of("USER_REQUESTED"), Optional.empty(), Collections.emptySet() ) ); statusUpdate(task, TaskState.TASK_KILLED); scheduler.drainPendingQueue(); SingularityDeployStatistics deployStatistics = deployManager .getDeployStatistics( task.getTaskId().getRequestId(), task.getTaskId().getDeployId() ) .get(); Assertions.assertEquals( MesosTaskState.TASK_KILLED, deployStatistics.getLastTaskState().get().toTaskState().get() ); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); Assertions.assertEquals(0, deployStatistics.getNumFailures()); Assertions.assertEquals(0, deployStatistics.getNumSequentialRetries()); } @Test public void testRunNowOnDemandJobMayRetryOnFailure() { initRequestWithType(RequestType.ON_DEMAND, false); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); SingularityRequest newRequest = request .toBuilder() .setNumRetriesOnFailure(Optional.of(2)) .build(); requestResource.postRequest(newRequest, singularityUser); initFirstDeploy(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setMessage("foo bar").build() ); scheduler.drainPendingQueue(); resourceOffers(); SingularityTask task = taskManager.getActiveTasks().get(0); statusUpdate(task, TaskState.TASK_FAILED); scheduler.drainPendingQueue(); SingularityDeployStatistics deployStatistics = deployManager .getDeployStatistics( task.getTaskId().getRequestId(), task.getTaskId().getDeployId() ) .get(); Assertions.assertEquals( MesosTaskState.TASK_FAILED, deployStatistics.getLastTaskState().get().toTaskState().get() ); Assertions.assertEquals( PendingType.RETRY, taskManager.getPendingTaskIds().get(0).getPendingType() ); Assertions.assertEquals( "foo bar", taskManager.getPendingTasks().get(0).getMessage().get() ); Assertions.assertEquals(1, deployStatistics.getNumFailures()); Assertions.assertEquals(1, deployStatistics.getNumSequentialRetries()); } @Test public void testRunNowOnDemandJobsDoNotRetryAfterUserRequestedKill() { initRequestWithType(RequestType.ON_DEMAND, false); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); SingularityRequest newRequest = request .toBuilder() .setNumRetriesOnFailure(Optional.of(2)) .build(); requestResource.postRequest(newRequest, singularityUser); 
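// Launch a single run of the on-demand job, then kill it via a USER_REQUESTED task cleanup; no retry should be scheduled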
initFirstDeploy(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setMessage("foo bar").build() ); scheduler.drainPendingQueue(); resourceOffers(); SingularityTask task = taskManager.getActiveTasks().get(0); taskManager.saveTaskCleanup( new SingularityTaskCleanup( Optional.of(singularityUser.getId()), TaskCleanupType.USER_REQUESTED, System.currentTimeMillis(), task.getTaskId(), Optional.empty(), Optional.empty(), Optional.empty() ) ); cleaner.drainCleanupQueue(); statusUpdate(task, TaskState.TASK_KILLED); scheduler.drainPendingQueue(); SingularityDeployStatistics deployStatistics = deployManager .getDeployStatistics( task.getTaskId().getRequestId(), task.getTaskId().getDeployId() ) .get(); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( MesosTaskState.TASK_KILLED, deployStatistics.getLastTaskState().get().toTaskState().get() ); Assertions.assertEquals(0, deployStatistics.getNumFailures()); Assertions.assertEquals(0, deployStatistics.getNumSequentialRetries()); } @Test public void testOnDemandRunNowJobRespectsSpecifiedRunAtTime() { initOnDemandRequest(); initFirstDeploy(); long requestedLaunchTime = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(10); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setRunAt(requestedLaunchTime).build() ); scheduler.drainPendingQueue(); SingularityPendingTaskId task = taskManager.getPendingTaskIds().get(0); long runAt = task.getNextRunAt(); Assertions.assertEquals(requestedLaunchTime, runAt); } @Test public void testScheduledRunNowJobRespectsSpecifiedRunAtTime() { initScheduledRequest(); initFirstDeploy(); long requestedLaunchTime = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(10); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setRunAt(requestedLaunchTime).build() ); scheduler.drainPendingQueue(); SingularityPendingTaskId task = taskManager.getPendingTaskIds().get(0); long runAt = task.getNextRunAt(); Assertions.assertEquals(requestedLaunchTime, runAt); } @Test public void testJobRescheduledWhenItFinishesDuringDecommission() { initScheduledRequest(); initFirstDeploy(); resourceOffers(); SingularityTask task = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); slaveManager.changeState( "slave1", MachineState.STARTING_DECOMMISSION, Optional.<String>empty(), Optional.of("user1") ); cleaner.drainCleanupQueue(); resourceOffers(); cleaner.drainCleanupQueue(); statusUpdate(task, TaskState.TASK_FINISHED); scheduler.drainPendingQueue(); Assertions.assertTrue(!taskManager.getPendingTaskIds().isEmpty()); } @Test public void testScaleDownTakesHighestInstances() { initRequest(); initFirstDeploy(); saveAndSchedule(request.toBuilder().setInstances(Optional.of(5))); resourceOffers(); Assertions.assertEquals(5, taskManager.getActiveTaskIds().size()); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(2), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); resourceOffers(); cleaner.drainCleanupQueue(); Assertions.assertEquals(3, taskManager.getKilledTaskIdRecords().size()); for (SingularityKilledTaskIdRecord taskId : taskManager.getKilledTaskIdRecords()) { Assertions.assertTrue(taskId.getTaskId().getInstanceNo() > 2); scheduler.drainPendingQueue(); } } @Test public void testScaleDownTakesHighestInstancesWithPendingTask() { 
initRequest(); initFirstDeploy(); saveAndSchedule(request.toBuilder().setInstances(Optional.of(5))); resourceOffers(); Assertions.assertEquals(5, taskManager.getActiveTaskIds().size()); SingularityTaskId instance2 = null; for (SingularityTaskId taskId : taskManager.getActiveTaskIds()) { if (taskId.getInstanceNo() == 2) { instance2 = taskId; } } statusUpdate(taskManager.getTask(instance2).get(), TaskState.TASK_KILLED); killKilledTasks(); scheduler.drainPendingQueue(); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(3), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); cleaner.drainCleanupQueue(); // instances 4 and 5 should get killed Assertions.assertEquals(2, taskManager.getKilledTaskIdRecords().size()); killKilledTasks(); resourceOffers(); // instances 1,2,3 should be active Assertions.assertEquals(3, taskManager.getActiveTaskIds().size()); for (SingularityTaskId taskId : taskManager.getActiveTaskIds()) { Assertions.assertTrue(taskId.getInstanceNo() < 4); } } @Test public void testRequestsInPendingQueueAreOrderedByTimestamp() { long now = System.currentTimeMillis(); initRequestWithType(RequestType.SCHEDULED, false); startFirstDeploy(); SingularityPendingRequest pendingDeployRequest = new SingularityPendingRequest( requestId, firstDeploy.getId(), now, Optional.empty(), PendingType.NEW_DEPLOY, firstDeploy.getSkipHealthchecksOnDeploy(), Optional.empty() ); SingularityPendingRequest pendingRunNowRequest = new SingularityPendingRequest( requestId, firstDeploy.getId(), now + 200, Optional.empty(), PendingType.IMMEDIATE, firstDeploy.getSkipHealthchecksOnDeploy(), Optional.empty() ); requestManager.addToPendingQueue(pendingDeployRequest); requestManager.addToPendingQueue(pendingRunNowRequest); Assertions.assertEquals(2, requestManager.getPendingRequests().size()); // Was added first Assertions.assertEquals( PendingType.NEW_DEPLOY, requestManager.getPendingRequests().get(0).getPendingType() ); // Was added second Assertions.assertEquals( PendingType.IMMEDIATE, requestManager.getPendingRequests().get(1).getPendingType() ); resourceOffers(); } @Test public void testImmediateRequestsAreConsistentlyDeleted() { long now = System.currentTimeMillis(); initRequestWithType(RequestType.SCHEDULED, false); startFirstDeploy(); SingularityPendingRequest pendingDeployRequest = new SingularityPendingRequest( requestId, firstDeploy.getId(), now, Optional.empty(), PendingType.NEW_DEPLOY, firstDeploy.getSkipHealthchecksOnDeploy(), Optional.empty() ); SingularityPendingRequest pendingRunNowRequest = new SingularityPendingRequest( requestId, firstDeploy.getId(), now + 200, Optional.empty(), PendingType.IMMEDIATE, firstDeploy.getSkipHealthchecksOnDeploy(), Optional.empty() ); requestManager.addToPendingQueue(pendingDeployRequest); requestManager.addToPendingQueue(pendingRunNowRequest); // Pending queue has two requests: NEW_DEPLOY & IMMEDIATE Assertions.assertEquals(2, requestManager.getPendingRequests().size()); requestManager.deletePendingRequest(pendingDeployRequest); // Just the immediate run Assertions.assertEquals(1, requestManager.getPendingRequests().size()); requestManager.deletePendingRequest(pendingRunNowRequest); // Immediate run was successfully deleted Assertions.assertEquals(0, requestManager.getPendingRequests().size()); } @Test public void testWaitAfterTaskWorks() { initRequest(); initFirstDeploy(); SingularityTask task = launchTask(request, 
firstDeploy, 1, TaskState.TASK_RUNNING); statusUpdate(task, TaskState.TASK_FAILED); scheduler.drainPendingQueue(); Assertions.assertTrue( taskManager.getPendingTaskIds().get(0).getNextRunAt() - System.currentTimeMillis() < 1000L ); resourceOffers(); long extraWait = 100000L; saveAndSchedule( request .toBuilder() .setWaitAtLeastMillisAfterTaskFinishesForReschedule(Optional.of(extraWait)) .setInstances(Optional.of(2)) ); resourceOffers(); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_FAILED); scheduler.drainPendingQueue(); Assertions.assertTrue( taskManager.getPendingTaskIds().get(0).getNextRunAt() - System.currentTimeMillis() > 1000L ); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); } @Test public void testRemovedRequestData() { long now = System.currentTimeMillis(); initRequest(); SingularityDeployBuilder db = new SingularityDeployBuilder(requestId, firstDeployId); db.setMaxTaskRetries(Optional.of(1)); initDeploy(db, now); deployChecker.checkDeploys(); Assertions.assertEquals( DeployState.WAITING, deployManager.getPendingDeploys().get(0).getCurrentDeployState() ); requestManager.startDeletingRequest( request, Optional.empty(), Optional.<String>empty(), Optional.<String>empty(), Optional.<String>empty() ); requestManager.markDeleted( request, now, Optional.<String>empty(), Optional.<String>empty() ); deployChecker.checkDeploys(); SingularityDeployResult deployResult = deployManager .getDeployResult(requestId, firstDeployId) .get(); Assertions.assertEquals(DeployState.FAILED, deployResult.getDeployState()); Assertions.assertTrue(deployResult.getMessage().get().contains("MISSING")); } @Test public void itCorrectlyUpdatesRequestDeletingStateHistory() { initRequest(); Assertions.assertEquals( RequestState.ACTIVE, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals(1, requestManager.getRequestHistory(requestId).size()); requestManager.startDeletingRequest( request, Optional.empty(), Optional.<String>empty(), Optional.<String>empty(), Optional.of("the cake is a lie") ); Assertions.assertEquals( RequestState.DELETING, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals(2, requestManager.getRequestHistory(requestId).size()); cleaner.drainCleanupQueue(); Assertions.assertEquals(3, requestManager.getRequestHistory(requestId).size()); List<RequestHistoryType> historyTypes = new ArrayList<>(); for (SingularityRequestHistory request : requestManager.getRequestHistory( requestId )) { historyTypes.add(request.getEventType()); } Assertions.assertTrue(historyTypes.contains(RequestHistoryType.CREATED)); Assertions.assertTrue(historyTypes.contains(RequestHistoryType.DELETING)); Assertions.assertTrue(historyTypes.contains(RequestHistoryType.DELETED)); } @Test public void itSetsRequestStateToDeletedAfterAllTasksAreCleanedUp() { initRequest(); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(2)).build(), singularityUser ); initFirstDeploy(); launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); launchTask(request, firstDeploy, 2, TaskState.TASK_RUNNING); Assertions.assertEquals( requestId, requestManager.getActiveRequests().iterator().next().getRequest().getId() ); Assertions.assertEquals(2, taskManager.getActiveTaskIds().size()); requestManager.startDeletingRequest( request, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ); Assertions.assertEquals( requestId, 
requestManager.getCleanupRequests().get(0).getRequestId() ); Assertions.assertEquals( RequestState.DELETING, requestManager.getRequest(requestId).get().getState() ); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getCleanupTaskIds().size()); killKilledTasks(); cleaner.drainCleanupQueue(); Assertions.assertFalse(requestManager.getRequest(requestId).isPresent()); } @Test public void itSetsRequestStateToDeletedIfTaskCleanupFails() { initRequest(); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(2)).build(), singularityUser ); initFirstDeploy(); SingularityTask firstTask = launchTask( request, firstDeploy, 1, TaskState.TASK_RUNNING ); launchTask(request, firstDeploy, 2, TaskState.TASK_RUNNING); Assertions.assertEquals( requestId, requestManager.getActiveRequests().iterator().next().getRequest().getId() ); Assertions.assertEquals(2, taskManager.getActiveTaskIds().size()); requestManager.startDeletingRequest( request, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ); Assertions.assertEquals( requestId, requestManager.getCleanupRequests().get(0).getRequestId() ); Assertions.assertEquals( RequestState.DELETING, requestManager.getRequest(requestId).get().getState() ); statusUpdate(firstTask, TaskState.TASK_FAILED); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getCleanupTaskIds().size()); killKilledTasks(); cleaner.drainCleanupQueue(); Assertions.assertFalse(requestManager.getRequest(requestId).isPresent()); } @Test public void testMaxTasksPerOffer() { configuration.setMaxTasksPerOffer(3); initRequest(); initFirstDeploy(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(20)).build(), singularityUser ); scheduler.drainPendingQueue(); sms.resourceOffers(Arrays.asList(createOffer(36, 12024, 50000))).join(); Assertions.assertTrue(taskManager.getActiveTasks().size() == 3); sms .resourceOffers( Arrays.asList( createOffer(20, 20000, 50000, "slave1", "host1"), createOffer(20, 20000, 50000, "slave2", "host2") ) ) .join(); Assertions.assertTrue(taskManager.getActiveTasks().size() == 9); configuration.setMaxTasksPerOffer(0); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTasks().size() == 20); } @Test public void testRequestedPorts() { final SingularityDeployBuilder deployBuilder = dockerDeployWithPorts(); initRequest(); initAndFinishDeploy(request, deployBuilder, Optional.of(new Resources(1, 64, 3, 0))); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(2)).build(), singularityUser ); scheduler.drainPendingQueue(); String[] portRangeWithNoRequestedPorts = { "65:70" }; sms .resourceOffers( Arrays.asList( createOffer( 20, 20000, 50000, "slave1", "host1", Optional.<String>empty(), Collections.<String, String>emptyMap(), portRangeWithNoRequestedPorts ) ) ) .join(); Assertions.assertEquals(0, taskManager.getActiveTasks().size()); String[] portRangeWithSomeRequestedPorts = { "80:82" }; sms .resourceOffers( Arrays.asList( createOffer( 20, 20000, 50000, "slave1", "host1", Optional.<String>empty(), Collections.<String, String>emptyMap(), portRangeWithSomeRequestedPorts ) ) ) .join(); Assertions.assertEquals(0, taskManager.getActiveTasks().size()); String[] portRangeWithRequestedButNotEnoughPorts = { "80:80", "8080:8080" }; sms .resourceOffers( Arrays.asList( createOffer( 20, 20000, 50000, "slave1", 
"host1", Optional.<String>empty(), Collections.<String, String>emptyMap(), portRangeWithRequestedButNotEnoughPorts ) ) ) .join(); Assertions.assertEquals(0, taskManager.getActiveTasks().size()); String[] portRangeWithNeededPorts = { "80:83", "8080:8080" }; sms .resourceOffers( Arrays.asList( createOffer( 20, 20000, 50000, "slave1", "host1", Optional.<String>empty(), Collections.<String, String>emptyMap(), portRangeWithNeededPorts ) ) ) .join(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); } private SingularityDeployBuilder dockerDeployWithPorts() { final SingularityDockerPortMapping literalMapping = new SingularityDockerPortMapping( Optional.<SingularityPortMappingType>empty(), 80, Optional.of(SingularityPortMappingType.LITERAL), 8080, Optional.<String>empty() ); final SingularityDockerPortMapping offerMapping = new SingularityDockerPortMapping( Optional.<SingularityPortMappingType>empty(), 81, Optional.of(SingularityPortMappingType.FROM_OFFER), 0, Optional.of("udp") ); final SingularityContainerInfo containerInfo = new SingularityContainerInfo( SingularityContainerType.DOCKER, Optional.<List<SingularityVolume>>empty(), Optional.of( new SingularityDockerInfo( "docker-image", true, SingularityDockerNetworkType.BRIDGE, Optional.of(Arrays.asList(literalMapping, offerMapping)), Optional.of(false), Optional.of(ImmutableMap.of("env", "var=value")), Optional.empty() ) ) ); final SingularityDeployBuilder deployBuilder = new SingularityDeployBuilder( requestId, "test-docker-ports-deploy" ); deployBuilder.setContainerInfo(Optional.of(containerInfo)); return deployBuilder; } @Test public void testQueueMultipleOneOffs() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("on_demand_deploy"); deployChecker.checkDeploys(); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, "on_demand_deploy", System.currentTimeMillis(), Optional.<String>empty(), PendingType.ONEOFF, Optional.<List<String>>empty(), Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty() ) ); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, "on_demand_deploy", System.currentTimeMillis(), Optional.<String>empty(), PendingType.ONEOFF, Optional.<List<String>>empty(), Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty() ) ); scheduler.drainPendingQueue(); Assertions.assertEquals(2, taskManager.getPendingTaskIds().size()); } @Test public void testPriorityFreezeKillsActiveTasks() { final SingularityRequest lowPriorityRequest = new SingularityRequestBuilder( "lowPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.25)) .build(); saveRequest(lowPriorityRequest); final SingularityRequest mediumPriorityRequest = new SingularityRequestBuilder( "mediumPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.5)) .build(); saveRequest(mediumPriorityRequest); final SingularityRequest highPriorityRequest = new SingularityRequestBuilder( "highPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.75)) .build(); saveRequest(highPriorityRequest); final SingularityDeploy lowPriorityDeploy = initAndFinishDeploy( lowPriorityRequest, "lowPriorityDeploy" ); final SingularityDeploy mediumPriorityDeploy = initAndFinishDeploy( mediumPriorityRequest, "mediumPriorityDeploy" ); SingularityDeploy highPriorityDeploy = 
initAndFinishDeploy( highPriorityRequest, "highPriorityDeploy" ); final SingularityTask lowPriorityTask = launchTask( lowPriorityRequest, lowPriorityDeploy, 2, 1, TaskState.TASK_RUNNING ); final SingularityTask mediumPriorityTask = launchTask( mediumPriorityRequest, mediumPriorityDeploy, 1, 1, TaskState.TASK_RUNNING ); final SingularityTask highPriorityTask = launchTask( highPriorityRequest, highPriorityDeploy, 10, 1, TaskState.TASK_RUNNING ); // priority freeze of .5 means that lowPriorityRequest's task should have a cleanup priorityResource.createPriorityFreeze( singularityUser, new SingularityPriorityFreeze(.5, true, Optional.of("test"), Optional.empty()) ); // perform the killing priorityKillPoller.runActionOnPoll(); // assert lowPriorityRequest has a PRIORITY_KILL task cleanup and that mediumPriorityRequest and highPriorityRequest should not have cleanups Assertions.assertEquals( TaskCleanupType.PRIORITY_KILL, taskManager .getTaskCleanup(lowPriorityTask.getTaskId().getId()) .get() .getCleanupType() ); Assertions.assertEquals( false, taskManager.getTaskCleanup(mediumPriorityTask.getTaskId().getId()).isPresent() ); Assertions.assertEquals( false, taskManager.getTaskCleanup(highPriorityTask.getTaskId().getId()).isPresent() ); // kill task(s) with cleanups cleaner.drainCleanupQueue(); killKilledTasks(); // assert lowPriorityTask was killed, mediumPriorityTask and highPriorityTask are still running Assertions.assertEquals( ExtendedTaskState.TASK_KILLED, taskManager .getTaskHistory(lowPriorityTask.getTaskId()) .get() .getLastTaskUpdate() .get() .getTaskState() ); Assertions.assertEquals( ExtendedTaskState.TASK_RUNNING, taskManager .getTaskHistory(mediumPriorityTask.getTaskId()) .get() .getLastTaskUpdate() .get() .getTaskState() ); Assertions.assertEquals( ExtendedTaskState.TASK_RUNNING, taskManager .getTaskHistory(highPriorityTask.getTaskId()) .get() .getLastTaskUpdate() .get() .getTaskState() ); // assert lowPriorityRequest has a pending task final SingularityPendingTaskId pendingTaskId = taskManager.getPendingTaskIds().get(0); Assertions.assertEquals(PendingType.TASK_DONE, pendingTaskId.getPendingType()); Assertions.assertEquals(lowPriorityRequest.getId(), pendingTaskId.getRequestId()); // end the priority freeze priorityResource.deleteActivePriorityFreeze(singularityUser); // launch task(s) scheduler.drainPendingQueue(); resourceOffers(); // assert lowPriorityRequest has a new task running Assertions.assertNotEquals( lowPriorityTask.getTaskId(), taskManager.getActiveTaskIdsForRequest(lowPriorityRequest.getId()).get(0).getId() ); } @Test public void testPriorityFreezeDoesntLaunchTasks() { // deploy lowPriorityRequest (affected by priority freeze) final SingularityRequest lowPriorityRequest = new SingularityRequestBuilder( "lowPriorityRequest", RequestType.ON_DEMAND ) .setTaskPriorityLevel(Optional.of(.25)) .build(); saveRequest(lowPriorityRequest); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(lowPriorityRequest.getId(), "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); // deploy medium priority request (NOT affected by priority freeze) final SingularityRequest mediumPriorityRequest = new SingularityRequestBuilder( "mediumPriorityRequest", RequestType.ON_DEMAND ) .setTaskPriorityLevel(Optional.of(.5)) .build(); saveRequest(mediumPriorityRequest); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(mediumPriorityRequest.getId(), "d2") .setCommand(Optional.of("cmd")) 
.build(), Optional.empty(), Optional.empty() ), singularityUser ); // create priority freeze priorityManager.createPriorityFreeze( new SingularityPriorityFreezeParent( new SingularityPriorityFreeze( 0.3, true, Optional.<String>empty(), Optional.<String>empty() ), System.currentTimeMillis(), Optional.<String>empty() ) ); // launch both tasks requestResource.scheduleImmediately( singularityUser, lowPriorityRequest.getId(), ((SingularityRunNowRequest) null) ); requestResource.scheduleImmediately( singularityUser, mediumPriorityRequest.getId(), ((SingularityRunNowRequest) null) ); // drain pending queue scheduler.drainPendingQueue(); resourceOffers(); // assert that lowPriorityRequest has a pending task Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( lowPriorityRequest.getId(), taskManager.getPendingTaskIds().get(0).getRequestId() ); // assert that only mediumPriorityRequest has an active task Assertions.assertEquals( 0, taskManager.getActiveTaskIdsForRequest(lowPriorityRequest.getId()).size() ); Assertions.assertEquals( 1, taskManager.getActiveTaskIdsForRequest(mediumPriorityRequest.getId()).size() ); // delete priority freeze Assertions.assertEquals( SingularityDeleteResult.DELETED, priorityManager.deleteActivePriorityFreeze() ); // drain pending scheduler.drainPendingQueue(); resourceOffers(); // check that both requests have active tasks Assertions.assertEquals( 1, taskManager.getActiveTaskIdsForRequest(lowPriorityRequest.getId()).size() ); Assertions.assertEquals( 1, taskManager.getActiveTaskIdsForRequest(mediumPriorityRequest.getId()).size() ); } @Test public void testObsoletePendingRequestsRemoved() { initRequest(); initFirstDeploy(); SingularityTask taskOne = startTask(firstDeploy); requestResource.pause(requestId, Optional.empty(), singularityUser); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, firstDeployId, System.currentTimeMillis(), Optional.<String>empty(), PendingType.NEW_DEPLOY, Optional.<Boolean>empty(), Optional.<String>empty() ) ); Assertions.assertEquals(requestManager.getPendingRequests().size(), 1); scheduler.drainPendingQueue(); Assertions.assertEquals(requestManager.getPendingRequests().size(), 0); } @Test public void testCronScheduleChanges() throws Exception { final String requestId = "test-change-cron"; final String oldSchedule = "*/5 * * * *"; final String oldScheduleQuartz = "0 */5 * * * ?"; final String newSchedule = "*/30 * * * *"; final String newScheduleQuartz = "0 */30 * * * ?"; SingularityRequest request = new SingularityRequestBuilder( requestId, RequestType.SCHEDULED ) .setSchedule(Optional.of(oldSchedule)) .build(); request = validator.checkSingularityRequest( request, Optional.<SingularityRequest>empty(), Optional.<SingularityDeploy>empty(), Optional.<SingularityDeploy>empty() ); saveRequest(request); Assertions.assertEquals( oldScheduleQuartz, requestManager.getRequest(requestId).get().getRequest().getQuartzScheduleSafe() ); initAndFinishDeploy(request, "1"); scheduler.drainPendingQueue(); final SingularityRequest newRequest = request .toBuilder() .setSchedule(Optional.of(newSchedule)) .setQuartzSchedule(Optional.<String>empty()) .build(); final SingularityDeploy newDeploy = new SingularityDeployBuilder(request.getId(), "2") .setCommand(Optional.of("sleep 100")) .build(); deployResource.deploy( new SingularityDeployRequest( newDeploy, Optional.empty(), Optional.empty(), Optional.of(newRequest) ), singularityUser ); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); 
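// After the new deploy completes, the request should report the updated cron schedule translated to its quartz equivalent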
Assertions.assertEquals( newScheduleQuartz, requestManager.getRequest(requestId).get().getRequest().getQuartzScheduleSafe() ); } @Test public void testImmediateRunReplacesScheduledTask() { initScheduledRequest(); SingularityDeploy deploy = SingularityDeploy .newBuilder(requestId, firstDeployId) .setCommand(Optional.of("sleep 100")) .build(); SingularityDeployRequest singularityDeployRequest = new SingularityDeployRequest( deploy, Optional.empty(), Optional.empty(), Optional.empty() ); deployResource.deploy(singularityDeployRequest, singularityUser); scheduler.drainPendingQueue(); SingularityPendingTask task1 = createAndSchedulePendingTask(firstDeployId); Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( PendingType.NEW_DEPLOY, taskManager.getPendingTaskIds().get(0).getPendingType() ); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, deploy.getId(), System.currentTimeMillis(), Optional.empty(), PendingType.IMMEDIATE, deploy.getSkipHealthchecksOnDeploy(), Optional.empty() ) ); scheduler.drainPendingQueue(); Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( PendingType.IMMEDIATE, taskManager.getPendingTaskIds().get(0).getPendingType() ); } @Test public void testSchedulerDropsMultipleScheduledTaskInstances() { initScheduledRequest(); SingularityDeploy deploy = SingularityDeploy .newBuilder(requestId, firstDeployId) .setCommand(Optional.of("sleep 100")) .build(); SingularityDeployRequest singularityDeployRequest = new SingularityDeployRequest( deploy, Optional.empty(), Optional.empty(), Optional.empty() ); deployResource.deploy(singularityDeployRequest, singularityUser); scheduler.drainPendingQueue(); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, firstDeployId, Instant.now().plus(3, ChronoUnit.DAYS).toEpochMilli(), Optional.empty(), PendingType.NEW_DEPLOY, Optional.empty(), Optional.empty() ) ); SingularityRunNowRequest runNowRequest = new SingularityRunNowRequestBuilder() .build(); requestResource.scheduleImmediately(singularityUser, requestId, runNowRequest); Assertions.assertEquals(2, requestManager.getPendingRequests().size()); Assertions.assertEquals( PendingType.IMMEDIATE, requestManager.getPendingRequests().get(0).getPendingType() ); Assertions.assertEquals( PendingType.NEW_DEPLOY, requestManager.getPendingRequests().get(1).getPendingType() ); scheduler.drainPendingQueue(); Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( PendingType.IMMEDIATE, taskManager.getPendingTaskIds().get(0).getPendingType() ); Assertions.assertEquals(0, requestManager.getPendingRequests().size()); } @Test public void testInvalidQuartzTimeZoneErrors() { SingularityRequest req = new SingularityRequestBuilder( requestId, RequestType.SCHEDULED ) .setQuartzSchedule(Optional.of("*/1 * * * * ? 2020")) .setScheduleType(Optional.of(ScheduleType.QUARTZ)) .setScheduleTimeZone(Optional.of("invalid_timezone")) .build(); Assertions.assertThrows( WebApplicationException.class, () -> requestResource.postRequest(req, singularityUser) ); } @Test public void testDifferentQuartzTimeZones() { final Optional<String> schedule = Optional.of("* 30 14 22 3 ? 
2083"); SingularityRequest requestEST = new SingularityRequestBuilder( "est_id", RequestType.SCHEDULED ) .setSchedule(schedule) .setScheduleType(Optional.of(ScheduleType.QUARTZ)) .setScheduleTimeZone(Optional.of("EST")) // fixed in relation to GMT .build(); SingularityRequest requestGMT = new SingularityRequestBuilder( "gmt_id", RequestType.SCHEDULED ) .setSchedule(schedule) .setScheduleType(Optional.of(ScheduleType.QUARTZ)) .setScheduleTimeZone(Optional.of("GMT")) .build(); requestResource.postRequest(requestEST, singularityUser); requestResource.postRequest(requestGMT, singularityUser); SingularityDeploy deployEST = new SingularityDeployBuilder( requestEST.getId(), "est_deploy_id" ) .setCommand(Optional.of("sleep 1")) .build(); SingularityDeploy deployGMT = new SingularityDeployBuilder( requestGMT.getId(), "gmt_deploy_id" ) .setCommand(Optional.of("sleep 1")) .build(); deployResource.deploy( new SingularityDeployRequest( deployEST, Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); deployResource.deploy( new SingularityDeployRequest( deployGMT, Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); final long nextRunEST; final long nextRunGMT; final long fiveHoursInMilliseconds = TimeUnit.HOURS.toMillis(5); final List<SingularityPendingTaskId> pendingTaskIds = taskManager.getPendingTaskIds(); if (pendingTaskIds.get(0).getRequestId().equals(requestEST.getId())) { nextRunEST = pendingTaskIds.get(0).getNextRunAt(); nextRunGMT = pendingTaskIds.get(1).getNextRunAt(); } else { nextRunEST = pendingTaskIds.get(1).getNextRunAt(); nextRunGMT = pendingTaskIds.get(0).getNextRunAt(); } // GMT happens first, so EST is a larger timestamp Assertions.assertEquals(nextRunEST - nextRunGMT, fiveHoursInMilliseconds); } @Test public void testDeployCleanupOverwritesTaskBounceCleanup() { initRequest(); initFirstDeploy(); final SingularityTask oldTask = startTask(firstDeploy); taskResource.killTask( oldTask.getTaskId().getId(), Optional.of( new SingularityKillTaskRequest( Optional.empty(), Optional.empty(), Optional.empty(), Optional.of(true), Optional.empty() ) ), singularityUser ); final Optional<SingularityTaskCleanup> taskCleanup = taskManager.getTaskCleanup( oldTask.getTaskId().getId() ); Assertions.assertTrue(taskCleanup.isPresent()); Assertions.assertEquals( TaskCleanupType.USER_REQUESTED_TASK_BOUNCE, taskCleanup.get().getCleanupType() ); initSecondDeploy(); startTask(secondDeploy); deployChecker.checkDeploys(); Assertions.assertEquals( DeployState.SUCCEEDED, deployManager.getDeployResult(requestId, secondDeployId).get().getDeployState() ); Assertions.assertEquals( TaskCleanupType.DEPLOY_STEP_FINISHED, taskManager.getTaskCleanup(oldTask.getTaskId().getId()).get().getCleanupType() ); cleaner.drainCleanupQueue(); Assertions.assertFalse( taskManager.getTaskCleanup(oldTask.getTaskId().getId()).isPresent() ); } @Test public void testCleanerFindsTasksWithSkippedHealthchecks() { initRequest(); resourceOffers(2); // set up slaves so scale validate will pass SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); long now = System.currentTimeMillis(); requestManager.saveHistory( new SingularityRequestHistory( now, Optional.<String>empty(), RequestHistoryType.UPDATED, request .toBuilder() .setSkipHealthchecks(Optional.of(true)) .setInstances(Optional.of(2)) .build(), Optional.<String>empty() ) ); firstDeploy = initDeploy( new SingularityDeployBuilder(request.getId(), 
firstDeployId) .setCommand(Optional.of("sleep 100")) .setHealthcheckUri(Optional.of("http://uri")), System.currentTimeMillis() ); SingularityTask taskOne = launchTask( request, firstDeploy, now + 1000, now + 2000, 1, TaskState.TASK_RUNNING ); finishDeploy( new SingularityDeployMarker( requestId, firstDeployId, now + 2000, Optional.<String>empty(), Optional.<String>empty() ), firstDeploy ); SingularityRequest updatedRequest = request .toBuilder() .setSkipHealthchecks(Optional.<Boolean>empty()) .setInstances(Optional.of(2)) .build(); requestManager.saveHistory( new SingularityRequestHistory( now + 3000, Optional.<String>empty(), RequestHistoryType.UPDATED, updatedRequest, Optional.<String>empty() ) ); SingularityTask newTaskTwoWithCheck = prepTask( updatedRequest, firstDeploy, now + 4000, 2 ); taskManager.createTaskAndDeletePendingTask(newTaskTwoWithCheck); statusUpdate(newTaskTwoWithCheck, TaskState.TASK_RUNNING, Optional.of(now + 5000)); taskManager.saveHealthcheckResult( new SingularityTaskHealthcheckResult( Optional.of(200), Optional.of(1000L), now + 6000, Optional.<String>empty(), Optional.<String>empty(), newTaskTwoWithCheck.getTaskId(), Optional.<Boolean>empty() ) ); SingularityTask unhealthyTaskThree = prepTask( updatedRequest, firstDeploy, now + 4000, 3 ); taskManager.createTaskAndDeletePendingTask(unhealthyTaskThree); statusUpdate(unhealthyTaskThree, TaskState.TASK_RUNNING, Optional.of(now + 5000)); List<SingularityTaskId> activeTaskIds = taskManager.getActiveTaskIdsForRequest( requestId ); List<SingularityTaskId> healthyTaskIds = deployHealthHelper.getHealthyTasks( updatedRequest, Optional.of(firstDeploy), activeTaskIds, false ); Assertions.assertTrue(!healthyTaskIds.contains(unhealthyTaskThree.getTaskId())); Assertions.assertEquals(2, healthyTaskIds.size()); // Healthchecked and skip-healthchecked tasks should both be here Assertions.assertEquals( DeployHealth.WAITING, deployHealthHelper.getDeployHealth( updatedRequest, Optional.of(firstDeploy), activeTaskIds, false ) ); taskManager.saveHealthcheckResult( new SingularityTaskHealthcheckResult( Optional.of(200), Optional.of(1000L), now + 6000, Optional.<String>empty(), Optional.<String>empty(), unhealthyTaskThree.getTaskId(), Optional.<Boolean>empty() ) ); Assertions.assertEquals( DeployHealth.HEALTHY, deployHealthHelper.getDeployHealth( updatedRequest, Optional.of(firstDeploy), activeTaskIds, false ) ); } @Test public void testScaleWithBounceDoesNotLaunchExtraInstances() { initRequest(); initFirstDeploy(); launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(5), Optional.of(1L), Optional.empty(), Optional.empty(), Optional.empty(), Optional.of(true), Optional.empty(), Optional.empty() ), singularityUser ); Assertions.assertEquals(1, requestManager.getCleanupRequests().size()); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); scheduler.drainPendingQueue(); Assertions.assertEquals(5, taskManager.getPendingTaskIds().size()); } @Test public void testAcceptOffersWithRoleForRequestWithRole() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); bldr.setRequiredRole(Optional.of("test-role")); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); SingularityRunNowRequest runNowRequest = new SingularityRunNowRequestBuilder() .setResources(new Resources(2, 2, 0)) .build(); requestResource.scheduleImmediately(singularityUser, requestId, runNowRequest); 
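// The pending task keeps the resources from the run-now request and should only be matched to an offer that includes the required role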
scheduler.drainPendingQueue(); SingularityPendingTask pendingTaskWithResources = taskManager .getPendingTasks() .get(0); Assertions.assertTrue(pendingTaskWithResources.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResources.getResources().get().getCpus(), 2, 0.0 ); sms.resourceOffers(Arrays.asList(createOffer(5, 5, 5))).join(); pendingTaskWithResources = taskManager.getPendingTasks().get(0); Assertions.assertTrue(pendingTaskWithResources.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResources.getResources().get().getCpus(), 2, 0.0 ); sms .resourceOffers(Arrays.asList(createOffer(5, 5, 5, Optional.of("test-role")))) .join(); SingularityTask task = taskManager.getActiveTasks().get(0); Assertions.assertEquals( MesosUtils.getNumCpus( mesosProtosUtils.toResourceList(task.getMesosTask().getResources()), Optional.of("test-role") ), 2.0, 0.0 ); } @Test public void testNotAcceptOfferWithRoleForRequestWithoutRole() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); SingularityRunNowRequest runNowRequest = new SingularityRunNowRequestBuilder() .setResources(new Resources(2, 2, 0)) .build(); requestResource.scheduleImmediately(singularityUser, requestId, runNowRequest); scheduler.drainPendingQueue(); SingularityPendingTask pendingTaskWithResources = taskManager .getPendingTasks() .get(0); Assertions.assertTrue(pendingTaskWithResources.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResources.getResources().get().getCpus(), 2, 0.0 ); sms .resourceOffers(Arrays.asList(createOffer(5, 5, 5, Optional.of("test-role")))) .join(); pendingTaskWithResources = taskManager.getPendingTasks().get(0); Assertions.assertTrue(pendingTaskWithResources.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResources.getResources().get().getCpus(), 2, 0.0 ); } @Test public void testMaxOnDemandTasks() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); bldr.setInstances(Optional.of(1)); requestResource.postRequest(bldr.build(), singularityUser); deploy("on_demand_deploy"); deployChecker.checkDeploys(); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, "on_demand_deploy", System.currentTimeMillis(), Optional.<String>empty(), PendingType.ONEOFF, Optional.<List<String>>empty(), Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty() ) ); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, "on_demand_deploy", System.currentTimeMillis(), Optional.<String>empty(), PendingType.ONEOFF, Optional.<List<String>>empty(), Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty() ) ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); } @Test public void testCleanupsCreatedOnScaleDown() { initRequest(); SingularityRequestBuilder bldr = request.toBuilder(); bldr.setInstances(Optional.of(2)); requestResource.postRequest(bldr.build(), singularityUser); initFirstDeploy(); SingularityTask firstTask = launchTask( request, firstDeploy, 1, TaskState.TASK_RUNNING ); SingularityTask secondTask = launchTask( request, firstDeploy, 2, TaskState.TASK_RUNNING ); Assertions.assertEquals(0, taskManager.getNumCleanupTasks()); bldr.setInstances(Optional.of(1)); 
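// Scaling back down from two instances to one should create a cleanup for the surplus (second) task.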
requestResource.postRequest(bldr.build(), singularityUser); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); Assertions.assertEquals( taskManager.getCleanupTaskIds().get(0), secondTask.getTaskId() ); } @Test public void testRecoveredTask() { // set up the slave first sms .resourceOffers( Arrays.asList(createOffer(1, 129, 1025, "slave1", "host1", Optional.of("rack1"))) ) .join(); initRequest(); initFirstDeploy(); SingularityTask task = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); TaskStatus lost = TaskStatus .newBuilder() .setTaskId(MesosProtosUtils.toTaskId(task.getMesosTask().getTaskId())) .setAgentId(MesosProtosUtils.toAgentId(task.getAgentId())) .setReason(Reason.REASON_AGENT_REMOVED) .setMessage("health check timed out") .setState(TaskState.TASK_LOST) .build(); sms.statusUpdate(lost).join(); Assertions.assertEquals(0, taskManager.getNumActiveTasks()); Assertions.assertTrue(taskManager.getTaskHistory(task.getTaskId()).isPresent()); TaskStatus recovered = TaskStatus .newBuilder() .setTaskId(MesosProtosUtils.toTaskId(task.getMesosTask().getTaskId())) .setAgentId(MesosProtosUtils.toAgentId(task.getAgentId())) .setReason(Reason.REASON_AGENT_REREGISTERED) .setMessage("agent reregistered") .setState(TaskState.TASK_RUNNING) .build(); sms.statusUpdate(recovered).join(); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); Assertions.assertEquals(1, requestManager.getSizeOfPendingQueue()); } @Test public void itRetriesLostShortRunningRequests() { runTest(RequestType.ON_DEMAND, Reason.REASON_AGENT_RESTARTED, true); } @Test public void itDoesNotRetryLostLongRunningRequests() { runTest(RequestType.SERVICE, Reason.REASON_AGENT_RESTARTED, false); } @Test public void itDoesNotRetryLostRequestsDueToNonAgentFailures() { runTest(RequestType.ON_DEMAND, Reason.REASON_CONTAINER_LIMITATION_DISK, false); } private void runTest(RequestType requestType, Reason reason, boolean shouldRetry) { initRequestWithType(requestType, false); initFirstDeploy(); SingularityTask task = startTask(firstDeploy); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); Assertions.assertEquals(0, requestManager.getPendingRequests().size()); try { updateHandler .processStatusUpdateAsync( TaskStatus .newBuilder() .setState(TaskState.TASK_LOST) .setReason(reason) .setTaskId(TaskID.newBuilder().setValue(task.getTaskId().getId())) .build() ) .get(); } catch (InterruptedException | ExecutionException e) { Assertions.assertTrue(false); } if (shouldRetry) { Assertions.assertEquals(requestManager.getPendingRequests().size(), 1); Assertions.assertEquals( requestManager.getPendingRequests().get(0).getPendingType(), PendingType.RETRY ); } else { if (requestManager.getPendingRequests().size() > 0) { Assertions.assertEquals( requestManager.getPendingRequests().get(0).getPendingType(), PendingType.TASK_DONE ); } } scheduler.drainPendingQueue(); } }
SingularityService/src/test/java/com/hubspot/singularity/scheduler/SingularitySchedulerTest.java
package com.hubspot.singularity.scheduler; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; import com.google.inject.Inject; import com.hubspot.baragon.models.BaragonRequestState; import com.hubspot.mesos.Resources; import com.hubspot.mesos.SingularityContainerInfo; import com.hubspot.mesos.SingularityContainerType; import com.hubspot.mesos.SingularityDockerInfo; import com.hubspot.mesos.SingularityDockerNetworkType; import com.hubspot.mesos.SingularityDockerPortMapping; import com.hubspot.mesos.SingularityPortMappingType; import com.hubspot.mesos.SingularityVolume; import com.hubspot.mesos.protos.MesosTaskState; import com.hubspot.singularity.DeployState; import com.hubspot.singularity.ExtendedTaskState; import com.hubspot.singularity.LoadBalancerRequestType; import com.hubspot.singularity.MachineState; import com.hubspot.singularity.RequestCleanupType; import com.hubspot.singularity.RequestState; import com.hubspot.singularity.RequestType; import com.hubspot.singularity.ScheduleType; import com.hubspot.singularity.SingularityDeleteResult; import com.hubspot.singularity.SingularityDeploy; import com.hubspot.singularity.SingularityDeployBuilder; import com.hubspot.singularity.SingularityDeployMarker; import com.hubspot.singularity.SingularityDeployResult; import com.hubspot.singularity.SingularityDeployStatistics; import com.hubspot.singularity.SingularityKilledTaskIdRecord; import com.hubspot.singularity.SingularityLoadBalancerUpdate; import com.hubspot.singularity.SingularityPendingRequest; import com.hubspot.singularity.SingularityPendingRequest.PendingType; import com.hubspot.singularity.SingularityPendingTask; import com.hubspot.singularity.SingularityPendingTaskBuilder; import com.hubspot.singularity.SingularityPendingTaskId; import com.hubspot.singularity.SingularityPriorityFreezeParent; import com.hubspot.singularity.SingularityRequest; import com.hubspot.singularity.SingularityRequestBuilder; import com.hubspot.singularity.SingularityRequestCleanup; import com.hubspot.singularity.SingularityRequestHistory; import com.hubspot.singularity.SingularityRequestHistory.RequestHistoryType; import com.hubspot.singularity.SingularityRequestLbCleanup; import com.hubspot.singularity.SingularityRunNowRequestBuilder; import com.hubspot.singularity.SingularityShellCommand; import com.hubspot.singularity.SingularityTask; import com.hubspot.singularity.SingularityTaskCleanup; import com.hubspot.singularity.SingularityTaskHealthcheckResult; import com.hubspot.singularity.SingularityTaskHistoryUpdate; import com.hubspot.singularity.SingularityTaskId; import com.hubspot.singularity.SingularityTaskRequest; import com.hubspot.singularity.SlavePlacement; import com.hubspot.singularity.TaskCleanupType; import com.hubspot.singularity.api.SingularityBounceRequest; import com.hubspot.singularity.api.SingularityDeleteRequestRequest; import com.hubspot.singularity.api.SingularityDeployRequest; import com.hubspot.singularity.api.SingularityKillTaskRequest; import com.hubspot.singularity.api.SingularityPauseRequest; import com.hubspot.singularity.api.SingularityPriorityFreeze; import com.hubspot.singularity.api.SingularityRunNowRequest; import com.hubspot.singularity.api.SingularityScaleRequest; import com.hubspot.singularity.data.AbstractMachineManager.StateChangeResult; import com.hubspot.singularity.data.SingularityValidator; import com.hubspot.singularity.helpers.MesosProtosUtils; import com.hubspot.singularity.helpers.MesosUtils; import 
com.hubspot.singularity.mesos.OfferCache; import com.hubspot.singularity.mesos.SingularityMesosStatusUpdateHandler; import com.hubspot.singularity.mesos.SingularityMesosTaskPrioritizer; import com.hubspot.singularity.scheduler.SingularityDeployHealthHelper.DeployHealth; import com.hubspot.singularity.scheduler.SingularityTaskReconciliation.ReconciliationState; import com.jayway.awaitility.Awaitility; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import javax.ws.rs.WebApplicationException; import org.apache.mesos.v1.Protos.AgentID; import org.apache.mesos.v1.Protos.Offer; import org.apache.mesos.v1.Protos.TaskID; import org.apache.mesos.v1.Protos.TaskState; import org.apache.mesos.v1.Protos.TaskStatus; import org.apache.mesos.v1.Protos.TaskStatus.Reason; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.mockito.ArgumentMatchers; import org.mockito.Mockito; public class SingularitySchedulerTest extends SingularitySchedulerTestBase { @Inject private SingularityValidator validator; @Inject private SingularityDeployHealthHelper deployHealthHelper; @Inject private SingularityMesosTaskPrioritizer taskPrioritizer; @Inject private SingularitySchedulerPoller schedulerPoller; @Inject private OfferCache offerCache; @Inject private MesosProtosUtils mesosProtosUtils; @Inject SingularityMesosStatusUpdateHandler updateHandler; public SingularitySchedulerTest() { super(false); } private SingularityPendingTask pendingTask( String requestId, String deployId, PendingType pendingType ) { return new SingularityPendingTaskBuilder() .setPendingTaskId( new SingularityPendingTaskId( requestId, deployId, System.currentTimeMillis(), 1, pendingType, System.currentTimeMillis() ) ) .build(); } @Test public void testOfferCacheRescindOffers() { configuration.setCacheOffers(true); configuration.setOfferCacheSize(2); List<Offer> offers2 = resourceOffers(); // cached as well sms.rescind(offers2.get(0).getId()); sms.rescind(offers2.get(1).getId()); initRequest(); initFirstDeploy(); requestResource.postRequest( request .toBuilder() .setSlavePlacement(Optional.of(SlavePlacement.SEPARATE)) .setInstances(Optional.of(2)) .build(), singularityUser ); schedulerPoller.runActionOnPoll(); Assertions.assertEquals(0, taskManager.getActiveTasks().size()); resourceOffers(); int numTasks = taskManager.getActiveTasks().size(); Assertions.assertEquals(2, numTasks); startAndDeploySecondRequest(); schedulerPoller.runActionOnPoll(); Assertions.assertEquals(numTasks, taskManager.getActiveTasks().size()); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTasks().size() > numTasks); } @Test public void testSchedulerIsolatesPendingTasksBasedOnDeploy() { initRequest(); initFirstDeploy(); initSecondDeploy(); SingularityPendingTask p1 = pendingTask(requestId, firstDeployId, PendingType.ONEOFF); SingularityPendingTask p2 = pendingTask( requestId, firstDeployId, PendingType.TASK_DONE ); SingularityPendingTask p3 = pendingTask( requestId, secondDeployId, PendingType.TASK_DONE ); taskManager.savePendingTask(p1); taskManager.savePendingTask(p2); taskManager.savePendingTask(p3); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, secondDeployId, System.currentTimeMillis(), Optional.<String>empty(), 
PendingType.NEW_DEPLOY, Optional.<Boolean>empty(), Optional.<String>empty() ) ); scheduler.drainPendingQueue(); // we expect there to be 3 pending tasks : List<SingularityPendingTask> returnedScheduledTasks = taskManager.getPendingTasks(); Assertions.assertEquals(3, returnedScheduledTasks.size()); Assertions.assertTrue(returnedScheduledTasks.contains(p1)); Assertions.assertTrue(returnedScheduledTasks.contains(p2)); Assertions.assertTrue(!returnedScheduledTasks.contains(p3)); boolean found = false; for (SingularityPendingTask pendingTask : returnedScheduledTasks) { if (pendingTask.getPendingTaskId().getDeployId().equals(secondDeployId)) { found = true; Assertions.assertEquals( PendingType.NEW_DEPLOY, pendingTask.getPendingTaskId().getPendingType() ); } } Assertions.assertTrue(found); } @Test public void testCleanerLeavesPausedRequestTasksByDemand() { initScheduledRequest(); initFirstDeploy(); SingularityTask firstTask = launchTask( request, firstDeploy, 1, TaskState.TASK_RUNNING ); createAndSchedulePendingTask(firstDeployId); requestResource.pause( requestId, Optional.of( new SingularityPauseRequest( Optional.of(false), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); Assertions.assertTrue(requestManager.getCleanupRequests().isEmpty()); statusUpdate(firstTask, TaskState.TASK_FINISHED); // make sure something new isn't scheduled! Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); } @Test public void testTaskKill() { initRequest(); initFirstDeploy(); SingularityTask firstTask = startTask(firstDeploy); taskResource.killTask( firstTask.getTaskId().getId(), Optional.empty(), singularityUser ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(0, taskManager.getNumCleanupTasks()); Assertions.assertEquals(0, taskManager.getNumActiveTasks()); } @Test public void testTaskDestroy() { initRequest(); initFirstDeploy(); SingularityTask firstTask = startTask(firstDeploy, 1); SingularityTask secondTask = startTask(firstDeploy, 2); SingularityTask thirdTask = startTask(firstDeploy, 3); taskResource.killTask( secondTask.getTaskId().getId(), Optional.of( new SingularityKillTaskRequest( Optional.of(true), Optional.of("kill -9 bb"), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(2, taskManager.getNumActiveTasks()); System.out.println(requestManager.getCleanupRequests()); Assertions.assertEquals(0, requestManager.getCleanupRequests().size()); Assertions.assertEquals( RequestState.ACTIVE, requestManager.getRequest(requestId).get().getState() ); } @Test public void testTaskBounce() { initRequest(); initFirstDeploy(); SingularityTask firstTask = startTask(firstDeploy); taskResource.killTask( firstTask.getTaskId().getId(), Optional.of( new SingularityKillTaskRequest( Optional.empty(), Optional.of("msg"), Optional.empty(), Optional.of(true), Optional.empty() ) ), singularityUser ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); resourceOffers(); runLaunchedTasks(); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(2, 
taskManager.getNumActiveTasks()); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(0, taskManager.getNumCleanupTasks()); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); } @Test public void testBounceWithLoadBalancer() { initLoadBalancedRequest(); initFirstDeploy(); configuration.setNewTaskCheckerBaseDelaySeconds(1000000); SingularityTask taskOne = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); saveLoadBalancerState( BaragonRequestState.SUCCESS, taskOne.getTaskId(), LoadBalancerRequestType.ADD ); requestResource.bounce(requestId, Optional.empty(), singularityUser); cleaner.drainCleanupQueue(); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(2, taskManager.getNumActiveTasks()); List<SingularityTaskId> tasks = taskManager.getActiveTaskIds(); tasks.remove(taskOne.getTaskId()); SingularityTaskId taskTwo = tasks.get(0); cleaner.drainCleanupQueue(); runLaunchedTasks(); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(2, taskManager.getNumActiveTasks()); // add to LB: saveLoadBalancerState( BaragonRequestState.SUCCESS, taskTwo, LoadBalancerRequestType.ADD ); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(2, taskManager.getNumActiveTasks()); saveLoadBalancerState( BaragonRequestState.SUCCESS, taskOne.getTaskId(), LoadBalancerRequestType.REMOVE ); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getKilledTaskIdRecords().size()); killKilledTasks(); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); } @Test public void testKilledTaskIdRecords() { initScheduledRequest(); initFirstDeploy(); launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); requestResource.deleteRequest(requestId, Optional.empty(), singularityUser); Assertions.assertTrue(requestManager.getCleanupRequests().size() == 1); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getKilledTaskIdRecords().isEmpty()); killKilledTasks(); cleaner.drainCleanupQueue(); Assertions.assertTrue(requestManager.getCleanupRequests().isEmpty()); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); } @Test public void testLongRunningTaskKills() { initScheduledRequest(); initFirstDeploy(); launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); initSecondDeploy(); deployChecker.checkDeploys(); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(!taskManager.getCleanupTasks().isEmpty()); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(!taskManager.getCleanupTasks().isEmpty()); requestManager.activate( request .toBuilder() .setKillOldNonLongRunningTasksAfterMillis(Optional.<Long>of(0L)) .build(), RequestHistoryType.CREATED, System.currentTimeMillis(), Optional.<String>empty(), Optional.<String>empty() ); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(taskManager.getCleanupTasks().isEmpty()); } @Test public void testSchedulerCanBatchOnOffers() { initRequest(); initFirstDeploy(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(3)).build(), singularityUser ); scheduler.drainPendingQueue(); List<Offer> oneOffer = Arrays.asList(createOffer(12, 1024, 5000)); sms.resourceOffers(oneOffer).join(); Assertions.assertTrue(taskManager.getActiveTasks().size() == 
3); Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); Assertions.assertTrue(requestManager.getPendingRequests().isEmpty()); } @Test public void testSchedulerExhaustsOffers() { initRequest(); initFirstDeploy(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(10)).build(), singularityUser ); scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(2, 1024, 2048), createOffer(1, 1024, 2048)) ) .join(); Assertions.assertEquals(3, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(7, taskManager.getPendingTaskIds().size()); } @Test public void testSchedulerRandomizesOffers() { initRequest(); initFirstDeploy(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(15)).build(), singularityUser ); scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(20, 1024, 20000), createOffer(20, 1024, 20000)) ) .join(); Assertions.assertEquals(15, taskManager.getActiveTaskIds().size()); Set<String> offerIds = Sets.newHashSet(); for (SingularityTask activeTask : taskManager.getActiveTasks()) { offerIds.addAll( activeTask .getOffers() .stream() .map(o -> o.getId().getValue()) .collect(Collectors.toList()) ); } Assertions.assertEquals(2, offerIds.size()); } @Test public void testSchedulerHandlesFinishedTasks() { initScheduledRequest(); initFirstDeploy(); schedule = "*/1 * * * * ? 1995"; // cause it to be pending requestResource.postRequest( request.toBuilder().setQuartzSchedule(Optional.of(schedule)).build(), singularityUser ); scheduler.drainPendingQueue(); Assertions.assertTrue( requestResource .getActiveRequests( singularityUser, false, false, false, 10, Collections.emptyList() ) .isEmpty() ); Assertions.assertTrue( requestManager.getRequest(requestId).get().getState() == RequestState.FINISHED ); Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); schedule = "*/1 * * * * ?"; requestResource.postRequest( request.toBuilder().setQuartzSchedule(Optional.of(schedule)).build(), singularityUser ); scheduler.drainPendingQueue(); Assertions.assertTrue( !requestResource .getActiveRequests( singularityUser, false, false, false, 10, Collections.emptyList() ) .isEmpty() ); Assertions.assertTrue( requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE ); Assertions.assertTrue(!taskManager.getPendingTaskIds().isEmpty()); } @Test public void testNewlyDeployedScheduledTasksAreScheduledAfterStartup() { initScheduledRequest(); initFirstDeploy(); SingularityTask runningTask = launchTask( request, firstDeploy, 1, TaskState.TASK_RUNNING ); long now = System.currentTimeMillis(); initSecondDeploy(); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, secondDeployId, now, Optional.empty(), PendingType.STARTUP, Optional.empty(), Optional.empty() ) ); deployChecker.checkDeploys(); resourceOffers(); // There's an instance running, so we shouldn't schedule a pending task yet Assertions.assertTrue(taskManager.getPendingTaskIds().isEmpty()); statusUpdate(runningTask, TaskState.TASK_FINISHED); scheduler.drainPendingQueue(); // Now a pending task should be scheduled with the new deploy Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( PendingType.NEW_DEPLOY, taskManager.getPendingTaskIds().get(0).getPendingType() ); Assertions.assertEquals( secondDeployId, taskManager.getPendingTaskIds().get(0).getDeployId() ); } @Test public void testFinishedRequestCanBeDeployed() { initScheduledRequest(); initFirstDeploy(); schedule = 
"*/1 * * * * ? 1995"; // cause it to be pending requestResource.postRequest( request.toBuilder().setQuartzSchedule(Optional.of(schedule)).build(), singularityUser ); scheduler.drainPendingQueue(); Assertions.assertTrue( requestResource .getActiveRequests( singularityUser, false, false, false, 10, Collections.emptyList() ) .isEmpty() ); Assertions.assertTrue( requestManager.getRequest(requestId).get().getState() == RequestState.FINISHED ); SingularityDeployBuilder db = new SingularityDeployBuilder(requestId, secondDeployId); initDeploy(db, System.currentTimeMillis()); deployChecker.checkDeploys(); Assertions.assertEquals( RequestState.ACTIVE, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals(1, requestManager.getPendingRequests().size()); } @Test public void testOneOffsDontRunByThemselves() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); Assertions.assertTrue(requestManager.getPendingRequests().isEmpty()); deploy("d2"); Assertions.assertTrue(requestManager.getPendingRequests().isEmpty()); deployChecker.checkDeploys(); Assertions.assertTrue(requestManager.getPendingRequests().isEmpty()); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_FINISHED); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(0, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(0, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); } @Test public void testOneOffsDontMoveDuringDecomission() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); validateTaskDoesntMoveDuringDecommission(); } private void validateTaskDoesntMoveDuringDecommission() { scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(1, 129, 1025, "slave1", "host1", Optional.of("rack1"))) ) .join(); scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(1, 129, 1025, "slave2", "host2", Optional.of("rack1"))) ) .join(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); Assertions.assertEquals( "host1", taskManager.getActiveTaskIds().get(0).getSanitizedHost() ); Assertions.assertEquals( StateChangeResult.SUCCESS, slaveManager.changeState( "slave1", MachineState.STARTING_DECOMMISSION, Optional.<String>empty(), Optional.of("user1") ) ); scheduler.checkForDecomissions(); scheduler.drainPendingQueue(); sms .resourceOffers( Arrays.asList(createOffer(1, 129, 1025, "slave2", "host2", Optional.of("rack1"))) ) .join(); cleaner.drainCleanupQueue(); scheduler.drainPendingQueue(); sms .resourceOffers( 
Arrays.asList(createOffer(1, 129, 1025, "slave2", "host2", Optional.of("rack1"))) ) .join(); cleaner.drainCleanupQueue(); // task should not move! Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); Assertions.assertEquals( "host1", taskManager.getActiveTaskIds().get(0).getSanitizedHost() ); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().isEmpty()); Assertions.assertTrue(taskManager.getCleanupTaskIds().size() == 1); } @Test public void testCustomResourcesWithRunNowRequest() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); SingularityRunNowRequest runNowRequest = new SingularityRunNowRequestBuilder() .setResources(new Resources(2, 2, 0)) .build(); requestResource.scheduleImmediately(singularityUser, requestId, runNowRequest); scheduler.drainPendingQueue(); SingularityPendingTask pendingTaskWithResourcs = taskManager.getPendingTasks().get(0); Assertions.assertTrue(pendingTaskWithResourcs.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResourcs.getResources().get().getCpus(), 2, 0.0 ); sms .resourceOffers( Arrays.asList(createOffer(5, 5, 5, "slave1", "host1", Optional.of("rack1"))) ) .join(); SingularityTask task = taskManager.getActiveTasks().get(0); Assertions.assertEquals( MesosUtils.getNumCpus( mesosProtosUtils.toResourceList(task.getMesosTask().getResources()), Optional.<String>empty() ), 2.0, 0.0 ); } @Test public void testRunOnceRunOnlyOnce() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.RUN_ONCE ); request = bldr.build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue( deployManager.getRequestDeployState(requestId).get().getActiveDeploy().isPresent() ); Assertions.assertTrue( !deployManager.getRequestDeployState(requestId).get().getPendingDeploy().isPresent() ); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().isEmpty()); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d2") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue( deployManager.getRequestDeployState(requestId).get().getActiveDeploy().isPresent() ); Assertions.assertTrue( !deployManager.getRequestDeployState(requestId).get().getPendingDeploy().isPresent() ); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_FINISHED); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().isEmpty()); } @Test public void testMultipleRunOnceTasks() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.RUN_ONCE ); request = bldr.build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") 
.setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); deployChecker.checkDeploys(); Assertions.assertEquals(1, requestManager.getSizeOfPendingQueue()); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d2") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); deployChecker.checkDeploys(); Assertions.assertEquals(2, requestManager.getSizeOfPendingQueue()); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(2, taskManager.getActiveTaskIds().size()); } @Test public void testRunOnceDontMoveDuringDecomission() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.RUN_ONCE ); request = bldr.build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); validateTaskDoesntMoveDuringDecommission(); } @Test public void testDecommissionDoesntKillPendingDeploy() { initRequest(); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); resourceOffers(); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); slaveResource.decommissionSlave( singularityUser, taskManager.getActiveTasks().get(0).getAgentId().getValue(), null ); scheduler.checkForDecomissions(); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); configuration.setPendingDeployHoldTaskDuringDecommissionMillis(1); try { Thread.sleep(2); } catch (InterruptedException e) {} cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertEquals(0, taskManager.getNumActiveTasks()); Assertions.assertEquals(0, taskManager.getNumCleanupTasks()); } @Test public void testRetries() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.RUN_ONCE ); request = bldr.setNumRetriesOnFailure(Optional.of(2)).build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().isEmpty()); } @Test public void testRetriesWithOverrides() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); request = 
bldr.setNumRetriesOnFailure(Optional.of(2)).build(); saveRequest(request); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(requestId, "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); deployChecker.checkDeploys(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder() .setCommandLineArgs(Collections.singletonList("extraFlag")) .setResources(new Resources(17, 1337, 0)) .build() ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); Resources resourcesForRunningTask = taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getResources() .get(); Assertions.assertEquals( Optional.of(Collections.singletonList("extraFlag")), taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getCmdLineArgsList() ); Assertions.assertEquals(17, resourcesForRunningTask.getCpus(), 0.01); Assertions.assertEquals(1337, resourcesForRunningTask.getMemoryMb(), 0.01); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); resourcesForRunningTask = taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getResources() .get(); Assertions.assertEquals( Optional.of(Collections.singletonList("extraFlag")), taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getCmdLineArgsList() ); Assertions.assertEquals(17, resourcesForRunningTask.getCpus(), 0.01); Assertions.assertEquals(1337, resourcesForRunningTask.getMemoryMb(), 0.01); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); resourcesForRunningTask = taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getResources() .get(); Assertions.assertEquals( Optional.of(Collections.singletonList("extraFlag")), taskManager .getActiveTasks() .get(0) .getTaskRequest() .getPendingTask() .getCmdLineArgsList() ); Assertions.assertEquals(17, resourcesForRunningTask.getCpus(), 0.01); Assertions.assertEquals(1337, resourcesForRunningTask.getMemoryMb(), 0.01); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_LOST); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().isEmpty()); } /* @Test public void testCooldownAfterSequentialFailures() { initRequest(); initFirstDeploy(); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE); configuration.setFastFailureCooldownCount(2); SingularityTask firstTask = startTask(firstDeploy); SingularityTask secondTask = startTask(firstDeploy); statusUpdate(firstTask, TaskState.TASK_FAILED); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE); statusUpdate(secondTask, TaskState.TASK_FAILED); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.SYSTEM_COOLDOWN); cooldownChecker.checkCooldowns(); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.SYSTEM_COOLDOWN); SingularityTask thirdTask = startTask(firstDeploy); statusUpdate(thirdTask, TaskState.TASK_FINISHED); 
Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE); } @Test public void testCooldownOnlyWhenTasksRapidlyFail() { initRequest(); initFirstDeploy(); configuration.setFastFailureCooldownCount(2); SingularityTask firstTask = startTask(firstDeploy); statusUpdate(firstTask, TaskState.TASK_FAILED, Optional.of(System.currentTimeMillis() - TimeUnit.HOURS.toMillis(5))); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() == RequestState.ACTIVE); SingularityTask secondTask = startTask(firstDeploy); statusUpdate(secondTask, TaskState.TASK_FAILED); Assertions.assertTrue(requestManager.getRequest(requestId).get().getState() != RequestState.SYSTEM_COOLDOWN); }*/ @Test public void testLBCleanup() { initLoadBalancedRequest(); initFirstDeploy(); configuration.setLoadBalancerRemovalGracePeriodMillis(10000); SingularityTask task = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); saveLoadBalancerState( BaragonRequestState.SUCCESS, task.getTaskId(), LoadBalancerRequestType.ADD ); statusUpdate(task, TaskState.TASK_FAILED); Assertions.assertTrue(!taskManager.getLBCleanupTasks().isEmpty()); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getLBCleanupTasks().isEmpty()); Optional<SingularityLoadBalancerUpdate> lbUpdate = taskManager.getLoadBalancerState( task.getTaskId(), LoadBalancerRequestType.REMOVE ); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.WAITING ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.FAILED); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getLBCleanupTasks().isEmpty()); lbUpdate = taskManager.getLoadBalancerState(task.getTaskId(), LoadBalancerRequestType.REMOVE); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.FAILED ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.SUCCESS); cleaner.drainCleanupQueue(); Assertions.assertTrue(!taskManager.getLBCleanupTasks().isEmpty()); configuration.setLoadBalancerRemovalGracePeriodMillis(0); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getLBCleanupTasks().isEmpty()); lbUpdate = taskManager.getLoadBalancerState(task.getTaskId(), LoadBalancerRequestType.REMOVE); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.SUCCESS ); Assertions.assertTrue( lbUpdate.get().getLoadBalancerRequestId().getAttemptNumber() == 2 ); } @Test public void testLbCleanupDoesNotRemoveBeforeAdd() { initLoadBalancedRequest(); initFirstDeploy(); SingularityTask taskOne = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); initSecondDeploy(); SingularityTask taskTwo = launchTask( request, secondDeploy, 1, TaskState.TASK_RUNNING ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); deployChecker.checkDeploys(); // First task from old deploy is still starting, never got added to LB so it should not have a removal request Assertions.assertFalse( taskManager .getLoadBalancerState(taskOne.getTaskId(), LoadBalancerRequestType.ADD) .isPresent() ); Assertions.assertFalse( taskManager .getLoadBalancerState(taskOne.getTaskId(), LoadBalancerRequestType.REMOVE) .isPresent() ); // Second task should have an add request Assertions.assertTrue( taskManager .getLoadBalancerState(taskTwo.getTaskId(), 
LoadBalancerRequestType.ADD) .isPresent() ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.SUCCESS); deployChecker.checkDeploys(); // First task from old deploy should still have no LB updates, but should have a cleanup Assertions.assertFalse( taskManager .getLoadBalancerState(taskOne.getTaskId(), LoadBalancerRequestType.ADD) .isPresent() ); Assertions.assertFalse( taskManager .getLoadBalancerState(taskOne.getTaskId(), LoadBalancerRequestType.REMOVE) .isPresent() ); Assertions.assertTrue(taskManager.getCleanupTaskIds().contains(taskOne.getTaskId())); } @Test public void testLbCleanupSkippedOnSkipRemoveFlag() { configuration.setDeleteRemovedRequestsFromLoadBalancer(true); initLoadBalancedRequest(); initLoadBalancedDeploy(); startTask(firstDeploy); boolean removeFromLoadBalancer = false; SingularityDeleteRequestRequest deleteRequest = new SingularityDeleteRequestRequest( Optional.empty(), Optional.empty(), Optional.of(removeFromLoadBalancer) ); requestResource.deleteRequest(requestId, Optional.of(deleteRequest), singularityUser); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); Assertions.assertFalse( requestManager.getCleanupRequests().isEmpty(), "Tasks should get cleaned up" ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertFalse( requestManager.getCleanupRequests().isEmpty(), "The request should get cleaned up" ); cleaner.drainCleanupQueue(); Assertions.assertTrue( requestManager.getLbCleanupRequestIds().isEmpty(), "The request should not be removed from the load balancer" ); } @Test public void testLbCleanupOccursOnRequestDelete() { configuration.setDeleteRemovedRequestsFromLoadBalancer(true); initLoadBalancedRequest(); initLoadBalancedDeploy(); startTask(firstDeploy); requestResource.deleteRequest(requestId, Optional.empty(), singularityUser); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); Assertions.assertFalse( requestManager.getCleanupRequests().isEmpty(), "Tasks should get cleaned up" ); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertFalse( requestManager.getCleanupRequests().isEmpty(), "The request should get cleaned up" ); cleaner.drainCleanupQueue(); Assertions.assertFalse( requestManager.getLbCleanupRequestIds().isEmpty(), "The request should get removed from the load balancer" ); } @Test public void testReconciliation() { Assertions.assertTrue(!taskReconciliation.isReconciliationRunning()); configuration.setCheckReconcileWhenRunningEveryMillis(1); initRequest(); initFirstDeploy(); Assertions.assertTrue( taskReconciliation.startReconciliation() == ReconciliationState.STARTED ); Awaitility .await() .atMost(10, TimeUnit.SECONDS) .until(() -> !taskReconciliation.isReconciliationRunning()); SingularityTask taskOne = launchTask( request, firstDeploy, 1, TaskState.TASK_STARTING ); SingularityTask taskTwo = launchTask(request, firstDeploy, 2, TaskState.TASK_RUNNING); saveLastActiveTaskStatus(taskOne, Optional.empty(), -1000); Assertions.assertTrue( taskReconciliation.startReconciliation() == ReconciliationState.STARTED ); Assertions.assertTrue( taskReconciliation.startReconciliation() == ReconciliationState.ALREADY_RUNNING ); Awaitility .await() .atMost(10, TimeUnit.SECONDS) .until(() -> taskReconciliation.isReconciliationRunning()); saveLastActiveTaskStatus(taskOne, Optional.of(buildTaskStatus(taskOne)), +1000); Awaitility .await() .atMost(10, TimeUnit.SECONDS) .until(() -> taskReconciliation.isReconciliationRunning()); saveLastActiveTaskStatus(taskTwo, 
Optional.of(buildTaskStatus(taskTwo)), +1000); Awaitility .await() .atMost(10, TimeUnit.SECONDS) .until(() -> !taskReconciliation.isReconciliationRunning()); } @Test public void testSchedulerPriority() { final SingularityRequest lowPriorityRequest = new SingularityRequestBuilder( "lowPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.25)) .build(); saveRequest(lowPriorityRequest); final SingularityRequest mediumPriorityRequest = new SingularityRequestBuilder( "mediumPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.5)) .build(); saveRequest(mediumPriorityRequest); final SingularityRequest highPriorityRequest = new SingularityRequestBuilder( "highPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.75)) .build(); saveRequest(highPriorityRequest); final SingularityDeploy lowPriorityDeploy = initAndFinishDeploy( lowPriorityRequest, "lowPriorityDeploy" ); final SingularityDeploy mediumPriorityDeploy = initAndFinishDeploy( mediumPriorityRequest, "mediumPriorityDeploy" ); final SingularityDeploy highPriorityDeploy = initAndFinishDeploy( highPriorityRequest, "highPriorityDeploy" ); // Task requests launched at ~ the same time should be in priority order long now = System.currentTimeMillis(); List<SingularityTaskRequest> requestsByPriority = Arrays.asList( buildTaskRequest(lowPriorityRequest, lowPriorityDeploy, now), buildTaskRequest(mediumPriorityRequest, mediumPriorityDeploy, now), buildTaskRequest(highPriorityRequest, highPriorityDeploy, now) ); List<SingularityTaskRequest> sortedRequestsByPriority = taskPrioritizer.getSortedDueTasks( requestsByPriority ); Assertions.assertEquals( sortedRequestsByPriority.get(0).getRequest().getId(), highPriorityRequest.getId() ); Assertions.assertEquals( sortedRequestsByPriority.get(1).getRequest().getId(), mediumPriorityRequest.getId() ); Assertions.assertEquals( sortedRequestsByPriority.get(2).getRequest().getId(), lowPriorityRequest.getId() ); // A lower priority task that is long overdue should be run before a higher priority task now = System.currentTimeMillis(); List<SingularityTaskRequest> requestsByOverdueAndPriority = Arrays.asList( buildTaskRequest(lowPriorityRequest, lowPriorityDeploy, now - 120000), // 2 min overdue buildTaskRequest(mediumPriorityRequest, mediumPriorityDeploy, now - 30000), // 60s overdue buildTaskRequest(highPriorityRequest, highPriorityDeploy, now) ); // Not overdue List<SingularityTaskRequest> sortedRequestsByOverdueAndPriority = taskPrioritizer.getSortedDueTasks( requestsByOverdueAndPriority ); Assertions.assertEquals( sortedRequestsByOverdueAndPriority.get(0).getRequest().getId(), lowPriorityRequest.getId() ); Assertions.assertEquals( sortedRequestsByOverdueAndPriority.get(1).getRequest().getId(), mediumPriorityRequest.getId() ); Assertions.assertEquals( sortedRequestsByOverdueAndPriority.get(2).getRequest().getId(), highPriorityRequest.getId() ); } @Test public void badPauseExpires() { initRequest(); requestManager.createCleanupRequest( new SingularityRequestCleanup( Optional.<String>empty(), RequestCleanupType.PAUSING, System.currentTimeMillis(), Optional.<Boolean>empty(), Optional.empty(), requestId, Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty(), Optional.<SingularityShellCommand>empty() ) ); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.getCleanupRequests().isEmpty()); configuration.setCleanupEverySeconds(0); sleep(1); cleaner.drainCleanupQueue(); 
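// Once cleanupEverySeconds is set to 0 and time passes, the stale PAUSING cleanup (the request never actually paused) is expired and removed.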
Assertions.assertTrue(requestManager.getCleanupRequests().isEmpty()); } @Test public void testPauseLbCleanup() { initLoadBalancedRequest(); initFirstDeploy(); requestManager.saveLbCleanupRequest( new SingularityRequestLbCleanup( requestId, Sets.newHashSet("test"), "/basepath", Collections.<String>emptyList(), Optional.<SingularityLoadBalancerUpdate>empty() ) ); requestManager.pause( request, System.currentTimeMillis(), Optional.<String>empty(), Optional.<String>empty() ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.WAITING); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.getLbCleanupRequestIds().isEmpty()); Optional<SingularityLoadBalancerUpdate> lbUpdate = requestManager .getLbCleanupRequest(requestId) .get() .getLoadBalancerUpdate(); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.WAITING ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.FAILED); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.getLbCleanupRequestIds().isEmpty()); lbUpdate = requestManager.getLbCleanupRequest(requestId).get().getLoadBalancerUpdate(); Assertions.assertTrue(lbUpdate.isPresent()); Assertions.assertTrue( lbUpdate.get().getLoadBalancerState() == BaragonRequestState.FAILED ); testingLbClient.setNextBaragonRequestState(BaragonRequestState.SUCCESS); cleaner.drainCleanupQueue(); Assertions.assertTrue(requestManager.getLbCleanupRequestIds().isEmpty()); } @Test public void testPause() { initRequest(); initFirstDeploy(); SingularityTask taskOne = startTask(firstDeploy); requestResource.pause(requestId, Optional.empty(), singularityUser); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getKilledTaskIdRecords().size()); statusUpdate(taskOne, TaskState.TASK_KILLED); resourceOffers(); Assertions.assertEquals(0, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(0, taskManager.getPendingTasks().size()); Assertions.assertEquals( RequestState.PAUSED, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals( requestId, requestManager.getPausedRequests(false).iterator().next().getRequest().getId() ); requestResource.unpause(requestId, Optional.empty(), singularityUser); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); Assertions.assertEquals(0, taskManager.getPendingTasks().size()); Assertions.assertEquals( RequestState.ACTIVE, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals( requestId, requestManager.getActiveRequests(false).iterator().next().getRequest().getId() ); } @Test public void testBounce() { initRequest(); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(3), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); initFirstDeploy(); SingularityTask taskOne = startTask(firstDeploy, 1); SingularityTask taskTwo = startTask(firstDeploy, 2); SingularityTask taskThree = startTask(firstDeploy, 3); requestResource.bounce(requestId, Optional.empty(), singularityUser); Assertions.assertTrue(requestManager.cleanupRequestExists(requestId)); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.cleanupRequestExists(requestId)); Assertions.assertTrue(taskManager.getCleanupTaskIds().size() == 3); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.cleanupRequestExists(requestId)); 
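// The request-level bounce cleanup has been converted into per-task cleanups; the old tasks are not killed until their replacements are running.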
Assertions.assertTrue(taskManager.getCleanupTaskIds().size() == 3); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTaskIds().size() == 6); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getCleanupTaskIds().size() == 3); for (SingularityTask task : taskManager.getActiveTasks()) { if ( !task.getTaskId().equals(taskOne.getTaskId()) && !task.getTaskId().equals(taskTwo.getTaskId()) && !task.getTaskId().equals(taskThree.getTaskId()) ) { statusUpdate(task, TaskState.TASK_RUNNING, Optional.of(1L)); } } cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getCleanupTaskIds().isEmpty()); Assertions.assertTrue(taskManager.getKilledTaskIdRecords().size() == 3); } @Test public void testIncrementalBounceShutsDownOldTasksPerNewHealthyTask() { initRequest(); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(3), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); initFirstDeploy(); startTask(firstDeploy, 1); startTask(firstDeploy, 2); startTask(firstDeploy, 3); requestResource.bounce( requestId, Optional.of( new SingularityBounceRequest( Optional.of(true), Optional.empty(), Optional.of(1L), Optional.empty(), Optional.of("msg"), Optional.empty() ) ), singularityUser ); Assertions.assertTrue(requestManager.cleanupRequestExists(requestId)); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.cleanupRequestExists(requestId)); Assertions.assertEquals(3, taskManager.getCleanupTaskIds().size()); SingularityTask newTask = launchTask( request, firstDeploy, 5, TaskState.TASK_STARTING ); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(4, taskManager.getActiveTaskIds().size()); statusUpdate(newTask, TaskState.TASK_RUNNING); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getKilledTaskIdRecords().size()); Assertions.assertEquals(4, taskManager.getActiveTaskIds().size()); } @Test public void testBounceOnPendingInstancesReleasesLock() { initRequest(); initFirstDeploy(); SingularityTask task = startTask(firstDeploy, 1); statusUpdate(task, TaskState.TASK_FAILED); killKilledTasks(); Assertions.assertEquals( 0, taskManager.getActiveTaskIds().size(), "Bounce starts when tasks have not yet been launched" ); requestResource.bounce( requestId, Optional.of( new SingularityBounceRequest( Optional.empty(), Optional.of(true), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); // It acquires a lock on the bounce Assertions.assertTrue( requestManager.getExpiringBounce(requestId).isPresent(), "Lock on bounce should be acquired during bounce" ); cleaner.drainCleanupQueue(); scheduler.drainPendingQueue(); resourceOffers(); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { taskManager.saveTaskHistoryUpdate( new SingularityTaskHistoryUpdate( singularityTaskId, System.currentTimeMillis(), ExtendedTaskState.TASK_RUNNING, Optional.empty(), Optional.empty(), Collections.emptySet() ) ); } cleaner.drainCleanupQueue(); killKilledTasks(); // It finishes with one task running and the bounce released Assertions.assertEquals( 1, taskManager.getActiveTaskIds().size(), "Should end bounce with target number of tasks" ); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { String statusMessage = taskManager .getTaskHistoryUpdates(singularityTaskId) .get(0) 
.getStatusMessage() .get(); Assertions.assertTrue( statusMessage.contains("BOUNCE"), "Task was started by bounce" ); } Assertions.assertFalse( requestManager.getExpiringBounce(requestId).isPresent(), "Lock on bounce should be released after bounce" ); } @Test public void testBounceOnRunningInstancesReleasesLock() { initRequest(); initFirstDeploy(); startTask(firstDeploy, 1); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); requestResource.bounce( requestId, Optional.of( new SingularityBounceRequest( Optional.empty(), Optional.of(true), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); Assertions.assertTrue(requestManager.isBouncing(requestId)); cleaner.drainCleanupQueue(); // It acquires a lock on the bounce Assertions.assertTrue( requestManager.getExpiringBounce(requestId).isPresent(), "Lock on bounce should be acquired during bounce" ); scheduler.drainPendingQueue(); resourceOffers(); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { taskManager.saveTaskHistoryUpdate( new SingularityTaskHistoryUpdate( singularityTaskId, System.currentTimeMillis(), ExtendedTaskState.TASK_RUNNING, Optional.empty(), Optional.empty(), Collections.emptySet() ) ); } Assertions.assertTrue( taskManager.getActiveTaskIds().size() >= 2, "Need to start at least 1 instance to begin killing old instances" ); Assertions.assertTrue(requestManager.isBouncing(requestId)); cleaner.drainCleanupQueue(); killKilledTasks(); Assertions.assertFalse(requestManager.isBouncing(requestId)); // It finishes with one task running and the bounce released Assertions.assertEquals( 1, taskManager.getActiveTaskIds().size(), "Should end bounce with target number of tasks" ); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { String statusMessage = taskManager .getTaskHistoryUpdates(singularityTaskId) .get(0) .getStatusMessage() .get(); Assertions.assertTrue( statusMessage.contains("BOUNCE"), "Task was started by bounce" ); } Assertions.assertFalse( requestManager.getExpiringBounce(requestId).isPresent(), "Lock on bounce should be released after bounce" ); } @Test public void testBounceReleasesLockWithAlternateCleanupType() { initRequest(); initFirstDeploy(); startTask(firstDeploy, 1); List<SingularityTaskId> activeTaskIds = taskManager.getActiveTaskIds(); Assertions.assertEquals(1, activeTaskIds.size()); SingularityTaskId firstTaskId = activeTaskIds.get(0); requestResource.bounce( requestId, Optional.of( new SingularityBounceRequest( Optional.empty(), Optional.of(true), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ), singularityUser ); Assertions.assertTrue(requestManager.isBouncing(requestId)); cleaner.drainCleanupQueue(); scheduler.drainPendingQueue(); resourceOffers(); // Save a new cleanup type over the old one, and make sure the bounce lock still releases taskManager.saveTaskCleanup( new SingularityTaskCleanup( Optional.empty(), TaskCleanupType.USER_REQUESTED, System.currentTimeMillis(), firstTaskId, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ) ); for (SingularityTaskId singularityTaskId : taskManager.getActiveTaskIds()) { taskManager.saveTaskHistoryUpdate( new SingularityTaskHistoryUpdate( singularityTaskId, System.currentTimeMillis(), ExtendedTaskState.TASK_RUNNING, Optional.empty(), Optional.empty(), Collections.emptySet() ) ); } Assertions.assertTrue(requestManager.isBouncing(requestId)); cleaner.drainCleanupQueue(); killKilledTasks(); 
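// Even though the bounce task cleanup was overwritten with a USER_REQUESTED cleanup, draining the queue still releases the bounce lock.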
Assertions.assertFalse(requestManager.isBouncing(requestId)); } @Test public void testIncrementalBounce() { initRequest(); resourceOffers(2); // set up slaves so scale validate will pass SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); requestResource.postRequest( request .toBuilder() .setSlavePlacement(Optional.of(SlavePlacement.SEPARATE_BY_REQUEST)) .setInstances(Optional.of(2)) .build(), singularityUser ); initHCDeploy(); SingularityTask taskOne = startSeparatePlacementTask(firstDeploy, 1); SingularityTask taskTwo = startSeparatePlacementTask(firstDeploy, 2); requestManager.createCleanupRequest( new SingularityRequestCleanup( user, RequestCleanupType.INCREMENTAL_BOUNCE, System.currentTimeMillis(), Optional.<Boolean>empty(), Optional.empty(), requestId, Optional.of(firstDeployId), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty(), Optional.<SingularityShellCommand>empty() ) ); Assertions.assertTrue(requestManager.cleanupRequestExists(requestId)); cleaner.drainCleanupQueue(); Assertions.assertTrue(!requestManager.cleanupRequestExists(requestId)); Assertions.assertEquals(2, taskManager.getCleanupTaskIds().size()); scheduler.drainPendingQueue(); resourceOffers(3); SingularityTask taskThree = null; for (SingularityTask task : taskManager.getActiveTasks()) { if ( !task.getTaskId().equals(taskOne.getTaskId()) && !task.getTaskId().equals(taskTwo.getTaskId()) ) { taskThree = task; } } statusUpdate(taskThree, TaskState.TASK_RUNNING, Optional.of(1L)); Assertions.assertEquals(3, taskManager.getActiveTaskIds().size()); cleaner.drainCleanupQueue(); // No old tasks should be killed before new ones pass healthchecks Assertions.assertEquals(2, taskManager.getCleanupTaskIds().size()); taskManager.saveHealthcheckResult( new SingularityTaskHealthcheckResult( Optional.of(200), Optional.of(1000L), System.currentTimeMillis(), Optional.<String>empty(), Optional.<String>empty(), taskThree.getTaskId(), Optional.<Boolean>empty() ) ); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getCleanupTaskIds().size()); statusUpdate(taskOne, TaskState.TASK_KILLED); resourceOffers(3); SingularityTask taskFour = null; for (SingularityTask task : taskManager.getActiveTasks()) { if ( !task.getTaskId().equals(taskOne.getTaskId()) && !task.getTaskId().equals(taskTwo.getTaskId()) && !task.getTaskId().equals(taskThree.getTaskId()) ) { taskFour = task; } } statusUpdate(taskFour, TaskState.TASK_RUNNING, Optional.of(1L)); taskManager.saveHealthcheckResult( new SingularityTaskHealthcheckResult( Optional.of(200), Optional.of(1000L), System.currentTimeMillis(), Optional.<String>empty(), Optional.<String>empty(), taskFour.getTaskId(), Optional.<Boolean>empty() ) ); cleaner.drainCleanupQueue(); Assertions.assertTrue(taskManager.getCleanupTaskIds().isEmpty()); } @Test public void testScheduledNotification() { schedule = "0 0 * * * ?"; // run every hour initScheduledRequest(); initFirstDeploy(); configuration.setWarnIfScheduledJobIsRunningForAtLeastMillis(Long.MAX_VALUE); configuration.setWarnIfScheduledJobIsRunningPastNextRunPct(200); final long now = System.currentTimeMillis(); SingularityTask firstTask = launchTask( request, firstDeploy, now - TimeUnit.HOURS.toMillis(3), 1, TaskState.TASK_RUNNING ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(0)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), 
ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); configuration.setWarnIfScheduledJobIsRunningForAtLeastMillis( TimeUnit.HOURS.toMillis(1) ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(1)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(1)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); statusUpdate(firstTask, TaskState.TASK_FINISHED); Optional<SingularityDeployStatistics> deployStatistics = deployManager.getDeployStatistics( requestId, firstDeployId ); long oldAvg = deployStatistics.get().getAverageRuntimeMillis().get(); Assertions.assertTrue(deployStatistics.get().getNumTasks() == 1); Assertions.assertTrue( deployStatistics.get().getAverageRuntimeMillis().get() > 1 && deployStatistics.get().getAverageRuntimeMillis().get() < TimeUnit.DAYS.toMillis(1) ); configuration.setWarnIfScheduledJobIsRunningForAtLeastMillis(1); SingularityTask secondTask = launchTask( request, firstDeploy, now - 500, 1, TaskState.TASK_RUNNING ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(1)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); statusUpdate(secondTask, TaskState.TASK_FINISHED); deployStatistics = deployManager.getDeployStatistics(requestId, firstDeployId); Assertions.assertTrue(deployStatistics.get().getNumTasks() == 2); Assertions.assertTrue( deployStatistics.get().getAverageRuntimeMillis().get() > 1 && deployStatistics.get().getAverageRuntimeMillis().get() < oldAvg ); saveRequest( request.toBuilder().setScheduledExpectedRuntimeMillis(Optional.of(1L)).build() ); SingularityTask thirdTask = launchTask( request, firstDeploy, now - 502, 1, TaskState.TASK_RUNNING ); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(2)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); taskManager.deleteTaskHistory(thirdTask.getTaskId()); scheduledJobPoller.runActionOnPoll(); Mockito .verify(mailer, Mockito.times(3)) .sendTaskOverdueMail( ArgumentMatchers.<Optional<SingularityTask>>any(), ArgumentMatchers.<SingularityTaskId>any(), ArgumentMatchers.<SingularityRequest>any(), ArgumentMatchers.anyLong(), ArgumentMatchers.anyLong() ); } @Test public void testTaskOddities() { // test unparseable status update TaskStatus.Builder bldr = TaskStatus .newBuilder() .setTaskId(TaskID.newBuilder().setValue("task")) .setAgentId(AgentID.newBuilder().setValue("slave1")) .setState(TaskState.TASK_RUNNING); // should not throw exception: sms.statusUpdate(bldr.build()).join(); initRequest(); initFirstDeploy(); SingularityTask taskOne = launchTask( request, firstDeploy, 1, TaskState.TASK_STARTING ); taskManager.deleteTaskHistory(taskOne.getTaskId()); Assertions.assertTrue(taskManager.isActiveTask(taskOne.getTaskId())); statusUpdate(taskOne, TaskState.TASK_RUNNING); statusUpdate(taskOne, 
TaskState.TASK_FAILED); Assertions.assertTrue(!taskManager.isActiveTask(taskOne.getTaskId())); Assertions.assertEquals( 2, taskManager.getTaskHistoryUpdates(taskOne.getTaskId()).size() ); } @Test public void testOnDemandTasksPersist() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); deployChecker.checkDeploys(); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); resourceOffers(); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(2, taskManager.getActiveTaskIds().size()); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); requestResource.scheduleImmediately( singularityUser, requestId, ((SingularityRunNowRequest) null) ); scheduler.drainPendingQueue(); Assertions.assertEquals(2, taskManager.getPendingTaskIds().size()); resourceOffers(); Assertions.assertEquals(4, taskManager.getActiveTaskIds().size()); } @Test public void testRunNowScheduledJobDoesNotRetry() { initScheduledRequest(); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); SingularityRequest newRequest = request .toBuilder() .setNumRetriesOnFailure(Optional.of(2)) .build(); requestResource.postRequest(newRequest, singularityUser); initFirstDeploy(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().build() ); scheduler.drainPendingQueue(); resourceOffers(); SingularityTask task = taskManager.getActiveTasks().get(0); statusUpdate(task, TaskState.TASK_FAILED); scheduler.drainPendingQueue(); SingularityDeployStatistics deployStatistics = deployManager .getDeployStatistics( task.getTaskId().getRequestId(), task.getTaskId().getDeployId() ) .get(); Assertions.assertEquals( MesosTaskState.TASK_FAILED, deployStatistics.getLastTaskState().get().toTaskState().get() ); Assertions.assertEquals( PendingType.TASK_DONE, taskManager.getPendingTaskIds().get(0).getPendingType() ); Assertions.assertEquals(1, deployStatistics.getNumFailures()); Assertions.assertEquals(0, deployStatistics.getNumSequentialRetries()); Assertions.assertEquals( Optional.<Long>empty(), deployStatistics.getAverageRuntimeMillis() ); } @Test public void testRunNowOnDemandJobMayRetryOnFailure() { initRequestWithType(RequestType.ON_DEMAND, false); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); SingularityRequest newRequest = request .toBuilder() .setNumRetriesOnFailure(Optional.of(2)) .build(); requestResource.postRequest(newRequest, singularityUser); initFirstDeploy(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setMessage("foo bar").build() ); scheduler.drainPendingQueue(); resourceOffers(); SingularityTask task = taskManager.getActiveTasks().get(0); statusUpdate(task, TaskState.TASK_FAILED); scheduler.drainPendingQueue(); SingularityDeployStatistics deployStatistics = deployManager .getDeployStatistics( task.getTaskId().getRequestId(), task.getTaskId().getDeployId() ) .get(); Assertions.assertEquals( MesosTaskState.TASK_FAILED, deployStatistics.getLastTaskState().get().toTaskState().get() ); Assertions.assertEquals( PendingType.RETRY, 
taskManager.getPendingTaskIds().get(0).getPendingType() ); Assertions.assertEquals( "foo bar", taskManager.getPendingTasks().get(0).getMessage().get() ); Assertions.assertEquals(1, deployStatistics.getNumFailures()); Assertions.assertEquals(1, deployStatistics.getNumSequentialRetries()); } @Test public void testRunNowOnDemandJobsDoNotRetryAfterUserRequestedKill() { initRequestWithType(RequestType.ON_DEMAND, false); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); SingularityRequest newRequest = request .toBuilder() .setNumRetriesOnFailure(Optional.of(2)) .build(); requestResource.postRequest(newRequest, singularityUser); initFirstDeploy(); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setMessage("foo bar").build() ); scheduler.drainPendingQueue(); resourceOffers(); SingularityTask task = taskManager.getActiveTasks().get(0); taskManager.saveTaskCleanup( new SingularityTaskCleanup( Optional.of(singularityUser.getId()), TaskCleanupType.USER_REQUESTED, System.currentTimeMillis(), task.getTaskId(), Optional.empty(), Optional.empty(), Optional.empty() ) ); cleaner.drainCleanupQueue(); statusUpdate(task, TaskState.TASK_KILLED); scheduler.drainPendingQueue(); SingularityDeployStatistics deployStatistics = deployManager .getDeployStatistics( task.getTaskId().getRequestId(), task.getTaskId().getDeployId() ) .get(); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( MesosTaskState.TASK_KILLED, deployStatistics.getLastTaskState().get().toTaskState().get() ); Assertions.assertEquals(0, deployStatistics.getNumFailures()); Assertions.assertEquals(0, deployStatistics.getNumSequentialRetries()); } @Test public void testOnDemandRunNowJobRespectsSpecifiedRunAtTime() { initOnDemandRequest(); initFirstDeploy(); long requestedLaunchTime = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(10); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setRunAt(requestedLaunchTime).build() ); scheduler.drainPendingQueue(); SingularityPendingTaskId task = taskManager.getPendingTaskIds().get(0); long runAt = task.getNextRunAt(); Assertions.assertEquals(requestedLaunchTime, runAt); } @Test public void testScheduledRunNowJobRespectsSpecifiedRunAtTime() { initScheduledRequest(); initFirstDeploy(); long requestedLaunchTime = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(10); requestResource.scheduleImmediately( singularityUser, requestId, new SingularityRunNowRequestBuilder().setRunAt(requestedLaunchTime).build() ); scheduler.drainPendingQueue(); SingularityPendingTaskId task = taskManager.getPendingTaskIds().get(0); long runAt = task.getNextRunAt(); Assertions.assertEquals(requestedLaunchTime, runAt); } @Test public void testJobRescheduledWhenItFinishesDuringDecommission() { initScheduledRequest(); initFirstDeploy(); resourceOffers(); SingularityTask task = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); slaveManager.changeState( "slave1", MachineState.STARTING_DECOMMISSION, Optional.<String>empty(), Optional.of("user1") ); cleaner.drainCleanupQueue(); resourceOffers(); cleaner.drainCleanupQueue(); statusUpdate(task, TaskState.TASK_FINISHED); scheduler.drainPendingQueue(); Assertions.assertTrue(!taskManager.getPendingTaskIds().isEmpty()); } @Test public void testScaleDownTakesHighestInstances() { initRequest(); initFirstDeploy(); saveAndSchedule(request.toBuilder().setInstances(Optional.of(5))); 
resourceOffers(); Assertions.assertEquals(5, taskManager.getActiveTaskIds().size()); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(2), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); resourceOffers(); cleaner.drainCleanupQueue(); Assertions.assertEquals(3, taskManager.getKilledTaskIdRecords().size()); for (SingularityKilledTaskIdRecord taskId : taskManager.getKilledTaskIdRecords()) { Assertions.assertTrue(taskId.getTaskId().getInstanceNo() > 2); scheduler.drainPendingQueue(); } } @Test public void testScaleDownTakesHighestInstancesWithPendingTask() { initRequest(); initFirstDeploy(); saveAndSchedule(request.toBuilder().setInstances(Optional.of(5))); resourceOffers(); Assertions.assertEquals(5, taskManager.getActiveTaskIds().size()); SingularityTaskId instance2 = null; for (SingularityTaskId taskId : taskManager.getActiveTaskIds()) { if (taskId.getInstanceNo() == 2) { instance2 = taskId; } } statusUpdate(taskManager.getTask(instance2).get(), TaskState.TASK_KILLED); killKilledTasks(); scheduler.drainPendingQueue(); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(3), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); scheduler.drainPendingQueue(); cleaner.drainCleanupQueue(); // instances 4 and 5 should get killed Assertions.assertEquals(2, taskManager.getKilledTaskIdRecords().size()); killKilledTasks(); resourceOffers(); // instances 1,2,3 should be active Assertions.assertEquals(3, taskManager.getActiveTaskIds().size()); for (SingularityTaskId taskId : taskManager.getActiveTaskIds()) { Assertions.assertTrue(taskId.getInstanceNo() < 4); } } @Test public void testRequestsInPendingQueueAreOrderedByTimestamp() { long now = System.currentTimeMillis(); initRequestWithType(RequestType.SCHEDULED, false); startFirstDeploy(); SingularityPendingRequest pendingDeployRequest = new SingularityPendingRequest( requestId, firstDeploy.getId(), now, Optional.empty(), PendingType.NEW_DEPLOY, firstDeploy.getSkipHealthchecksOnDeploy(), Optional.empty() ); SingularityPendingRequest pendingRunNowRequest = new SingularityPendingRequest( requestId, firstDeploy.getId(), now + 200, Optional.empty(), PendingType.IMMEDIATE, firstDeploy.getSkipHealthchecksOnDeploy(), Optional.empty() ); requestManager.addToPendingQueue(pendingDeployRequest); requestManager.addToPendingQueue(pendingRunNowRequest); Assertions.assertEquals(2, requestManager.getPendingRequests().size()); // Was added first Assertions.assertEquals( PendingType.NEW_DEPLOY, requestManager.getPendingRequests().get(0).getPendingType() ); // Was added second Assertions.assertEquals( PendingType.IMMEDIATE, requestManager.getPendingRequests().get(1).getPendingType() ); resourceOffers(); } @Test public void testImmediateRequestsAreConsistentlyDeleted() { long now = System.currentTimeMillis(); initRequestWithType(RequestType.SCHEDULED, false); startFirstDeploy(); SingularityPendingRequest pendingDeployRequest = new SingularityPendingRequest( requestId, firstDeploy.getId(), now, Optional.empty(), PendingType.NEW_DEPLOY, firstDeploy.getSkipHealthchecksOnDeploy(), Optional.empty() ); SingularityPendingRequest pendingRunNowRequest = new SingularityPendingRequest( requestId, firstDeploy.getId(), now + 200, Optional.empty(), PendingType.IMMEDIATE, firstDeploy.getSkipHealthchecksOnDeploy(), Optional.empty() ); 
requestManager.addToPendingQueue(pendingDeployRequest); requestManager.addToPendingQueue(pendingRunNowRequest); // Pending queue has two requests: NEW_DEPLOY & IMMEDIATE Assertions.assertEquals(2, requestManager.getPendingRequests().size()); requestManager.deletePendingRequest(pendingDeployRequest); // Just the immediate run Assertions.assertEquals(1, requestManager.getPendingRequests().size()); requestManager.deletePendingRequest(pendingRunNowRequest); // Immediate run was successfully deleted Assertions.assertEquals(0, requestManager.getPendingRequests().size()); } @Test public void testWaitAfterTaskWorks() { initRequest(); initFirstDeploy(); SingularityTask task = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); statusUpdate(task, TaskState.TASK_FAILED); scheduler.drainPendingQueue(); Assertions.assertTrue( taskManager.getPendingTaskIds().get(0).getNextRunAt() - System.currentTimeMillis() < 1000L ); resourceOffers(); long extraWait = 100000L; saveAndSchedule( request .toBuilder() .setWaitAtLeastMillisAfterTaskFinishesForReschedule(Optional.of(extraWait)) .setInstances(Optional.of(2)) ); resourceOffers(); statusUpdate(taskManager.getActiveTasks().get(0), TaskState.TASK_FAILED); scheduler.drainPendingQueue(); Assertions.assertTrue( taskManager.getPendingTaskIds().get(0).getNextRunAt() - System.currentTimeMillis() > 1000L ); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); } @Test public void testRemovedRequestData() { long now = System.currentTimeMillis(); initRequest(); SingularityDeployBuilder db = new SingularityDeployBuilder(requestId, firstDeployId); db.setMaxTaskRetries(Optional.of(1)); initDeploy(db, now); deployChecker.checkDeploys(); Assertions.assertEquals( DeployState.WAITING, deployManager.getPendingDeploys().get(0).getCurrentDeployState() ); requestManager.startDeletingRequest( request, Optional.empty(), Optional.<String>empty(), Optional.<String>empty(), Optional.<String>empty() ); requestManager.markDeleted( request, now, Optional.<String>empty(), Optional.<String>empty() ); deployChecker.checkDeploys(); SingularityDeployResult deployResult = deployManager .getDeployResult(requestId, firstDeployId) .get(); Assertions.assertEquals(DeployState.FAILED, deployResult.getDeployState()); Assertions.assertTrue(deployResult.getMessage().get().contains("MISSING")); } @Test public void itCorrectlyUpdatesRequestDeletingStateHistory() { initRequest(); Assertions.assertEquals( RequestState.ACTIVE, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals(1, requestManager.getRequestHistory(requestId).size()); requestManager.startDeletingRequest( request, Optional.empty(), Optional.<String>empty(), Optional.<String>empty(), Optional.of("the cake is a lie") ); Assertions.assertEquals( RequestState.DELETING, requestManager.getRequest(requestId).get().getState() ); Assertions.assertEquals(2, requestManager.getRequestHistory(requestId).size()); cleaner.drainCleanupQueue(); Assertions.assertEquals(3, requestManager.getRequestHistory(requestId).size()); List<RequestHistoryType> historyTypes = new ArrayList<>(); for (SingularityRequestHistory request : requestManager.getRequestHistory( requestId )) { historyTypes.add(request.getEventType()); } Assertions.assertTrue(historyTypes.contains(RequestHistoryType.CREATED)); Assertions.assertTrue(historyTypes.contains(RequestHistoryType.DELETING)); Assertions.assertTrue(historyTypes.contains(RequestHistoryType.DELETED)); } @Test public void itSetsRequestStateToDeletedAfterAllTasksAreCleanedUp() { 
initRequest(); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(2)).build(), singularityUser ); initFirstDeploy(); launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); launchTask(request, firstDeploy, 2, TaskState.TASK_RUNNING); Assertions.assertEquals( requestId, requestManager.getActiveRequests().iterator().next().getRequest().getId() ); Assertions.assertEquals(2, taskManager.getActiveTaskIds().size()); requestManager.startDeletingRequest( request, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ); Assertions.assertEquals( requestId, requestManager.getCleanupRequests().get(0).getRequestId() ); Assertions.assertEquals( RequestState.DELETING, requestManager.getRequest(requestId).get().getState() ); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getCleanupTaskIds().size()); killKilledTasks(); cleaner.drainCleanupQueue(); Assertions.assertFalse(requestManager.getRequest(requestId).isPresent()); } @Test public void itSetsRequestStateToDeletedIfTaskCleanupFails() { initRequest(); SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(2)).build(), singularityUser ); initFirstDeploy(); SingularityTask firstTask = launchTask( request, firstDeploy, 1, TaskState.TASK_RUNNING ); launchTask(request, firstDeploy, 2, TaskState.TASK_RUNNING); Assertions.assertEquals( requestId, requestManager.getActiveRequests().iterator().next().getRequest().getId() ); Assertions.assertEquals(2, taskManager.getActiveTaskIds().size()); requestManager.startDeletingRequest( request, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ); Assertions.assertEquals( requestId, requestManager.getCleanupRequests().get(0).getRequestId() ); Assertions.assertEquals( RequestState.DELETING, requestManager.getRequest(requestId).get().getState() ); statusUpdate(firstTask, TaskState.TASK_FAILED); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); cleaner.drainCleanupQueue(); Assertions.assertEquals(0, taskManager.getCleanupTaskIds().size()); killKilledTasks(); cleaner.drainCleanupQueue(); Assertions.assertFalse(requestManager.getRequest(requestId).isPresent()); } @Test public void testMaxTasksPerOffer() { configuration.setMaxTasksPerOffer(3); initRequest(); initFirstDeploy(); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(20)).build(), singularityUser ); scheduler.drainPendingQueue(); sms.resourceOffers(Arrays.asList(createOffer(36, 12024, 50000))).join(); Assertions.assertTrue(taskManager.getActiveTasks().size() == 3); sms .resourceOffers( Arrays.asList( createOffer(20, 20000, 50000, "slave1", "host1"), createOffer(20, 20000, 50000, "slave2", "host2") ) ) .join(); Assertions.assertTrue(taskManager.getActiveTasks().size() == 9); configuration.setMaxTasksPerOffer(0); resourceOffers(); Assertions.assertTrue(taskManager.getActiveTasks().size() == 20); } @Test public void testRequestedPorts() { final SingularityDeployBuilder deployBuilder = dockerDeployWithPorts(); initRequest(); initAndFinishDeploy(request, deployBuilder, Optional.of(new Resources(1, 64, 3, 0))); requestResource.postRequest( request.toBuilder().setInstances(Optional.of(2)).build(), singularityUser ); scheduler.drainPendingQueue(); String[] portRangeWithNoRequestedPorts = { "65:70" }; sms .resourceOffers( Arrays.asList( 
createOffer( 20, 20000, 50000, "slave1", "host1", Optional.<String>empty(), Collections.<String, String>emptyMap(), portRangeWithNoRequestedPorts ) ) ) .join(); Assertions.assertEquals(0, taskManager.getActiveTasks().size()); String[] portRangeWithSomeRequestedPorts = { "80:82" }; sms .resourceOffers( Arrays.asList( createOffer( 20, 20000, 50000, "slave1", "host1", Optional.<String>empty(), Collections.<String, String>emptyMap(), portRangeWithSomeRequestedPorts ) ) ) .join(); Assertions.assertEquals(0, taskManager.getActiveTasks().size()); String[] portRangeWithRequestedButNotEnoughPorts = { "80:80", "8080:8080" }; sms .resourceOffers( Arrays.asList( createOffer( 20, 20000, 50000, "slave1", "host1", Optional.<String>empty(), Collections.<String, String>emptyMap(), portRangeWithRequestedButNotEnoughPorts ) ) ) .join(); Assertions.assertEquals(0, taskManager.getActiveTasks().size()); String[] portRangeWithNeededPorts = { "80:83", "8080:8080" }; sms .resourceOffers( Arrays.asList( createOffer( 20, 20000, 50000, "slave1", "host1", Optional.<String>empty(), Collections.<String, String>emptyMap(), portRangeWithNeededPorts ) ) ) .join(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); } private SingularityDeployBuilder dockerDeployWithPorts() { final SingularityDockerPortMapping literalMapping = new SingularityDockerPortMapping( Optional.<SingularityPortMappingType>empty(), 80, Optional.of(SingularityPortMappingType.LITERAL), 8080, Optional.<String>empty() ); final SingularityDockerPortMapping offerMapping = new SingularityDockerPortMapping( Optional.<SingularityPortMappingType>empty(), 81, Optional.of(SingularityPortMappingType.FROM_OFFER), 0, Optional.of("udp") ); final SingularityContainerInfo containerInfo = new SingularityContainerInfo( SingularityContainerType.DOCKER, Optional.<List<SingularityVolume>>empty(), Optional.of( new SingularityDockerInfo( "docker-image", true, SingularityDockerNetworkType.BRIDGE, Optional.of(Arrays.asList(literalMapping, offerMapping)), Optional.of(false), Optional.of(ImmutableMap.of("env", "var=value")), Optional.empty() ) ) ); final SingularityDeployBuilder deployBuilder = new SingularityDeployBuilder( requestId, "test-docker-ports-deploy" ); deployBuilder.setContainerInfo(Optional.of(containerInfo)); return deployBuilder; } @Test public void testQueueMultipleOneOffs() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("on_demand_deploy"); deployChecker.checkDeploys(); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, "on_demand_deploy", System.currentTimeMillis(), Optional.<String>empty(), PendingType.ONEOFF, Optional.<List<String>>empty(), Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty() ) ); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, "on_demand_deploy", System.currentTimeMillis(), Optional.<String>empty(), PendingType.ONEOFF, Optional.<List<String>>empty(), Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty() ) ); scheduler.drainPendingQueue(); Assertions.assertEquals(2, taskManager.getPendingTaskIds().size()); } @Test public void testPriorityFreezeKillsActiveTasks() { final SingularityRequest lowPriorityRequest = new SingularityRequestBuilder( "lowPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.25)) .build(); 
saveRequest(lowPriorityRequest); final SingularityRequest mediumPriorityRequest = new SingularityRequestBuilder( "mediumPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.5)) .build(); saveRequest(mediumPriorityRequest); final SingularityRequest highPriorityRequest = new SingularityRequestBuilder( "highPriorityRequest", RequestType.WORKER ) .setTaskPriorityLevel(Optional.of(.75)) .build(); saveRequest(highPriorityRequest); final SingularityDeploy lowPriorityDeploy = initAndFinishDeploy( lowPriorityRequest, "lowPriorityDeploy" ); final SingularityDeploy mediumPriorityDeploy = initAndFinishDeploy( mediumPriorityRequest, "mediumPriorityDeploy" ); SingularityDeploy highPriorityDeploy = initAndFinishDeploy( highPriorityRequest, "highPriorityDeploy" ); final SingularityTask lowPriorityTask = launchTask( lowPriorityRequest, lowPriorityDeploy, 2, 1, TaskState.TASK_RUNNING ); final SingularityTask mediumPriorityTask = launchTask( mediumPriorityRequest, mediumPriorityDeploy, 1, 1, TaskState.TASK_RUNNING ); final SingularityTask highPriorityTask = launchTask( highPriorityRequest, highPriorityDeploy, 10, 1, TaskState.TASK_RUNNING ); // priority freeze of .5 means that lowPriorityRequest's task should have a cleanup priorityResource.createPriorityFreeze( singularityUser, new SingularityPriorityFreeze(.5, true, Optional.of("test"), Optional.empty()) ); // perform the killing priorityKillPoller.runActionOnPoll(); // assert lowPriorityRequest has a PRIORITY_KILL task cleanup and that mediumPriorityRequest and highPriorityRequest should not have cleanups Assertions.assertEquals( TaskCleanupType.PRIORITY_KILL, taskManager .getTaskCleanup(lowPriorityTask.getTaskId().getId()) .get() .getCleanupType() ); Assertions.assertEquals( false, taskManager.getTaskCleanup(mediumPriorityTask.getTaskId().getId()).isPresent() ); Assertions.assertEquals( false, taskManager.getTaskCleanup(highPriorityTask.getTaskId().getId()).isPresent() ); // kill task(s) with cleanups cleaner.drainCleanupQueue(); killKilledTasks(); // assert lowPriorityTask was killed, mediumPriorityTask and highPriorityTask are still running Assertions.assertEquals( ExtendedTaskState.TASK_KILLED, taskManager .getTaskHistory(lowPriorityTask.getTaskId()) .get() .getLastTaskUpdate() .get() .getTaskState() ); Assertions.assertEquals( ExtendedTaskState.TASK_RUNNING, taskManager .getTaskHistory(mediumPriorityTask.getTaskId()) .get() .getLastTaskUpdate() .get() .getTaskState() ); Assertions.assertEquals( ExtendedTaskState.TASK_RUNNING, taskManager .getTaskHistory(highPriorityTask.getTaskId()) .get() .getLastTaskUpdate() .get() .getTaskState() ); // assert lowPriorityRequest has a pending task final SingularityPendingTaskId pendingTaskId = taskManager.getPendingTaskIds().get(0); Assertions.assertEquals(PendingType.TASK_DONE, pendingTaskId.getPendingType()); Assertions.assertEquals(lowPriorityRequest.getId(), pendingTaskId.getRequestId()); // end the priority freeze priorityResource.deleteActivePriorityFreeze(singularityUser); // launch task(s) scheduler.drainPendingQueue(); resourceOffers(); // assert lowPriorityRequest has a new task running Assertions.assertNotEquals( lowPriorityTask.getTaskId(), taskManager.getActiveTaskIdsForRequest(lowPriorityRequest.getId()).get(0).getId() ); } @Test public void testPriorityFreezeDoesntLaunchTasks() { // deploy lowPriorityRequest (affected by priority freeze) final SingularityRequest lowPriorityRequest = new SingularityRequestBuilder( "lowPriorityRequest", RequestType.ON_DEMAND ) 
.setTaskPriorityLevel(Optional.of(.25)) .build(); saveRequest(lowPriorityRequest); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(lowPriorityRequest.getId(), "d1") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); // deploy medium priority request (NOT affected by priority freeze) final SingularityRequest mediumPriorityRequest = new SingularityRequestBuilder( "mediumPriorityRequest", RequestType.ON_DEMAND ) .setTaskPriorityLevel(Optional.of(.5)) .build(); saveRequest(mediumPriorityRequest); deployResource.deploy( new SingularityDeployRequest( new SingularityDeployBuilder(mediumPriorityRequest.getId(), "d2") .setCommand(Optional.of("cmd")) .build(), Optional.empty(), Optional.empty() ), singularityUser ); // create priority freeze priorityManager.createPriorityFreeze( new SingularityPriorityFreezeParent( new SingularityPriorityFreeze( 0.3, true, Optional.<String>empty(), Optional.<String>empty() ), System.currentTimeMillis(), Optional.<String>empty() ) ); // launch both tasks requestResource.scheduleImmediately( singularityUser, lowPriorityRequest.getId(), ((SingularityRunNowRequest) null) ); requestResource.scheduleImmediately( singularityUser, mediumPriorityRequest.getId(), ((SingularityRunNowRequest) null) ); // drain pending queue scheduler.drainPendingQueue(); resourceOffers(); // assert that lowPriorityRequest has a pending task Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( lowPriorityRequest.getId(), taskManager.getPendingTaskIds().get(0).getRequestId() ); // assert that only mediumPriorityRequest has an active task Assertions.assertEquals( 0, taskManager.getActiveTaskIdsForRequest(lowPriorityRequest.getId()).size() ); Assertions.assertEquals( 1, taskManager.getActiveTaskIdsForRequest(mediumPriorityRequest.getId()).size() ); // delete priority freeze Assertions.assertEquals( SingularityDeleteResult.DELETED, priorityManager.deleteActivePriorityFreeze() ); // drain pending scheduler.drainPendingQueue(); resourceOffers(); // check that both requests have active tasks Assertions.assertEquals( 1, taskManager.getActiveTaskIdsForRequest(lowPriorityRequest.getId()).size() ); Assertions.assertEquals( 1, taskManager.getActiveTaskIdsForRequest(mediumPriorityRequest.getId()).size() ); } @Test public void testObsoletePendingRequestsRemoved() { initRequest(); initFirstDeploy(); SingularityTask taskOne = startTask(firstDeploy); requestResource.pause(requestId, Optional.empty(), singularityUser); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, firstDeployId, System.currentTimeMillis(), Optional.<String>empty(), PendingType.NEW_DEPLOY, Optional.<Boolean>empty(), Optional.<String>empty() ) ); Assertions.assertEquals(requestManager.getPendingRequests().size(), 1); scheduler.drainPendingQueue(); Assertions.assertEquals(requestManager.getPendingRequests().size(), 0); } @Test public void testCronScheduleChanges() throws Exception { final String requestId = "test-change-cron"; final String oldSchedule = "*/5 * * * *"; final String oldScheduleQuartz = "0 */5 * * * ?"; final String newSchedule = "*/30 * * * *"; final String newScheduleQuartz = "0 */30 * * * ?"; SingularityRequest request = new SingularityRequestBuilder( requestId, RequestType.SCHEDULED ) .setSchedule(Optional.of(oldSchedule)) .build(); request = validator.checkSingularityRequest( request, Optional.<SingularityRequest>empty(), Optional.<SingularityDeploy>empty(), 
Optional.<SingularityDeploy>empty() ); saveRequest(request); Assertions.assertEquals( oldScheduleQuartz, requestManager.getRequest(requestId).get().getRequest().getQuartzScheduleSafe() ); initAndFinishDeploy(request, "1"); scheduler.drainPendingQueue(); final SingularityRequest newRequest = request .toBuilder() .setSchedule(Optional.of(newSchedule)) .setQuartzSchedule(Optional.<String>empty()) .build(); final SingularityDeploy newDeploy = new SingularityDeployBuilder(request.getId(), "2") .setCommand(Optional.of("sleep 100")) .build(); deployResource.deploy( new SingularityDeployRequest( newDeploy, Optional.empty(), Optional.empty(), Optional.of(newRequest) ), singularityUser ); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); Assertions.assertEquals( newScheduleQuartz, requestManager.getRequest(requestId).get().getRequest().getQuartzScheduleSafe() ); } @Test public void testImmediateRunReplacesScheduledTask() { initScheduledRequest(); SingularityDeploy deploy = SingularityDeploy .newBuilder(requestId, firstDeployId) .setCommand(Optional.of("sleep 100")) .build(); SingularityDeployRequest singularityDeployRequest = new SingularityDeployRequest( deploy, Optional.empty(), Optional.empty(), Optional.empty() ); deployResource.deploy(singularityDeployRequest, singularityUser); scheduler.drainPendingQueue(); SingularityPendingTask task1 = createAndSchedulePendingTask(firstDeployId); Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( PendingType.NEW_DEPLOY, taskManager.getPendingTaskIds().get(0).getPendingType() ); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, deploy.getId(), System.currentTimeMillis(), Optional.empty(), PendingType.IMMEDIATE, deploy.getSkipHealthchecksOnDeploy(), Optional.empty() ) ); scheduler.drainPendingQueue(); Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( PendingType.IMMEDIATE, taskManager.getPendingTaskIds().get(0).getPendingType() ); } @Test public void testSchedulerDropsMultipleScheduledTaskInstances() { initScheduledRequest(); SingularityDeploy deploy = SingularityDeploy .newBuilder(requestId, firstDeployId) .setCommand(Optional.of("sleep 100")) .build(); SingularityDeployRequest singularityDeployRequest = new SingularityDeployRequest( deploy, Optional.empty(), Optional.empty(), Optional.empty() ); deployResource.deploy(singularityDeployRequest, singularityUser); scheduler.drainPendingQueue(); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, firstDeployId, Instant.now().plus(3, ChronoUnit.DAYS).toEpochMilli(), Optional.empty(), PendingType.NEW_DEPLOY, Optional.empty(), Optional.empty() ) ); SingularityRunNowRequest runNowRequest = new SingularityRunNowRequestBuilder() .build(); requestResource.scheduleImmediately(singularityUser, requestId, runNowRequest); Assertions.assertEquals(2, requestManager.getPendingRequests().size()); Assertions.assertEquals( PendingType.IMMEDIATE, requestManager.getPendingRequests().get(0).getPendingType() ); Assertions.assertEquals( PendingType.NEW_DEPLOY, requestManager.getPendingRequests().get(1).getPendingType() ); scheduler.drainPendingQueue(); Assertions.assertEquals(1, taskManager.getPendingTaskIds().size()); Assertions.assertEquals( PendingType.IMMEDIATE, taskManager.getPendingTaskIds().get(0).getPendingType() ); Assertions.assertEquals(0, requestManager.getPendingRequests().size()); } @Test public void testInvalidQuartzTimeZoneErrors() { SingularityRequest req = new 
SingularityRequestBuilder( requestId, RequestType.SCHEDULED ) .setQuartzSchedule(Optional.of("*/1 * * * * ? 2020")) .setScheduleType(Optional.of(ScheduleType.QUARTZ)) .setScheduleTimeZone(Optional.of("invalid_timezone")) .build(); Assertions.assertThrows( WebApplicationException.class, () -> requestResource.postRequest(req, singularityUser) ); } @Test public void testDifferentQuartzTimeZones() { final Optional<String> schedule = Optional.of("* 30 14 22 3 ? 2083"); SingularityRequest requestEST = new SingularityRequestBuilder( "est_id", RequestType.SCHEDULED ) .setSchedule(schedule) .setScheduleType(Optional.of(ScheduleType.QUARTZ)) .setScheduleTimeZone(Optional.of("EST")) // fixed in relation to GMT .build(); SingularityRequest requestGMT = new SingularityRequestBuilder( "gmt_id", RequestType.SCHEDULED ) .setSchedule(schedule) .setScheduleType(Optional.of(ScheduleType.QUARTZ)) .setScheduleTimeZone(Optional.of("GMT")) .build(); requestResource.postRequest(requestEST, singularityUser); requestResource.postRequest(requestGMT, singularityUser); SingularityDeploy deployEST = new SingularityDeployBuilder( requestEST.getId(), "est_deploy_id" ) .setCommand(Optional.of("sleep 1")) .build(); SingularityDeploy deployGMT = new SingularityDeployBuilder( requestGMT.getId(), "gmt_deploy_id" ) .setCommand(Optional.of("sleep 1")) .build(); deployResource.deploy( new SingularityDeployRequest( deployEST, Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); deployResource.deploy( new SingularityDeployRequest( deployGMT, Optional.empty(), Optional.empty(), Optional.empty() ), singularityUser ); deployChecker.checkDeploys(); scheduler.drainPendingQueue(); final long nextRunEST; final long nextRunGMT; final long fiveHoursInMilliseconds = TimeUnit.HOURS.toMillis(5); final List<SingularityPendingTaskId> pendingTaskIds = taskManager.getPendingTaskIds(); if (pendingTaskIds.get(0).getRequestId().equals(requestEST.getId())) { nextRunEST = pendingTaskIds.get(0).getNextRunAt(); nextRunGMT = pendingTaskIds.get(1).getNextRunAt(); } else { nextRunEST = pendingTaskIds.get(1).getNextRunAt(); nextRunGMT = pendingTaskIds.get(0).getNextRunAt(); } // GMT happens first, so EST is a larger timestamp Assertions.assertEquals(nextRunEST - nextRunGMT, fiveHoursInMilliseconds); } @Test public void testDeployCleanupOverwritesTaskBounceCleanup() { initRequest(); initFirstDeploy(); final SingularityTask oldTask = startTask(firstDeploy); taskResource.killTask( oldTask.getTaskId().getId(), Optional.of( new SingularityKillTaskRequest( Optional.empty(), Optional.empty(), Optional.empty(), Optional.of(true), Optional.empty() ) ), singularityUser ); final Optional<SingularityTaskCleanup> taskCleanup = taskManager.getTaskCleanup( oldTask.getTaskId().getId() ); Assertions.assertTrue(taskCleanup.isPresent()); Assertions.assertEquals( TaskCleanupType.USER_REQUESTED_TASK_BOUNCE, taskCleanup.get().getCleanupType() ); initSecondDeploy(); startTask(secondDeploy); deployChecker.checkDeploys(); Assertions.assertEquals( DeployState.SUCCEEDED, deployManager.getDeployResult(requestId, secondDeployId).get().getDeployState() ); Assertions.assertEquals( TaskCleanupType.DEPLOY_STEP_FINISHED, taskManager.getTaskCleanup(oldTask.getTaskId().getId()).get().getCleanupType() ); cleaner.drainCleanupQueue(); Assertions.assertFalse( taskManager.getTaskCleanup(oldTask.getTaskId().getId()).isPresent() ); } @Test public void testCleanerFindsTasksWithSkippedHealthchecks() { initRequest(); resourceOffers(2); // set up slaves so scale validate will pass 
SingularityRequest request = requestResource .getRequest(requestId, singularityUser) .getRequest(); long now = System.currentTimeMillis(); requestManager.saveHistory( new SingularityRequestHistory( now, Optional.<String>empty(), RequestHistoryType.UPDATED, request .toBuilder() .setSkipHealthchecks(Optional.of(true)) .setInstances(Optional.of(2)) .build(), Optional.<String>empty() ) ); firstDeploy = initDeploy( new SingularityDeployBuilder(request.getId(), firstDeployId) .setCommand(Optional.of("sleep 100")) .setHealthcheckUri(Optional.of("http://uri")), System.currentTimeMillis() ); SingularityTask taskOne = launchTask( request, firstDeploy, now + 1000, now + 2000, 1, TaskState.TASK_RUNNING ); finishDeploy( new SingularityDeployMarker( requestId, firstDeployId, now + 2000, Optional.<String>empty(), Optional.<String>empty() ), firstDeploy ); SingularityRequest updatedRequest = request .toBuilder() .setSkipHealthchecks(Optional.<Boolean>empty()) .setInstances(Optional.of(2)) .build(); requestManager.saveHistory( new SingularityRequestHistory( now + 3000, Optional.<String>empty(), RequestHistoryType.UPDATED, updatedRequest, Optional.<String>empty() ) ); SingularityTask newTaskTwoWithCheck = prepTask( updatedRequest, firstDeploy, now + 4000, 2 ); taskManager.createTaskAndDeletePendingTask(newTaskTwoWithCheck); statusUpdate(newTaskTwoWithCheck, TaskState.TASK_RUNNING, Optional.of(now + 5000)); taskManager.saveHealthcheckResult( new SingularityTaskHealthcheckResult( Optional.of(200), Optional.of(1000L), now + 6000, Optional.<String>empty(), Optional.<String>empty(), newTaskTwoWithCheck.getTaskId(), Optional.<Boolean>empty() ) ); SingularityTask unhealthyTaskThree = prepTask( updatedRequest, firstDeploy, now + 4000, 3 ); taskManager.createTaskAndDeletePendingTask(unhealthyTaskThree); statusUpdate(unhealthyTaskThree, TaskState.TASK_RUNNING, Optional.of(now + 5000)); List<SingularityTaskId> activeTaskIds = taskManager.getActiveTaskIdsForRequest( requestId ); List<SingularityTaskId> healthyTaskIds = deployHealthHelper.getHealthyTasks( updatedRequest, Optional.of(firstDeploy), activeTaskIds, false ); Assertions.assertTrue(!healthyTaskIds.contains(unhealthyTaskThree.getTaskId())); Assertions.assertEquals(2, healthyTaskIds.size()); // Healthchecked and skip-healthchecked tasks should both be here Assertions.assertEquals( DeployHealth.WAITING, deployHealthHelper.getDeployHealth( updatedRequest, Optional.of(firstDeploy), activeTaskIds, false ) ); taskManager.saveHealthcheckResult( new SingularityTaskHealthcheckResult( Optional.of(200), Optional.of(1000L), now + 6000, Optional.<String>empty(), Optional.<String>empty(), unhealthyTaskThree.getTaskId(), Optional.<Boolean>empty() ) ); Assertions.assertEquals( DeployHealth.HEALTHY, deployHealthHelper.getDeployHealth( updatedRequest, Optional.of(firstDeploy), activeTaskIds, false ) ); } @Test public void testScaleWithBounceDoesNotLaunchExtraInstances() { initRequest(); initFirstDeploy(); launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); requestResource.scale( requestId, new SingularityScaleRequest( Optional.of(5), Optional.of(1L), Optional.empty(), Optional.empty(), Optional.empty(), Optional.of(true), Optional.empty(), Optional.empty() ), singularityUser ); Assertions.assertEquals(1, requestManager.getCleanupRequests().size()); cleaner.drainCleanupQueue(); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); scheduler.drainPendingQueue(); Assertions.assertEquals(5, taskManager.getPendingTaskIds().size()); } @Test public void 
testAcceptOffersWithRoleForRequestWithRole() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); bldr.setRequiredRole(Optional.of("test-role")); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); SingularityRunNowRequest runNowRequest = new SingularityRunNowRequestBuilder() .setResources(new Resources(2, 2, 0)) .build(); requestResource.scheduleImmediately(singularityUser, requestId, runNowRequest); scheduler.drainPendingQueue(); SingularityPendingTask pendingTaskWithResources = taskManager .getPendingTasks() .get(0); Assertions.assertTrue(pendingTaskWithResources.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResources.getResources().get().getCpus(), 2, 0.0 ); sms.resourceOffers(Arrays.asList(createOffer(5, 5, 5))).join(); pendingTaskWithResources = taskManager.getPendingTasks().get(0); Assertions.assertTrue(pendingTaskWithResources.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResources.getResources().get().getCpus(), 2, 0.0 ); sms .resourceOffers(Arrays.asList(createOffer(5, 5, 5, Optional.of("test-role")))) .join(); SingularityTask task = taskManager.getActiveTasks().get(0); Assertions.assertEquals( MesosUtils.getNumCpus( mesosProtosUtils.toResourceList(task.getMesosTask().getResources()), Optional.of("test-role") ), 2.0, 0.0 ); } @Test public void testNotAcceptOfferWithRoleForRequestWithoutRole() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); requestResource.postRequest(bldr.build(), singularityUser); deploy("d2"); SingularityRunNowRequest runNowRequest = new SingularityRunNowRequestBuilder() .setResources(new Resources(2, 2, 0)) .build(); requestResource.scheduleImmediately(singularityUser, requestId, runNowRequest); scheduler.drainPendingQueue(); SingularityPendingTask pendingTaskWithResources = taskManager .getPendingTasks() .get(0); Assertions.assertTrue(pendingTaskWithResources.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResources.getResources().get().getCpus(), 2, 0.0 ); sms .resourceOffers(Arrays.asList(createOffer(5, 5, 5, Optional.of("test-role")))) .join(); pendingTaskWithResources = taskManager.getPendingTasks().get(0); Assertions.assertTrue(pendingTaskWithResources.getResources().isPresent()); Assertions.assertEquals( pendingTaskWithResources.getResources().get().getCpus(), 2, 0.0 ); } @Test public void testMaxOnDemandTasks() { SingularityRequestBuilder bldr = new SingularityRequestBuilder( requestId, RequestType.ON_DEMAND ); bldr.setInstances(Optional.of(1)); requestResource.postRequest(bldr.build(), singularityUser); deploy("on_demand_deploy"); deployChecker.checkDeploys(); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, "on_demand_deploy", System.currentTimeMillis(), Optional.<String>empty(), PendingType.ONEOFF, Optional.<List<String>>empty(), Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty() ) ); requestManager.addToPendingQueue( new SingularityPendingRequest( requestId, "on_demand_deploy", System.currentTimeMillis(), Optional.<String>empty(), PendingType.ONEOFF, Optional.<List<String>>empty(), Optional.<String>empty(), Optional.<Boolean>empty(), Optional.<String>empty(), Optional.<String>empty() ) ); scheduler.drainPendingQueue(); resourceOffers(); Assertions.assertEquals(1, taskManager.getActiveTaskIds().size()); } @Test public void testCleanupsCreatedOnScaleDown() { initRequest(); 
SingularityRequestBuilder bldr = request.toBuilder(); bldr.setInstances(Optional.of(2)); requestResource.postRequest(bldr.build(), singularityUser); initFirstDeploy(); SingularityTask firstTask = launchTask( request, firstDeploy, 1, TaskState.TASK_RUNNING ); SingularityTask secondTask = launchTask( request, firstDeploy, 2, TaskState.TASK_RUNNING ); Assertions.assertEquals(0, taskManager.getNumCleanupTasks()); bldr.setInstances(Optional.of(1)); requestResource.postRequest(bldr.build(), singularityUser); Assertions.assertEquals(1, taskManager.getNumCleanupTasks()); Assertions.assertEquals( taskManager.getCleanupTaskIds().get(0), secondTask.getTaskId() ); } @Test public void testRecoveredTask() { // set up the slave first sms .resourceOffers( Arrays.asList(createOffer(1, 129, 1025, "slave1", "host1", Optional.of("rack1"))) ) .join(); initRequest(); initFirstDeploy(); SingularityTask task = launchTask(request, firstDeploy, 1, TaskState.TASK_RUNNING); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); TaskStatus lost = TaskStatus .newBuilder() .setTaskId(MesosProtosUtils.toTaskId(task.getMesosTask().getTaskId())) .setAgentId(MesosProtosUtils.toAgentId(task.getAgentId())) .setReason(Reason.REASON_AGENT_REMOVED) .setMessage("health check timed out") .setState(TaskState.TASK_LOST) .build(); sms.statusUpdate(lost).join(); Assertions.assertEquals(0, taskManager.getNumActiveTasks()); Assertions.assertTrue(taskManager.getTaskHistory(task.getTaskId()).isPresent()); TaskStatus recovered = TaskStatus .newBuilder() .setTaskId(MesosProtosUtils.toTaskId(task.getMesosTask().getTaskId())) .setAgentId(MesosProtosUtils.toAgentId(task.getAgentId())) .setReason(Reason.REASON_AGENT_REREGISTERED) .setMessage("agent reregistered") .setState(TaskState.TASK_RUNNING) .build(); sms.statusUpdate(recovered).join(); Assertions.assertEquals(1, taskManager.getNumActiveTasks()); Assertions.assertEquals(1, requestManager.getSizeOfPendingQueue()); } @Test public void itRetriesLostShortRunningRequests() { runTest(RequestType.ON_DEMAND, Reason.REASON_AGENT_RESTARTED, true); } @Test public void itDoesNotRetryLostLongRunningRequests() { runTest(RequestType.SERVICE, Reason.REASON_AGENT_RESTARTED, false); } @Test public void itDoesNotRetryLostRequestsDueToNonAgentFailures() { runTest(RequestType.ON_DEMAND, Reason.REASON_CONTAINER_LIMITATION_DISK, false); } private void runTest(RequestType requestType, Reason reason, boolean shouldRetry) { initRequestWithType(requestType, false); initFirstDeploy(); SingularityTask task = startTask(firstDeploy); Assertions.assertEquals(0, taskManager.getPendingTaskIds().size()); Assertions.assertEquals(0, requestManager.getPendingRequests().size()); try { updateHandler .processStatusUpdateAsync( TaskStatus .newBuilder() .setState(TaskState.TASK_LOST) .setReason(reason) .setTaskId(TaskID.newBuilder().setValue(task.getTaskId().getId())) .build() ) .get(); } catch (InterruptedException | ExecutionException e) { Assertions.assertTrue(false); } if (shouldRetry) { Assertions.assertEquals(requestManager.getPendingRequests().size(), 1); Assertions.assertEquals( requestManager.getPendingRequests().get(0).getPendingType(), PendingType.RETRY ); } else { if (requestManager.getPendingRequests().size() > 0) { Assertions.assertEquals( requestManager.getPendingRequests().get(0).getPendingType(), PendingType.TASK_DONE ); } } scheduler.drainPendingQueue(); } }
Add tests.
SingularityService/src/test/java/com/hubspot/singularity/scheduler/SingularitySchedulerTest.java
Add tests.
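The testScheduledNotification test in the SingularitySchedulerTest file above leans on Mockito's times()-based verification to pin down exactly how many overdue-task mails have been sent as the warning thresholds change. The following is a minimal, self-contained sketch of that verification pattern only; the Mailer interface here is hypothetical and stands in for Singularity's own mailer and task types, which are not reproduced.

import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import org.junit.jupiter.api.Test;

class OverdueMailVerificationSketch {
  // Hypothetical stand-in for Singularity's mailer; only the call-count pattern matters here.
  interface Mailer {
    void sendTaskOverdueMail(String taskId, long runtimeMillis);
  }

  @Test
  void mailIsSentExactlyOncePerThresholdCrossing() {
    Mailer mailer = mock(Mailer.class);

    // Before any threshold is crossed, the poller sends nothing.
    verify(mailer, times(0)).sendTaskOverdueMail(anyString(), anyLong());

    // The poller decides the task is overdue and sends one mail...
    mailer.sendTaskOverdueMail("task-1", 3_600_000L);

    // ...and repeated polls must not send it again, so the cumulative count stays at one.
    verify(mailer, times(1)).sendTaskOverdueMail(anyString(), anyLong());
  }
}

As in the test above, the assertion is on the cumulative number of calls observed on the mock, which is why the original test repeats the same verify with an increasing times() value after each poll.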
Java
apache-2.0
e67fc5bc24b01f274bdf288b2e20143f56dc2994
0
schwarzmx/cp-common-utils,schwarzmx/cp-common-utils,mhgrove/cp-common-utils,mhgrove/cp-common-utils
/* * Copyright (c) 2005-2010 Clark & Parsia, LLC. <http://www.clarkparsia.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.clarkparsia.utils.web; import com.clarkparsia.utils.io.Encoder; import com.clarkparsia.utils.io.IOUtil; import java.net.URL; import java.net.MalformedURLException; import java.net.URLConnection; import java.net.HttpURLConnection; import java.io.InputStream; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.util.HashMap; import java.util.Map; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Arrays; import java.util.zip.GZIPInputStream; /** * <p></p> * * @author Michael Grove * @since 1.0 */ public class Request { private URL mURL; private Method mMethod; private ParameterList mParameters = new ParameterList(); private InputStream mBody; private Map<String, Header> mHeaders = new HashMap<String, Header>(); private int mTimeout = -1; private boolean mFollowRedirects; public Request(String theURL) throws MalformedURLException { this(Method.GET, new URL(theURL)); } public Request(URL theURL) { this(Method.GET, theURL); } public Request(Method theMethod, URL theURL) { mMethod = theMethod; mURL = theURL; } public static Request formPost(URL theURL, ParameterList theParams) { Request aRequest = new Request(Method.POST, theURL); aRequest.addHeader(new Header(HttpHeaders.ContentType.getName(), MimeTypes.FormUrlEncoded.getMimeType())); aRequest.setBody(theParams.toString()); return aRequest; } /** * Return the current timeout value * @return the timeout value in milliseconds or -1 for no timeout */ public int getTimeout() { return mTimeout; } /** * Set the timeout associated associated with this request * @param theTimeout the timeout in milliseconds, or -1 for no timeout * @return this request */ public Request setTimeout(final int theTimeout) { mTimeout = theTimeout; return this; } /** * Return whether or not this request will follow redirects * @return true to follow redirects, false otherwise */ public boolean isFollowRedirects() { return mFollowRedirects; } /** * Set whether or not this request will follow redirects * @param theFollowRedirects true to follow redirects, false otherwise * @return this request */ public Request setFollowRedirects(final boolean theFollowRedirects) { mFollowRedirects = theFollowRedirects; return this; } /** * Add a parameter to this web request * @param theKey the parameter key * @param theValue the parameter value * @return this request */ public Request addParameter(String theKey, String theValue) { return addParameter(new Parameter(theKey, theValue)); } /** * Adds a parameter to this web request * @param theParameter the parameter to add * @return this request */ public Request addParameter(Parameter theParameter) { mParameters.add(theParameter); return this; } /** * Sets the list of parameters for this web request * @param theParameters the list of parameters * 
@return this request */ public Request setParameters(final ParameterList theParameters) { mParameters = theParameters; return this; } /** * Add a header to this request * @param theHeader the header to add * @return this request */ public Request addHeader(Header theHeader) { if (mHeaders.containsKey(theHeader.getName())) { theHeader.addValues(mHeaders.get(theHeader.getName()).getValues()); } mHeaders.put(theHeader.getName(), theHeader); return this; } public Request addHeader(String theName, String... theValue) { addHeader(new Header(theName, Arrays.asList(theValue))); return this; } public Request setBody(String theString) { try { mBody = new ByteArrayInputStream(theString.getBytes(Encoder.UTF8.name())); } catch (UnsupportedEncodingException e) { // can safely be ignored, we know java supports UTF8 } return this; } public Request setBody(final InputStream theBody) { mBody = theBody; return this; } public URL getURL() { return mURL; } public Method getMethod() { return mMethod; } public ParameterList getParameters() { return mParameters; } public InputStream getBody() { return mBody; } public Collection<Header> getHeaders() { return Collections.unmodifiableCollection(mHeaders.values()); } private URL getURLWithParams() throws IOException { if (getMethod().equals(Method.GET) && !getParameters().isEmpty()) { try { return new URL(getURL().toString() + "?" + getParameters().getURLEncoded()); } catch (MalformedURLException e) { throw new IOException(e.getMessage()); } } else { return getURL(); } } public Header getHeader(String theName) { return mHeaders.get(theName); } public Response execute() throws IOException { // TODO: use-caches?, if-modified-since, HTTPS security twiddling, HTTP Authentication, chunking, user interactions? URLConnection aTempConn = getURLWithParams().openConnection(); if (!(aTempConn instanceof HttpURLConnection)) { throw new IllegalArgumentException("Only HTTP or HTTPS are supported"); } HttpURLConnection aConn = (HttpURLConnection) aTempConn; aConn.setDoInput(true); if (getTimeout() != -1) { aConn.setConnectTimeout(getTimeout()); } aConn.setInstanceFollowRedirects(isFollowRedirects()); aConn.setRequestMethod(getMethod().name()); for (Header aHeader : getHeaders()) { aConn.setRequestProperty(aHeader.getName(), aHeader.getHeaderValue()); } InputStream aInput = getBody(); if (aInput != null) { aConn.setDoOutput(true); OutputStream aOut = aConn.getOutputStream(); IOUtil.transfer(aInput, aOut); if (aOut != null) { aOut.flush(); aOut.close(); } aInput.close(); } aConn.connect(); Response aResponse = new Response(); aResponse.setResponseCode(aConn.getResponseCode()); Collection<Header> aResponseHeaders = new HashSet<Header>(); Map<String, List<String>> aHeaderMap = aConn.getHeaderFields(); for (String aName : aHeaderMap.keySet()) { aResponseHeaders.add(new Header(aName, aHeaderMap.get(aName))); } aResponse.setHeaders(aResponseHeaders); aResponse.setMessage(aConn.getResponseMessage()); InputStream aResponseStream = null; try { aResponseStream = aConn.getInputStream(); // if this is GZIP encoded, then wrap the input stream String contentEncoding = aConn.getContentEncoding(); if ("gzip".equals(contentEncoding)) { aResponseStream = new GZIPInputStream(aResponseStream); } // ideally we'd like to return the response body as an inputstream and let the caller read from it at demand // rather than pulling the entire thing into memory, but doing that (i think) keeps open the connection // which is undesirable aResponse.setContent(IOUtil.readStringFromStream(aResponseStream)); } catch 
(IOException e) { aResponseStream = aConn.getErrorStream(); try { aResponse.setContent(IOUtil.readStringFromStream(aResponseStream)); } catch (IOException e1) { throw e1; } } finally { if (aResponseStream != null) { aResponseStream.close(); } } aConn.disconnect(); return aResponse; } }
src/com/clarkparsia/utils/web/Request.java
/* * Copyright (c) 2005-2010 Clark & Parsia, LLC. <http://www.clarkparsia.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.clarkparsia.utils.web; import com.clarkparsia.utils.io.Encoder; import com.clarkparsia.utils.io.IOUtil; import java.net.URL; import java.net.MalformedURLException; import java.net.URLConnection; import java.net.HttpURLConnection; import java.io.InputStream; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.util.HashMap; import java.util.Map; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Arrays; import java.util.zip.GZIPInputStream; /** * <p></p> * * @author Michael Grove * @since 1.0 */ public class Request { private URL mURL; private Method mMethod; private ParameterList mParameters = new ParameterList(); private InputStream mBody; private Map<String, Header> mHeaders = new HashMap<String, Header>(); private int mTimeout = -1; private boolean mFollowRedirects; public Request(String theURL) throws MalformedURLException { this(Method.GET, new URL(theURL)); } public Request(URL theURL) { this(Method.GET, theURL); } public Request(Method theMethod, URL theURL) { mMethod = theMethod; mURL = theURL; } public static Request formPost(URL theURL, ParameterList theParams) { Request aRequest = new Request(Method.POST, theURL); aRequest.addHeader(new Header(HttpHeaders.ContentType.getName(), MimeTypes.FormUrlEncoded.getMimeType())); aRequest.setBody(theParams.toString()); return aRequest; } public int getTimeout() { return mTimeout; } public Request setTimeout(final int theTimeout) { mTimeout = theTimeout; return this; } public boolean isFollowRedirects() { return mFollowRedirects; } public Request setFollowRedirects(final boolean theFollowRedirects) { mFollowRedirects = theFollowRedirects; return this; } public Request addParameter(Parameter theParameter) { mParameters.add(theParameter); return this; } public Request setParameters(final ParameterList theParameters) { mParameters = theParameters; return this; } public Request addHeader(Header theHeader) { if (mHeaders.containsKey(theHeader.getName())) { theHeader.addValues(mHeaders.get(theHeader.getName()).getValues()); } mHeaders.put(theHeader.getName(), theHeader); return this; } public Request addHeader(String theName, String... 
theValue) { addHeader(new Header(theName, Arrays.asList(theValue))); return this; } public Request setBody(String theString) { try { mBody = new ByteArrayInputStream(theString.getBytes(Encoder.UTF8.name())); } catch (UnsupportedEncodingException e) { // can safely be ignored, we know java supports UTF8 } return this; } public Request setBody(final InputStream theBody) { mBody = theBody; return this; } public URL getURL() { return mURL; } public Method getMethod() { return mMethod; } public ParameterList getParameters() { return mParameters; } public InputStream getBody() { return mBody; } public Collection<Header> getHeaders() { return Collections.unmodifiableCollection(mHeaders.values()); } private URL getURLWithParams() throws IOException { if (getMethod().equals(Method.GET) && !getParameters().isEmpty()) { try { return new URL(getURL().toString() + "?" + getParameters().getURLEncoded()); } catch (MalformedURLException e) { throw new IOException(e.getMessage()); } } else { return getURL(); } } public Header getHeader(String theName) { return mHeaders.get(theName); } public Response execute() throws IOException { // TODO: use-caches?, if-modified-since, HTTPS security twiddling, HTTP Authentication, chunking, user interactions? URLConnection aTempConn = getURLWithParams().openConnection(); if (!(aTempConn instanceof HttpURLConnection)) { throw new IllegalArgumentException("Only HTTP or HTTPS are supported"); } HttpURLConnection aConn = (HttpURLConnection) aTempConn; aConn.setDoInput(true); if (getTimeout() != -1) { aConn.setConnectTimeout(getTimeout()); } aConn.setInstanceFollowRedirects(isFollowRedirects()); aConn.setRequestMethod(getMethod().name()); for (Header aHeader : getHeaders()) { aConn.setRequestProperty(aHeader.getName(), aHeader.getHeaderValue()); } InputStream aInput = getBody(); if (aInput != null) { aConn.setDoOutput(true); OutputStream aOut = aConn.getOutputStream(); IOUtil.transfer(aInput, aOut); if (aOut != null) { aOut.flush(); aOut.close(); } aInput.close(); } aConn.connect(); Response aResponse = new Response(); aResponse.setResponseCode(aConn.getResponseCode()); Collection<Header> aResponseHeaders = new HashSet<Header>(); Map<String, List<String>> aHeaderMap = aConn.getHeaderFields(); for (String aName : aHeaderMap.keySet()) { aResponseHeaders.add(new Header(aName, aHeaderMap.get(aName))); } aResponse.setHeaders(aResponseHeaders); aResponse.setMessage(aConn.getResponseMessage()); InputStream aResponseStream = null; try { aResponseStream = aConn.getInputStream(); // if this is GZIP encoded, then wrap the input stream String contentEncoding = aConn.getContentEncoding(); if ("gzip".equals(contentEncoding)) { aResponseStream = new GZIPInputStream(aResponseStream); } // ideally we'd like to return the response body as an inputstream and let the caller read from it at demand // rather than pulling the entire thing into memory, but doing that (i think) keeps open the connection // which is undesirable aResponse.setContent(IOUtil.readStringFromStream(aResponseStream)); } catch (IOException e) { aResponseStream = aConn.getErrorStream(); try { aResponse.setContent(IOUtil.readStringFromStream(aResponseStream)); } catch (IOException e1) { throw e1; } } finally { if (aResponseStream != null) { aResponseStream.close(); } } aConn.disconnect(); return aResponse; } }
javadocing and adding a couple convenience functions
src/com/clarkparsia/utils/web/Request.java
javadocing and adding a couple convenience functions
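A minimal usage sketch of the fluent Request API from the commit above, covering the formPost(...) factory and the varargs addHeader(String, String...) convenience. The endpoint URLs, header values, and the Response getters used at the end are illustrative assumptions, not code taken from the record.

import com.clarkparsia.utils.web.ParameterList;
import com.clarkparsia.utils.web.Request;
import com.clarkparsia.utils.web.Response;

import java.io.IOException;
import java.net.URL;

public class RequestSketch {
    public static void main(String[] args) throws IOException {
        // GET with a header added through the varargs addHeader convenience method.
        // The URL is a placeholder.
        Response aGetResponse = new Request(new URL("http://example.com/data"))
                .addHeader("Accept", "text/plain")
                .setTimeout(5000)
                .setFollowRedirects(true)
                .execute();

        // POST of form-encoded parameters through the formPost(...) factory.
        ParameterList aParams = new ParameterList();
        Response aPostResponse = Request.formPost(new URL("http://example.com/submit"), aParams)
                .execute();

        // Assumes Response exposes getters mirroring the setters used in execute().
        System.out.println(aGetResponse.getResponseCode());
        System.out.println(aPostResponse.getMessage());
    }
}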
Java
apache-2.0
de34e5a4fcd4201c63b1c1415cb9b20cfed208e5
0
DyncKathline/LiveGiftLayout
package org.dync.giftlibrary.widget; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.content.Context; import android.util.Log; import org.dync.giftlibrary.util.ThreadUtil; import java.util.ArrayList; /** * Created by KathLine on 2017/1/8. */ public class GiftControl implements GiftFrameLayout.LeftGiftAnimationStatusListener { private static final String TAG = "GiftControl"; /** * 礼物队列(在多个线程中使用此List) */ private ArrayList<GiftModel> mGiftQueue; /** * 礼物1 */ private GiftFrameLayout mFirstItemGift; /** * 礼物2 */ private GiftFrameLayout mSecondItemGift; public GiftControl(Context context) { mGiftQueue = new ArrayList<>(); } public void setGiftLayout(GiftFrameLayout giftFrameLayout1, GiftFrameLayout giftFrameLayout2) { mFirstItemGift = giftFrameLayout1; mSecondItemGift = giftFrameLayout2; mFirstItemGift.setIndex(0); mSecondItemGift.setIndex(1); mFirstItemGift.firstHideLayout(); mSecondItemGift.firstHideLayout(); mFirstItemGift.setGiftAnimationListener(this); mSecondItemGift.setGiftAnimationListener(this); } public void loadGift(GiftModel gift) { loadGift(gift, true); } /** * 加入礼物,具有实时连击效果 * * @param gift * @param supportCombo 是否支持实时连击,如果为true:支持,否则不支持 */ public void loadGift(GiftModel gift, boolean supportCombo) { if (mGiftQueue != null) { if (supportCombo) { if (mFirstItemGift.isShowing()) { if (mFirstItemGift.getCurrentGiftId().equals(gift.getGiftId()) && mFirstItemGift.getCurrentSendUserId().equals(gift.getSendUserId())) { //连击 Log.i(TAG, "addGiftQueue: ========mFirstItemGift连击========礼物:" + gift.getGiftId() + ",连击X" + gift.getGiftCuont()); mFirstItemGift.setGiftCount(gift.getGiftCuont()); mFirstItemGift.setSendGiftTime(gift.getSendGiftTime()); return; } } if (mSecondItemGift.isShowing()) { if (mSecondItemGift.getCurrentGiftId().equals(gift.getGiftId()) && mSecondItemGift.getCurrentSendUserId().equals(gift.getSendUserId())) { //连击 Log.i(TAG, "addGiftQueue: ========mSecondItemGift连击========礼物:" + gift.getGiftId() + ",连击X" + gift.getGiftCuont()); mSecondItemGift.setGiftCount(gift.getGiftCuont()); mSecondItemGift.setSendGiftTime(gift.getSendGiftTime()); return; } } } addGiftQueue(gift, supportCombo); } } private void addGiftQueue(final GiftModel gift, final boolean supportCombo) { if (mGiftQueue != null) { if (mGiftQueue.size() == 0) { Log.d(TAG, "addGiftQueue---集合个数:" + mGiftQueue.size() + ",礼物:" + gift.getGiftId()); mGiftQueue.add(gift); showGift(); return; } } Log.d(TAG, "addGiftQueue---集合个数:" + mGiftQueue.size() + ",礼物:" + gift.getGiftId()); ThreadUtil.runInThread(new Runnable() { @Override public void run() { if (supportCombo) { boolean addflag = false; for (GiftModel model : mGiftQueue) { if (model.getGiftId().equals(gift.getGiftId()) && model.getSendUserId().equals(gift.getSendUserId())) { Log.d(TAG, "addGiftQueue: ========已有集合========" + gift.getGiftId() + ",礼物数:" + gift.getGiftCuont()); model.setGiftCuont(model.getGiftCuont() + gift.getGiftCuont()); addflag = true; break; } } //如果在现有的集合中不存在同一人发的礼物就加入到现有集合中 if (!addflag) { Log.d(TAG, "addGiftQueue: --------新的集合--------" + gift.getGiftId() + ",礼物数:" + gift.getGiftCuont()); mGiftQueue.add(gift); } } else { mGiftQueue.add(gift); } } }); } /** * 显示礼物 */ public synchronized void showGift() { if (isEmpty()) { return; } Log.d(TAG, "showGift: begin->集合个数:" + mGiftQueue.size()); if (!mFirstItemGift.isShowing() && mFirstItemGift.isEnd()) { boolean hasGift = mFirstItemGift.setGift(getGift()); if (hasGift) { mFirstItemGift.startAnimation(); } } if 
(!mSecondItemGift.isShowing() && mSecondItemGift.isEnd()) { boolean hasGift = mSecondItemGift.setGift(getGift()); if (hasGift) { mSecondItemGift.startAnimation(); } } Log.d(TAG, "showGift: end->集合个数:" + mGiftQueue.size()); } /** * 取出礼物 * * @return */ private synchronized GiftModel getGift() { GiftModel gift = null; if (mGiftQueue.size() != 0) { gift = mGiftQueue.get(0); mGiftQueue.remove(0); Log.i(TAG, "getGift---集合个数:" + mGiftQueue.size() + ",送出礼物---" + gift.getGiftId() + ",礼物数X" + gift.getGiftCuont()); } return gift; } public int getCurGiftCount(String giftId, String userName) { int curGiftCount = -1; GiftModel firstGift = mFirstItemGift.getGift(); GiftModel secondGift = mSecondItemGift.getGift(); if (firstGift != null && firstGift.getGiftId().equals(giftId) && firstGift.getSendUserName().equals(userName)) { curGiftCount = mFirstItemGift.getGiftCount(); } else if (secondGift != null && secondGift.getGiftId().equals(giftId) && secondGift.getSendUserName().equals(userName)) { curGiftCount = mSecondItemGift.getGiftCount(); } return curGiftCount; } @Override public void dismiss(int index) { if (index == 0) { reStartAnimation(mFirstItemGift, index); } else if (index == 1) { reStartAnimation(mSecondItemGift, index); } } private void reStartAnimation(final GiftFrameLayout giftFrameLayout, final int index) { //动画结束,这时不能触发连击动画 giftFrameLayout.setCurrentShowStatus(false); Log.d(TAG, "reStartAnimation: 动画结束"); AnimatorSet animatorSet = giftFrameLayout.endAnmation(); if (animatorSet != null) { animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { Log.i(TAG, "礼物动画dismiss: index = " + index); //动画完全结束 giftFrameLayout.CurrentEndStatus(true); showGift(); } }); } } /** * 清除所有礼物 */ public synchronized void cleanAll() { if (mGiftQueue != null) { mGiftQueue.clear(); } if (mFirstItemGift != null) { mFirstItemGift.clearHandler(); mFirstItemGift.stopCheckGiftCount(); } if (mSecondItemGift != null) { mSecondItemGift.clearHandler(); mSecondItemGift.stopCheckGiftCount(); } } /** * 礼物是否为空 * * @return */ public synchronized boolean isEmpty() { if (mGiftQueue == null || mGiftQueue.size() == 0) { return true; } else { return false; } } }
giftlibrary/src/main/java/org/dync/giftlibrary/widget/GiftControl.java
package org.dync.giftlibrary.widget; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.content.Context; import android.util.Log; import java.util.concurrent.CopyOnWriteArrayList; /** * Created by KathLine on 2017/1/8. */ public class GiftControl implements GiftFrameLayout.LeftGiftAnimationStatusListener { private static final String TAG = "GiftControl"; /** * 礼物队列(在多个线程中使用此List) */ private CopyOnWriteArrayList<GiftModel> mGiftQueue; /** * 礼物1 */ private GiftFrameLayout mFirstItemGift; /** * 礼物2 */ private GiftFrameLayout mSecondItemGift; public GiftControl(Context context) { mGiftQueue = new CopyOnWriteArrayList<>(); } public void setGiftLayout(GiftFrameLayout giftFrameLayout1, GiftFrameLayout giftFrameLayout2) { mFirstItemGift = giftFrameLayout1; mSecondItemGift = giftFrameLayout2; mFirstItemGift.setIndex(0); mSecondItemGift.setIndex(1); mFirstItemGift.firstHideLayout(); mSecondItemGift.firstHideLayout(); mFirstItemGift.setGiftAnimationListener(this); mSecondItemGift.setGiftAnimationListener(this); } public void loadGift(GiftModel gift) { loadGift(gift, true); } /** * 加入礼物,具有实时连击效果 * * @param gift * @param supportCombo 是否支持实时连击,如果为true:支持,否则不支持 */ public void loadGift(GiftModel gift, boolean supportCombo) { if (mGiftQueue != null) { if (supportCombo) { if (mFirstItemGift.isShowing()) { if (mFirstItemGift.getCurrentGiftId().equals(gift.getGiftId()) && mFirstItemGift.getCurrentSendUserId().equals(gift.getSendUserId())) { //连击 Log.i(TAG, "addGiftQueue: ========mFirstItemGift连击========礼物:" + gift.getGiftId() + ",连击X" + gift.getGiftCuont()); mFirstItemGift.setGiftCount(gift.getGiftCuont()); mFirstItemGift.setSendGiftTime(gift.getSendGiftTime()); return; } } if (mSecondItemGift.isShowing()) { if (mSecondItemGift.getCurrentGiftId().equals(gift.getGiftId()) && mSecondItemGift.getCurrentSendUserId().equals(gift.getSendUserId())) { //连击 Log.i(TAG, "addGiftQueue: ========mSecondItemGift连击========礼物:" + gift.getGiftId() + ",连击X" + gift.getGiftCuont()); mSecondItemGift.setGiftCount(gift.getGiftCuont()); mSecondItemGift.setSendGiftTime(gift.getSendGiftTime()); return; } } } addGiftQueue(gift, supportCombo); } } private void addGiftQueue(GiftModel gift, boolean supportCombo) { if (mGiftQueue != null) { if (mGiftQueue.size() == 0) { Log.d(TAG, "addGiftQueue---集合个数:" + mGiftQueue.size() + ",礼物:" + gift.getGiftId()); mGiftQueue.add(gift); showGift(); return; } } Log.d(TAG, "addGiftQueue---集合个数:" + mGiftQueue.size() + ",礼物:" + gift.getGiftId()); if (supportCombo) { boolean addflag = false; for (GiftModel model : mGiftQueue) { if (model.getGiftId().equals(gift.getGiftId()) && model.getSendUserId().equals(gift.getSendUserId())) { Log.d(TAG, "addGiftQueue: ========已有集合========" + gift.getGiftId() + ",礼物数:" + gift.getGiftCuont()); model.setGiftCuont(model.getGiftCuont() + gift.getGiftCuont()); addflag = true; break; } } //如果在现有的集合中不存在同一人发的礼物就加入到现有集合中 if (!addflag) { Log.d(TAG, "addGiftQueue: --------新的集合--------" + gift.getGiftId() + ",礼物数:" + gift.getGiftCuont()); mGiftQueue.add(gift); } } else { mGiftQueue.add(gift); } } /** * 显示礼物 */ public synchronized void showGift() { if (isEmpty()) { return; } Log.d(TAG, "showGift: begin->集合个数:" + mGiftQueue.size()); if (!mFirstItemGift.isShowing() && mFirstItemGift.isEnd()) { boolean hasGift = mFirstItemGift.setGift(getGift()); if (hasGift) { mFirstItemGift.startAnimation(); } } if (!mSecondItemGift.isShowing() && mSecondItemGift.isEnd()) { boolean hasGift = 
mSecondItemGift.setGift(getGift()); if (hasGift) { mSecondItemGift.startAnimation(); } } Log.d(TAG, "showGift: end->集合个数:" + mGiftQueue.size()); } /** * 取出礼物 * * @return */ private synchronized GiftModel getGift() { GiftModel gift = null; if (mGiftQueue.size() != 0) { gift = mGiftQueue.get(0); mGiftQueue.remove(0); Log.i(TAG, "getGift---集合个数:" + mGiftQueue.size() + ",送出礼物---" + gift.getGiftId() + ",礼物数X" + gift.getGiftCuont()); } return gift; } public int getCurGiftCount(String giftId, String userName) { int curGiftCount = -1; GiftModel firstGift = mFirstItemGift.getGift(); GiftModel secondGift = mSecondItemGift.getGift(); if (firstGift != null && firstGift.getGiftId().equals(giftId) && firstGift.getSendUserName().equals(userName)) { curGiftCount = mFirstItemGift.getGiftCount(); } else if (secondGift != null && secondGift.getGiftId().equals(giftId) && secondGift.getSendUserName().equals(userName)) { curGiftCount = mSecondItemGift.getGiftCount(); } return curGiftCount; } @Override public void dismiss(int index) { if (index == 0) { reStartAnimation(mFirstItemGift, index); } else if (index == 1) { reStartAnimation(mSecondItemGift, index); } } private void reStartAnimation(final GiftFrameLayout giftFrameLayout, final int index) { //动画结束,这时不能触发连击动画 giftFrameLayout.setCurrentShowStatus(false); Log.d(TAG, "reStartAnimation: 动画结束"); AnimatorSet animatorSet = giftFrameLayout.endAnmation(); if (animatorSet != null) { animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { Log.i(TAG, "礼物动画dismiss: index = " + index); //动画完全结束 giftFrameLayout.CurrentEndStatus(true); showGift(); } }); } } /** * 清除所有礼物 */ public synchronized void cleanAll() { if (mGiftQueue != null) { mGiftQueue.clear(); } if (mFirstItemGift != null) { mFirstItemGift.clearHandler(); mFirstItemGift.stopCheckGiftCount(); } if (mSecondItemGift != null) { mSecondItemGift.clearHandler(); mSecondItemGift.stopCheckGiftCount(); } } /** * 礼物是否为空 * * @return */ public synchronized boolean isEmpty() { if (mGiftQueue == null || mGiftQueue.size() == 0) { return true; } else { return false; } } }
Optimized the issue of excessive memory growth when creating a large number of gifts
giftlibrary/src/main/java/org/dync/giftlibrary/widget/GiftControl.java
Optimized the issue of excessive memory growth when creating a large number of gifts
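The change above swaps the CopyOnWriteArrayList gift queue for a plain ArrayList and moves the combo-merge bookkeeping onto a background thread; CopyOnWriteArrayList clones its backing array on every add and remove, so a write-heavy queue allocates a fresh array per gift. A rough, self-contained sketch of that allocation difference (not taken from the record; the heap figures are only indicative since garbage collection is not controlled here):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

public class QueueAllocationSketch {

    // Push and immediately pop 'writes' elements, returning the rough change in used heap.
    static long churn(List<Integer> queue, int writes) {
        long before = usedHeap();
        for (int i = 0; i < writes; i++) {
            queue.add(i);          // copy-on-write list: allocates a new backing array
            queue.remove(0);       // copy-on-write list: allocates another backing array
        }
        return usedHeap() - before;
    }

    static long usedHeap() {
        Runtime rt = Runtime.getRuntime();
        return rt.totalMemory() - rt.freeMemory();
    }

    public static void main(String[] args) {
        int writes = 200_000;
        System.out.println("ArrayList heap delta (bytes):            " + churn(new ArrayList<>(), writes));
        System.out.println("CopyOnWriteArrayList heap delta (bytes): " + churn(new CopyOnWriteArrayList<>(), writes));
    }
}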
Java
apache-2.0
c7aaeb9af3d0ceb40bd7e5c99e903a8cd7c4bbb1
0
baomidou/mybatis-plus,baomidou/mybatis-plus
package com.baomidou.mybatisplus.core.conditions.query;

import lombok.Data;

import java.io.Serializable;

/**
 * Shared query columns
 *
 * @author miemie
 * @since 2018-11-20
 */
@Data
public class SharedSqlSelect implements Serializable {

    private static final long serialVersionUID = -1536422416594422874L;

    /**
     * Query columns
     */
    private String sqlSelect;
}
mybatis-plus-core/src/main/java/com/baomidou/mybatisplus/core/conditions/query/SharedSqlSelect.java
package com.baomidou.mybatisplus.core.conditions.query;

import lombok.Data;

/**
 * Shared query columns
 *
 * @author miemie
 * @since 2018-11-20
 */
@Data
public class SharedSqlSelect {

    /**
     * Query columns
     */
    private String sqlSelect;
}
SharedSqlSelect implements Serializable
mybatis-plus-core/src/main/java/com/baomidou/mybatisplus/core/conditions/query/SharedSqlSelect.java
SharedSqlSelect implements Serializable
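A quick round-trip sketch of what the serialVersionUID above enables; it assumes the mybatis-plus class is on the classpath and relies on the setter and equals() that Lombok's @Data generates.

import com.baomidou.mybatisplus.core.conditions.query.SharedSqlSelect;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

public class SharedSqlSelectRoundTrip {
    public static void main(String[] args) throws IOException, ClassNotFoundException {
        SharedSqlSelect original = new SharedSqlSelect();
        original.setSqlSelect("id, name");              // setter generated by @Data

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(original);                  // failed with NotSerializableException before this commit
        }

        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            SharedSqlSelect copy = (SharedSqlSelect) in.readObject();
            System.out.println(copy.equals(original));  // equals() generated by @Data -> true
        }
    }
}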
Java
apache-2.0
613fead98cacde4002944d116bf2d8642ea8f6e1
0
Karm/undertow,baranowb/undertow,baranowb/undertow,stuartwdouglas/undertow,undertow-io/undertow,jamezp/undertow,Karm/undertow,pferraro/undertow,aldaris/undertow,pferraro/undertow,pferraro/undertow,jstourac/undertow,soul2zimate/undertow,darranl/undertow,golovnin/undertow,golovnin/undertow,aldaris/undertow,jamezp/undertow,rhusar/undertow,baranowb/undertow,jstourac/undertow,stuartwdouglas/undertow,rhusar/undertow,undertow-io/undertow,jamezp/undertow,jstourac/undertow,aldaris/undertow,darranl/undertow,darranl/undertow,undertow-io/undertow,soul2zimate/undertow,rhusar/undertow,stuartwdouglas/undertow,golovnin/undertow,soul2zimate/undertow,Karm/undertow
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.servlet.api; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executor; import javax.servlet.DispatcherType; import javax.servlet.MultipartConfigElement; import javax.servlet.ServletContextListener; import javax.servlet.descriptor.JspConfigDescriptor; import io.undertow.security.api.AuthenticationMechanism; import io.undertow.security.api.AuthenticationMechanismFactory; import io.undertow.security.api.AuthenticationMode; import io.undertow.security.api.NotificationReceiver; import io.undertow.security.api.SecurityContextFactory; import io.undertow.security.idm.IdentityManager; import io.undertow.server.HandlerWrapper; import io.undertow.server.handlers.resource.ResourceManager; import io.undertow.server.session.SecureRandomSessionIdGenerator; import io.undertow.server.session.SessionIdGenerator; import io.undertow.server.session.SessionListener; import io.undertow.servlet.ServletExtension; import io.undertow.servlet.UndertowServletMessages; import io.undertow.servlet.core.DefaultAuthorizationManager; import io.undertow.servlet.core.InMemorySessionManagerFactory; import io.undertow.servlet.util.DefaultClassIntrospector; import io.undertow.util.ImmediateAuthenticationMechanismFactory; /** * Represents a servlet deployment. 
* * @author Stuart Douglas */ public class DeploymentInfo implements Cloneable { private String deploymentName; private String displayName; private String contextPath; private ClassLoader classLoader; private ResourceManager resourceManager = ResourceManager.EMPTY_RESOURCE_MANAGER; private ClassIntrospecter classIntrospecter = DefaultClassIntrospector.INSTANCE; private int majorVersion = 4; private int minorVersion = 0; private int containerMajorVersion = 4; private int containerMinorVersion = 0; private Executor executor; private Executor asyncExecutor; private Path tempDir; private JspConfigDescriptor jspConfigDescriptor; private DefaultServletConfig defaultServletConfig; private SessionManagerFactory sessionManagerFactory = new InMemorySessionManagerFactory(); private LoginConfig loginConfig; private IdentityManager identityManager; private ConfidentialPortManager confidentialPortManager; private boolean allowNonStandardWrappers = false; private int defaultSessionTimeout = 60 * 30; private ConcurrentMap<String, Object> servletContextAttributeBackingMap; private ServletSessionConfig servletSessionConfig; private String hostName = "localhost"; private boolean denyUncoveredHttpMethods = false; private ServletStackTraces servletStackTraces = ServletStackTraces.LOCAL_ONLY; private boolean invalidateSessionOnLogout = false; private int defaultCookieVersion = 0; private SessionPersistenceManager sessionPersistenceManager; private String defaultEncoding; private String defaultRequestEncoding; private String defaultResponseEncoding; private String urlEncoding = null; private boolean ignoreFlush = false; private AuthorizationManager authorizationManager = DefaultAuthorizationManager.INSTANCE; private AuthenticationMechanism jaspiAuthenticationMechanism; private SecurityContextFactory securityContextFactory; private String serverName = "Undertow"; private MetricsCollector metricsCollector = null; private SessionConfigWrapper sessionConfigWrapper = null; private boolean eagerFilterInit = false; private boolean disableCachingForSecuredPages = true; private boolean escapeErrorMessage = true; private boolean sendCustomReasonPhraseOnError = false; private boolean useCachedAuthenticationMechanism = true; private AuthenticationMode authenticationMode = AuthenticationMode.PRO_ACTIVE; private ExceptionHandler exceptionHandler; private final Map<String, ServletInfo> servlets = new HashMap<>(); private final Map<String, FilterInfo> filters = new HashMap<>(); private final List<FilterMappingInfo> filterServletNameMappings = new ArrayList<>(); private final List<FilterMappingInfo> filterUrlMappings = new ArrayList<>(); private final List<ListenerInfo> listeners = new ArrayList<>(); private final List<ServletContainerInitializerInfo> servletContainerInitializers = new ArrayList<>(); private final List<ThreadSetupHandler> threadSetupActions = new ArrayList<>(); private final Map<String, String> initParameters = new HashMap<>(); private final Map<String, Object> servletContextAttributes = new HashMap<>(); private final Map<String, String> localeCharsetMapping = new HashMap<>(); private final List<String> welcomePages = new ArrayList<>(); private final List<ErrorPage> errorPages = new ArrayList<>(); private final List<MimeMapping> mimeMappings = new ArrayList<>(); private final List<SecurityConstraint> securityConstraints = new ArrayList<>(); private final Set<String> securityRoles = new HashSet<>(); private final List<NotificationReceiver> notificationReceivers = new ArrayList<>(); private final Map<String, 
AuthenticationMechanismFactory> authenticationMechanisms = new HashMap<>(); private final List<LifecycleInterceptor> lifecycleInterceptors = new ArrayList<>(); private final List<SessionListener> sessionListeners = new ArrayList<>(); /** * additional servlet extensions */ private final List<ServletExtension> servletExtensions = new ArrayList<>(); /** * map of additional roles that should be applied to the given principal. */ private final Map<String, Set<String>> principalVersusRolesMap = new HashMap<>(); /** * Wrappers that are applied before the servlet initial handler, and before any servlet related object have been * created. If a wrapper wants to bypass servlet entirely it should register itself here. */ private final List<HandlerWrapper> initialHandlerChainWrappers = new ArrayList<>(); /** * Handler chain wrappers that are applied outside all other handlers, including security but after the initial * servlet handler. */ private final List<HandlerWrapper> outerHandlerChainWrappers = new ArrayList<>(); /** * Handler chain wrappers that are applied just before the servlet request is dispatched. At this point the security * handlers have run, and any security information is attached to the request. */ private final List<HandlerWrapper> innerHandlerChainWrappers = new ArrayList<>(); /** * A handler chain wrapper to wrap the initial stages of the security handlers, if this is set it is assumed it * is taking over the responsibility of setting the {@link io.undertow.security.api.SecurityContext} that can handle authentication and the * remaining Undertow handlers specific to authentication will be skipped. */ private HandlerWrapper initialSecurityWrapper = null; /** * Handler chain wrappers that are applied just before the authentication mechanism is called. Theses handlers are * always called, even if authentication is not required */ private final List<HandlerWrapper> securityWrappers = new ArrayList<>(); /** * Multipart config that will be applied to all servlets that do not have an explicit config */ private MultipartConfigElement defaultMultipartConfig; /** * Cache of common content types, to prevent allocations when parsing the charset */ private int contentTypeCacheSize = 100; private boolean changeSessionIdOnLogin = true; private SessionIdGenerator sessionIdGenerator = new SecureRandomSessionIdGenerator(); /** * Config for the {@link io.undertow.servlet.handlers.CrawlerSessionManagerHandler} */ private CrawlerSessionManagerConfig crawlerSessionManagerConfig; private boolean securityDisabled; private boolean checkOtherSessionManagers = true; private final List<ServletContextListener> deploymentCompleteListeners = new ArrayList<>(); /** * A map of content encoding to file extension for pre compressed resource (e.g. 
gzip -> .gz) */ private final Map<String, String> preCompressedResources = new HashMap<>(); public void validate() { if (deploymentName == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("deploymentName"); } if (contextPath == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("contextName"); } if (classLoader == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("classLoader"); } if (resourceManager == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("resourceManager"); } if (classIntrospecter == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("classIntrospecter"); } for (final ServletInfo servlet : this.servlets.values()) { servlet.validate(); } for (final FilterInfo filter : this.filters.values()) { filter.validate(); } for (FilterMappingInfo mapping : this.filterServletNameMappings) { if (!this.filters.containsKey(mapping.getFilterName())) { throw UndertowServletMessages.MESSAGES.filterNotFound(mapping.getFilterName(), mapping.getMappingType() + " - " + mapping.getMapping()); } } for (FilterMappingInfo mapping : this.filterUrlMappings) { if (!this.filters.containsKey(mapping.getFilterName())) { throw UndertowServletMessages.MESSAGES.filterNotFound(mapping.getFilterName(), mapping.getMappingType() + " - " + mapping.getMapping()); } } } public String getDeploymentName() { return deploymentName; } public DeploymentInfo setDeploymentName(final String deploymentName) { this.deploymentName = deploymentName; return this; } public String getDisplayName() { return displayName; } public DeploymentInfo setDisplayName(final String displayName) { this.displayName = displayName; return this; } public String getContextPath() { return contextPath; } public DeploymentInfo setContextPath(final String contextPath) { if(contextPath != null && contextPath.isEmpty()) { this.contextPath = "/"; //we represent the root context as / instead of "", but both work } else { this.contextPath = contextPath; } return this; } public ClassLoader getClassLoader() { return classLoader; } public DeploymentInfo setClassLoader(final ClassLoader classLoader) { this.classLoader = classLoader; return this; } public ResourceManager getResourceManager() { return resourceManager; } public DeploymentInfo setResourceManager(final ResourceManager resourceManager) { this.resourceManager = resourceManager; return this; } public ClassIntrospecter getClassIntrospecter() { return classIntrospecter; } public DeploymentInfo setClassIntrospecter(final ClassIntrospecter classIntrospecter) { this.classIntrospecter = classIntrospecter; return this; } public boolean isAllowNonStandardWrappers() { return allowNonStandardWrappers; } public DeploymentInfo setAllowNonStandardWrappers(final boolean allowNonStandardWrappers) { this.allowNonStandardWrappers = allowNonStandardWrappers; return this; } public int getDefaultSessionTimeout() { return defaultSessionTimeout; } /** * @param defaultSessionTimeout The default session timeout, in seconds */ public DeploymentInfo setDefaultSessionTimeout(final int defaultSessionTimeout) { this.defaultSessionTimeout = defaultSessionTimeout; return this; } public String getDefaultEncoding() { return defaultEncoding; } /** * Sets the default encoding that will be used for servlet responses * * @param defaultEncoding The default encoding */ public DeploymentInfo setDefaultEncoding(String defaultEncoding) { this.defaultEncoding = defaultEncoding; return this; } public String getUrlEncoding() { return urlEncoding; } /** * Sets the URL 
encoding. This will only take effect if the {@link io.undertow.UndertowOptions#DECODE_URL} * parameter has been set to false. This allows multiple deployments in the same server to use a different URL encoding * * @param urlEncoding The encoding to use */ public DeploymentInfo setUrlEncoding(String urlEncoding) { this.urlEncoding = urlEncoding; return this; } public DeploymentInfo addServlet(final ServletInfo servlet) { servlets.put(servlet.getName(), servlet); return this; } public DeploymentInfo addServlets(final ServletInfo... servlets) { for (final ServletInfo servlet : servlets) { addServlet(servlet); } return this; } public DeploymentInfo addServlets(final Collection<ServletInfo> servlets) { for (final ServletInfo servlet : servlets) { addServlet(servlet); } return this; } public Map<String, ServletInfo> getServlets() { return servlets; } public DeploymentInfo addFilter(final FilterInfo filter) { filters.put(filter.getName(), filter); return this; } public DeploymentInfo addFilters(final FilterInfo... filters) { for (final FilterInfo filter : filters) { addFilter(filter); } return this; } public DeploymentInfo addFilters(final Collection<FilterInfo> filters) { for (final FilterInfo filter : filters) { addFilter(filter); } return this; } public Map<String, FilterInfo> getFilters() { return filters; } public DeploymentInfo addFilterUrlMapping(final String filterName, final String mapping, DispatcherType dispatcher) { filterUrlMappings.add(new FilterMappingInfo(filterName, FilterMappingInfo.MappingType.URL, mapping, dispatcher)); return this; } public DeploymentInfo addFilterServletNameMapping(final String filterName, final String mapping, DispatcherType dispatcher) { filterServletNameMappings.add(new FilterMappingInfo(filterName, FilterMappingInfo.MappingType.SERVLET, mapping, dispatcher)); return this; } public DeploymentInfo insertFilterUrlMapping(final int pos, final String filterName, final String mapping, DispatcherType dispatcher) { filterUrlMappings.add(pos, new FilterMappingInfo(filterName, FilterMappingInfo.MappingType.URL, mapping, dispatcher)); return this; } public DeploymentInfo insertFilterServletNameMapping(final int pos, final String filterName, final String mapping, DispatcherType dispatcher) { filterServletNameMappings.add(pos, new FilterMappingInfo(filterName, FilterMappingInfo.MappingType.SERVLET, mapping, dispatcher)); return this; } public List<FilterMappingInfo> getFilterMappings() { final ArrayList<FilterMappingInfo> ret = new ArrayList<>(filterUrlMappings); ret.addAll(filterServletNameMappings); return ret; } public DeploymentInfo addListener(final ListenerInfo listener) { listeners.add(listener); return this; } public DeploymentInfo addListeners(final ListenerInfo... 
listeners) { this.listeners.addAll(Arrays.asList(listeners)); return this; } public DeploymentInfo addListeners(final Collection<ListenerInfo> listeners) { this.listeners.addAll(listeners); return this; } public List<ListenerInfo> getListeners() { return listeners; } public int getMajorVersion() { return majorVersion; } public DeploymentInfo setMajorVersion(final int majorVersion) { this.majorVersion = majorVersion; return this; } public int getMinorVersion() { return minorVersion; } public DeploymentInfo setMinorVersion(final int minorVersion) { this.minorVersion = minorVersion; return this; } public DeploymentInfo addServletContainerInitalizer(final ServletContainerInitializerInfo servletContainerInitializer) { servletContainerInitializers.add(servletContainerInitializer); return this; } public DeploymentInfo addServletContainerInitalizers(final ServletContainerInitializerInfo... servletContainerInitializer) { servletContainerInitializers.addAll(Arrays.asList(servletContainerInitializer)); return this; } public DeploymentInfo addServletContainerInitalizers(final List<ServletContainerInitializerInfo> servletContainerInitializer) { servletContainerInitializers.addAll(servletContainerInitializer); return this; } public List<ServletContainerInitializerInfo> getServletContainerInitializers() { return servletContainerInitializers; } @Deprecated public DeploymentInfo addThreadSetupAction(final ThreadSetupAction action) { threadSetupActions.add(new LegacyThreadSetupActionWrapper(action)); return this; } public DeploymentInfo addThreadSetupAction(final ThreadSetupHandler action) { threadSetupActions.add(action); return this; } public List<ThreadSetupHandler> getThreadSetupActions() { return threadSetupActions; } public boolean isEagerFilterInit() { return eagerFilterInit; } public DeploymentInfo setEagerFilterInit(boolean eagerFilterInit) { this.eagerFilterInit = eagerFilterInit; return this; } public DeploymentInfo addInitParameter(final String name, final String value) { initParameters.put(name, value); return this; } public Map<String, String> getInitParameters() { return initParameters; } public DeploymentInfo addServletContextAttribute(final String name, final Object value) { servletContextAttributes.put(name, value); return this; } public Map<String, Object> getServletContextAttributes() { return servletContextAttributes; } public DeploymentInfo addWelcomePage(final String welcomePage) { this.welcomePages.add(welcomePage); return this; } public DeploymentInfo addWelcomePages(final String... welcomePages) { this.welcomePages.addAll(Arrays.asList(welcomePages)); return this; } public DeploymentInfo addWelcomePages(final Collection<String> welcomePages) { this.welcomePages.addAll(welcomePages); return this; } public List<String> getWelcomePages() { return welcomePages; } public DeploymentInfo addErrorPage(final ErrorPage errorPage) { this.errorPages.add(errorPage); return this; } public DeploymentInfo addErrorPages(final ErrorPage... errorPages) { this.errorPages.addAll(Arrays.asList(errorPages)); return this; } public DeploymentInfo addErrorPages(final Collection<ErrorPage> errorPages) { this.errorPages.addAll(errorPages); return this; } public List<ErrorPage> getErrorPages() { return errorPages; } public DeploymentInfo addMimeMapping(final MimeMapping mimeMappings) { this.mimeMappings.add(mimeMappings); return this; } public DeploymentInfo addMimeMappings(final MimeMapping... 
mimeMappings) { this.mimeMappings.addAll(Arrays.asList(mimeMappings)); return this; } public DeploymentInfo addMimeMappings(final Collection<MimeMapping> mimeMappings) { this.mimeMappings.addAll(mimeMappings); return this; } public List<MimeMapping> getMimeMappings() { return mimeMappings; } public DeploymentInfo addSecurityConstraint(final SecurityConstraint securityConstraint) { this.securityConstraints.add(securityConstraint); return this; } public DeploymentInfo addSecurityConstraints(final SecurityConstraint... securityConstraints) { this.securityConstraints.addAll(Arrays.asList(securityConstraints)); return this; } public DeploymentInfo addSecurityConstraints(final Collection<SecurityConstraint> securityConstraints) { this.securityConstraints.addAll(securityConstraints); return this; } public List<SecurityConstraint> getSecurityConstraints() { return securityConstraints; } public Executor getExecutor() { return executor; } /** * Sets the executor that will be used to run servlet invocations. If this is null then the XNIO worker pool will be * used. * <p> * Individual servlets may use a different executor * <p> * If this is null then the current executor is used, which is generally the XNIO worker pool * * @param executor The executor * @see ServletInfo#executor */ public DeploymentInfo setExecutor(final Executor executor) { this.executor = executor; return this; } public Executor getAsyncExecutor() { return asyncExecutor; } /** * Sets the executor that is used to run async tasks. * <p> * If this is null then {@link #executor} is used, if this is also null then the default is used * * @param asyncExecutor The executor */ public DeploymentInfo setAsyncExecutor(final Executor asyncExecutor) { this.asyncExecutor = asyncExecutor; return this; } public File getTempDir() { if(tempDir == null) { return null; } return tempDir.toFile(); } public Path getTempPath() { return tempDir; } public DeploymentInfo setTempDir(final File tempDir) { this.tempDir = tempDir != null ? 
tempDir.toPath() : null; return this; } public DeploymentInfo setTempDir(final Path tempDir) { this.tempDir = tempDir; return this; } public boolean isIgnoreFlush() { return ignoreFlush; } public DeploymentInfo setIgnoreFlush(boolean ignoreFlush) { this.ignoreFlush = ignoreFlush; return this; } public JspConfigDescriptor getJspConfigDescriptor() { return jspConfigDescriptor; } public DeploymentInfo setJspConfigDescriptor(JspConfigDescriptor jspConfigDescriptor) { this.jspConfigDescriptor = jspConfigDescriptor; return this; } public DefaultServletConfig getDefaultServletConfig() { return defaultServletConfig; } public DeploymentInfo setDefaultServletConfig(final DefaultServletConfig defaultServletConfig) { this.defaultServletConfig = defaultServletConfig; return this; } public DeploymentInfo addLocaleCharsetMapping(final String locale, final String charset) { localeCharsetMapping.put(locale, charset); return this; } public Map<String, String> getLocaleCharsetMapping() { return localeCharsetMapping; } public SessionManagerFactory getSessionManagerFactory() { return sessionManagerFactory; } public DeploymentInfo setSessionManagerFactory(final SessionManagerFactory sessionManagerFactory) { this.sessionManagerFactory = sessionManagerFactory; return this; } public LoginConfig getLoginConfig() { return loginConfig; } public DeploymentInfo setLoginConfig(LoginConfig loginConfig) { this.loginConfig = loginConfig; return this; } public IdentityManager getIdentityManager() { return identityManager; } public DeploymentInfo setIdentityManager(IdentityManager identityManager) { this.identityManager = identityManager; return this; } public ConfidentialPortManager getConfidentialPortManager() { return confidentialPortManager; } public DeploymentInfo setConfidentialPortManager(ConfidentialPortManager confidentialPortManager) { this.confidentialPortManager = confidentialPortManager; return this; } public DeploymentInfo addSecurityRole(final String role) { this.securityRoles.add(role); return this; } public DeploymentInfo addSecurityRoles(final String... roles) { this.securityRoles.addAll(Arrays.asList(roles)); return this; } public DeploymentInfo addSecurityRoles(final Collection<String> roles) { this.securityRoles.addAll(roles); return this; } public Set<String> getSecurityRoles() { return securityRoles; } /** * Adds an outer handler wrapper. This handler will be run after the servlet initial handler, * but before any other handlers. These are only run on REQUEST invocations, they * are not invoked on a FORWARD or INCLUDE. * * @param wrapper The wrapper */ public DeploymentInfo addOuterHandlerChainWrapper(final HandlerWrapper wrapper) { outerHandlerChainWrappers.add(wrapper); return this; } public List<HandlerWrapper> getOuterHandlerChainWrappers() { return outerHandlerChainWrappers; } /** * Adds an inner handler chain wrapper. 
This handler will be run after the security handler, * but before any other servlet handlers, and will be run for every request * * @param wrapper The wrapper */ public DeploymentInfo addInnerHandlerChainWrapper(final HandlerWrapper wrapper) { innerHandlerChainWrappers.add(wrapper); return this; } public List<HandlerWrapper> getInnerHandlerChainWrappers() { return innerHandlerChainWrappers; } public DeploymentInfo addInitialHandlerChainWrapper(final HandlerWrapper wrapper) { initialHandlerChainWrappers.add(wrapper); return this; } public List<HandlerWrapper> getInitialHandlerChainWrappers() { return initialHandlerChainWrappers; } /** * Sets the initial handler wrapper that will take over responsibility for establishing * a security context that will handle authentication for the request. * * Undertow specific authentication mechanisms will not be installed but Undertow handlers will * still make the decision as to if authentication is required and will subsequently * call {@link io.undertow.security.api.SecurityContext#authenticate()} as required. * * @param wrapper the {@link HandlerWrapper} to handle the initial security context installation. * @return {@code this} to allow chaining. */ public DeploymentInfo setInitialSecurityWrapper(final HandlerWrapper wrapper) { this.initialSecurityWrapper = wrapper; return this; } public HandlerWrapper getInitialSecurityWrapper() { return initialSecurityWrapper; } /** * Adds a security handler. These are invoked before the authentication mechanism, and are always invoked * even if authentication is not required. * @param wrapper * @return */ public DeploymentInfo addSecurityWrapper(final HandlerWrapper wrapper) { securityWrappers.add(wrapper); return this; } public List<HandlerWrapper> getSecurityWrappers() { return securityWrappers; } public DeploymentInfo addNotificationReceiver(final NotificationReceiver notificationReceiver) { this.notificationReceivers.add(notificationReceiver); return this; } public DeploymentInfo addNotificactionReceivers(final NotificationReceiver... notificationReceivers) { this.notificationReceivers.addAll(Arrays.asList(notificationReceivers)); return this; } public DeploymentInfo addNotificationReceivers(final Collection<NotificationReceiver> notificationReceivers) { this.notificationReceivers.addAll(notificationReceivers); return this; } public List<NotificationReceiver> getNotificationReceivers() { return notificationReceivers; } public ConcurrentMap<String, Object> getServletContextAttributeBackingMap() { return servletContextAttributeBackingMap; } /** * Sets the map that will be used by the ServletContext implementation to store attributes. * <p> * This should usuablly be null, in which case Undertow will create a new map. This is only * used in situations where you want multiple deployments to share the same servlet context * attributes. 
* * @param servletContextAttributeBackingMap * The backing map */ public DeploymentInfo setServletContextAttributeBackingMap(final ConcurrentMap<String, Object> servletContextAttributeBackingMap) { this.servletContextAttributeBackingMap = servletContextAttributeBackingMap; return this; } public ServletSessionConfig getServletSessionConfig() { return servletSessionConfig; } public DeploymentInfo setServletSessionConfig(final ServletSessionConfig servletSessionConfig) { this.servletSessionConfig = servletSessionConfig; return this; } /** * @return the host name */ public String getHostName() { return hostName; } public DeploymentInfo setHostName(final String hostName) { this.hostName = hostName; return this; } public boolean isDenyUncoveredHttpMethods() { return denyUncoveredHttpMethods; } public DeploymentInfo setDenyUncoveredHttpMethods(final boolean denyUncoveredHttpMethods) { this.denyUncoveredHttpMethods = denyUncoveredHttpMethods; return this; } public ServletStackTraces getServletStackTraces() { return servletStackTraces; } public DeploymentInfo setServletStackTraces(ServletStackTraces servletStackTraces) { this.servletStackTraces = servletStackTraces; return this; } public boolean isInvalidateSessionOnLogout() { return invalidateSessionOnLogout; } public DeploymentInfo setInvalidateSessionOnLogout(boolean invalidateSessionOnLogout) { this.invalidateSessionOnLogout = invalidateSessionOnLogout; return this; } public int getDefaultCookieVersion() { return defaultCookieVersion; } public DeploymentInfo setDefaultCookieVersion(int defaultCookieVersion) { this.defaultCookieVersion = defaultCookieVersion; return this; } public SessionPersistenceManager getSessionPersistenceManager() { return sessionPersistenceManager; } public DeploymentInfo setSessionPersistenceManager(SessionPersistenceManager sessionPersistenceManager) { this.sessionPersistenceManager = sessionPersistenceManager; return this; } public AuthorizationManager getAuthorizationManager() { return authorizationManager; } public DeploymentInfo setAuthorizationManager(AuthorizationManager authorizationManager) { this.authorizationManager = authorizationManager; return this; } public DeploymentInfo addPrincipalVsRoleMapping(final String principal, final String mapping) { Set<String> set = principalVersusRolesMap.get(principal); if (set == null) { principalVersusRolesMap.put(principal, set = new HashSet<>()); } set.add(mapping); return this; } public DeploymentInfo addPrincipalVsRoleMappings(final String principal, final String... mappings) { Set<String> set = principalVersusRolesMap.get(principal); if (set == null) { principalVersusRolesMap.put(principal, set = new HashSet<>()); } set.addAll(Arrays.asList(mappings)); return this; } public DeploymentInfo addPrincipalVsRoleMappings(final String principal, final Collection<String> mappings) { Set<String> set = principalVersusRolesMap.get(principal); if (set == null) { principalVersusRolesMap.put(principal, set = new HashSet<>()); } set.addAll(mappings); return this; } public Map<String, Set<String>> getPrincipalVersusRolesMap() { return principalVersusRolesMap; } /** * Removes all configured authentication mechanisms from the deployment. * * @return this deployment info */ public DeploymentInfo clearLoginMethods() { if(loginConfig != null) { loginConfig.getAuthMethods().clear(); } return this; } /** * Adds an authentication mechanism directly to the deployment. This mechanism will be first in the list. 
* * In general you should just use {@link #addAuthenticationMechanism(String, io.undertow.security.api.AuthenticationMechanismFactory)} * and allow the user to configure the methods they want by name. * * This method is essentially a convenience method, if is the same as registering a factory under the provided name that returns * and authentication mechanism, and then adding it to the login config list. * * If you want your mechanism to be the only one in the deployment you should first invoke {@link #clearLoginMethods()}. * * @param name The authentication mechanism name * @param mechanism The mechanism * @return this deployment info */ public DeploymentInfo addFirstAuthenticationMechanism(final String name, final AuthenticationMechanism mechanism) { authenticationMechanisms.put(name, new ImmediateAuthenticationMechanismFactory(mechanism)); if(loginConfig == null) { loginConfig = new LoginConfig(null); } loginConfig.addFirstAuthMethod(new AuthMethodConfig(name)); return this; } /** * Adds an authentication mechanism directly to the deployment. This mechanism will be last in the list. * * In general you should just use {@link #addAuthenticationMechanism(String, io.undertow.security.api.AuthenticationMechanismFactory)} * and allow the user to configure the methods they want by name. * * This method is essentially a convenience method, if is the same as registering a factory under the provided name that returns * and authentication mechanism, and then adding it to the login config list. * * If you want your mechanism to be the only one in the deployment you should first invoke {@link #clearLoginMethods()}. * * @param name The authentication mechanism name * @param mechanism The mechanism * @return */ public DeploymentInfo addLastAuthenticationMechanism(final String name, final AuthenticationMechanism mechanism) { authenticationMechanisms.put(name, new ImmediateAuthenticationMechanismFactory(mechanism)); if(loginConfig == null) { loginConfig = new LoginConfig(null); } loginConfig.addLastAuthMethod(new AuthMethodConfig(name)); return this; } /** * Adds an authentication mechanism. The name is case insenstive, and will be converted to uppercase internally. * * @param name The name * @param factory The factory * @return */ public DeploymentInfo addAuthenticationMechanism(final String name, final AuthenticationMechanismFactory factory) { authenticationMechanisms.put(name.toUpperCase(Locale.US), factory); return this; } public Map<String, AuthenticationMechanismFactory> getAuthenticationMechanisms() { return authenticationMechanisms; } /** * Returns true if the specified mechanism is present in the login config * @param mechanismName The mechanism name * @return true if the mechanism is enabled */ public boolean isAuthenticationMechanismPresent(final String mechanismName) { if(loginConfig != null) { for(AuthMethodConfig method : loginConfig.getAuthMethods()) { if(method.getName().equalsIgnoreCase(mechanismName)) { return true; } } } return false; } /** * Adds an additional servlet extension to the deployment. Servlet extensions are generally discovered * using META-INF/services entries, however this may not be practical in all environments. 
* @param servletExtension The servlet extension * @return this */ public DeploymentInfo addServletExtension(final ServletExtension servletExtension) { this.servletExtensions.add(servletExtension); return this; } public List<ServletExtension> getServletExtensions() { return servletExtensions; } public AuthenticationMechanism getJaspiAuthenticationMechanism() { return jaspiAuthenticationMechanism; } public DeploymentInfo setJaspiAuthenticationMechanism(AuthenticationMechanism jaspiAuthenticationMechanism) { this.jaspiAuthenticationMechanism = jaspiAuthenticationMechanism; return this; } public SecurityContextFactory getSecurityContextFactory() { return this.securityContextFactory; } public DeploymentInfo setSecurityContextFactory(final SecurityContextFactory securityContextFactory) { this.securityContextFactory = securityContextFactory; return this; } public String getServerName() { return serverName; } public DeploymentInfo setServerName(String serverName) { this.serverName = serverName; return this; } public DeploymentInfo setMetricsCollector(MetricsCollector metricsCollector){ this.metricsCollector = metricsCollector; return this; } public MetricsCollector getMetricsCollector() { return metricsCollector; } public SessionConfigWrapper getSessionConfigWrapper() { return sessionConfigWrapper; } public DeploymentInfo setSessionConfigWrapper(SessionConfigWrapper sessionConfigWrapper) { this.sessionConfigWrapper = sessionConfigWrapper; return this; } public boolean isDisableCachingForSecuredPages() { return disableCachingForSecuredPages; } public DeploymentInfo setDisableCachingForSecuredPages(boolean disableCachingForSecuredPages) { this.disableCachingForSecuredPages = disableCachingForSecuredPages; return this; } public DeploymentInfo addLifecycleInterceptor(final LifecycleInterceptor interceptor) { lifecycleInterceptors.add(interceptor); return this; } public List<LifecycleInterceptor> getLifecycleInterceptors() { return lifecycleInterceptors; } /** * Returns the exception handler that is used by this deployment. By default this will simply * log unhandled exceptions */ public ExceptionHandler getExceptionHandler() { return exceptionHandler; } /** * Sets the default exception handler for this deployment * @param exceptionHandler The exception handler * @return */ public DeploymentInfo setExceptionHandler(ExceptionHandler exceptionHandler) { this.exceptionHandler = exceptionHandler; return this; } public boolean isEscapeErrorMessage() { return escapeErrorMessage; } /** * Set if if the message passed to {@link javax.servlet.http.HttpServletResponse#sendError(int, String)} should be escaped. * * If this is false applications must be careful not to use user provided data (such as the URI) in the message * * @param escapeErrorMessage If the error message should be escaped */ public DeploymentInfo setEscapeErrorMessage(boolean escapeErrorMessage) { this.escapeErrorMessage = escapeErrorMessage; return this; } public DeploymentInfo addSessionListener(SessionListener sessionListener) { this.sessionListeners.add(sessionListener); return this; } public List<SessionListener> getSessionListeners() { return sessionListeners; } public AuthenticationMode getAuthenticationMode() { return authenticationMode; } /** * Sets if this deployment should use pro-active authentication and always authenticate if the credentials are present * or constraint driven auth which will only call the authentication mechanisms for protected resources. 
* * Pro active auth means that requests for unprotected resources will still be associated with a user, which may be * useful for access logging. * * * @param authenticationMode The authentication mode to use * @return */ public DeploymentInfo setAuthenticationMode(AuthenticationMode authenticationMode) { this.authenticationMode = authenticationMode; return this; } public MultipartConfigElement getDefaultMultipartConfig() { return defaultMultipartConfig; } public DeploymentInfo setDefaultMultipartConfig(MultipartConfigElement defaultMultipartConfig) { this.defaultMultipartConfig = defaultMultipartConfig; return this; } public int getContentTypeCacheSize() { return contentTypeCacheSize; } public DeploymentInfo setContentTypeCacheSize(int contentTypeCacheSize) { this.contentTypeCacheSize = contentTypeCacheSize; return this; } public SessionIdGenerator getSessionIdGenerator() { return sessionIdGenerator; } public DeploymentInfo setSessionIdGenerator(SessionIdGenerator sessionIdGenerator) { this.sessionIdGenerator = sessionIdGenerator; return this; } public boolean isSendCustomReasonPhraseOnError() { return sendCustomReasonPhraseOnError; } public CrawlerSessionManagerConfig getCrawlerSessionManagerConfig() { return crawlerSessionManagerConfig; } public DeploymentInfo setCrawlerSessionManagerConfig(CrawlerSessionManagerConfig crawlerSessionManagerConfig) { this.crawlerSessionManagerConfig = crawlerSessionManagerConfig; return this; } /** * If this is true then the message parameter of {@link javax.servlet.http.HttpServletResponse#sendError(int, String)} and * {@link javax.servlet.http.HttpServletResponse#setStatus(int, String)} will be used as the HTTP reason phrase in * the response. * * @param sendCustomReasonPhraseOnError If the parameter to sendError should be used as a HTTP reason phrase * @return this */ public DeploymentInfo setSendCustomReasonPhraseOnError(boolean sendCustomReasonPhraseOnError) { this.sendCustomReasonPhraseOnError = sendCustomReasonPhraseOnError; return this; } public boolean isChangeSessionIdOnLogin() { return changeSessionIdOnLogin; } public DeploymentInfo setChangeSessionIdOnLogin(boolean changeSessionIdOnLogin) { this.changeSessionIdOnLogin = changeSessionIdOnLogin; return this; } public boolean isUseCachedAuthenticationMechanism() { return useCachedAuthenticationMechanism; } /** * If this is set to false the the cached authenticated session mechanism won't be installed. If you want FORM and * other auth methods that require caching to work then you need to install another caching based auth method (such * as SSO). * @param useCachedAuthenticationMechanism If Undertow should use its internal authentication cache mechanism * @return this */ public DeploymentInfo setUseCachedAuthenticationMechanism(boolean useCachedAuthenticationMechanism) { this.useCachedAuthenticationMechanism = useCachedAuthenticationMechanism; return this; } public boolean isSecurityDisabled() { return securityDisabled; } public DeploymentInfo setSecurityDisabled(boolean securityDisabled) { this.securityDisabled = securityDisabled; return this; } public boolean isCheckOtherSessionManagers() { return checkOtherSessionManagers; } /** * If this is true then when an existing invalid session id is found all other deployments in the container will have their * session managers checked to see if it represents a valid session. If it does then the session id will be re-used. 
*/ public DeploymentInfo setCheckOtherSessionManagers(boolean checkOtherSessionManagers) { this.checkOtherSessionManagers = checkOtherSessionManagers; return this; } public String getDefaultRequestEncoding() { return defaultRequestEncoding; } public DeploymentInfo setDefaultRequestEncoding(String defaultRequestEncoding) { this.defaultRequestEncoding = defaultRequestEncoding; return this; } public String getDefaultResponseEncoding() { return defaultResponseEncoding; } public DeploymentInfo setDefaultResponseEncoding(String defaultResponseEncoding) { this.defaultResponseEncoding = defaultResponseEncoding; return this; } /** * Adds a pre compressed resource encoding and maps it to a file extension * * * @param encoding The content encoding * @param extension The file extension * @return this builder */ public DeploymentInfo addPreCompressedResourceEncoding(String encoding, String extension) { preCompressedResources.put(encoding, extension); return this; } public Map<String, String> getPreCompressedResources() { return preCompressedResources; } public int getContainerMajorVersion() { return containerMajorVersion; } public DeploymentInfo setContainerMajorVersion(int containerMajorVersion) { this.containerMajorVersion = containerMajorVersion; return this; } public int getContainerMinorVersion() { return containerMinorVersion; } public DeploymentInfo setContainerMinorVersion(int containerMinorVersion) { this.containerMinorVersion = containerMinorVersion; return this; } /** * Add's a listener that is only invoked once all other deployment steps have been completed * * The listeners <code>contextDestroyed</code> method will be called after all undeployment steps are undertaken * * @param servletContextListener * @return */ public DeploymentInfo addDeploymentCompleteListener(ServletContextListener servletContextListener) { deploymentCompleteListeners.add(servletContextListener); return this; } public List<ServletContextListener> getDeploymentCompleteListeners() { return deploymentCompleteListeners; } @Override public DeploymentInfo clone() { final DeploymentInfo info = new DeploymentInfo() .setClassLoader(classLoader) .setContextPath(contextPath) .setResourceManager(resourceManager) .setMajorVersion(majorVersion) .setMinorVersion(minorVersion) .setDeploymentName(deploymentName) .setClassIntrospecter(classIntrospecter); for (Map.Entry<String, ServletInfo> e : servlets.entrySet()) { info.addServlet(e.getValue().clone()); } for (Map.Entry<String, FilterInfo> e : filters.entrySet()) { info.addFilter(e.getValue().clone()); } info.displayName = displayName; info.filterUrlMappings.addAll(filterUrlMappings); info.filterServletNameMappings.addAll(filterServletNameMappings); info.listeners.addAll(listeners); info.servletContainerInitializers.addAll(servletContainerInitializers); info.threadSetupActions.addAll(threadSetupActions); info.initParameters.putAll(initParameters); info.servletContextAttributes.putAll(servletContextAttributes); info.welcomePages.addAll(welcomePages); info.errorPages.addAll(errorPages); info.mimeMappings.addAll(mimeMappings); info.executor = executor; info.asyncExecutor = asyncExecutor; info.tempDir = tempDir; info.jspConfigDescriptor = jspConfigDescriptor; info.defaultServletConfig = defaultServletConfig; info.localeCharsetMapping.putAll(localeCharsetMapping); info.sessionManagerFactory = sessionManagerFactory; if (loginConfig != null) { info.loginConfig = loginConfig.clone(); } info.identityManager = identityManager; info.confidentialPortManager = confidentialPortManager; 
info.defaultEncoding = defaultEncoding; info.urlEncoding = urlEncoding; info.securityConstraints.addAll(securityConstraints); info.outerHandlerChainWrappers.addAll(outerHandlerChainWrappers); info.innerHandlerChainWrappers.addAll(innerHandlerChainWrappers); info.initialSecurityWrapper = initialSecurityWrapper; info.securityWrappers.addAll(securityWrappers); info.initialHandlerChainWrappers.addAll(initialHandlerChainWrappers); info.securityRoles.addAll(securityRoles); info.notificationReceivers.addAll(notificationReceivers); info.allowNonStandardWrappers = allowNonStandardWrappers; info.defaultSessionTimeout = defaultSessionTimeout; info.servletContextAttributeBackingMap = servletContextAttributeBackingMap; info.servletSessionConfig = servletSessionConfig; info.hostName = hostName; info.denyUncoveredHttpMethods = denyUncoveredHttpMethods; info.servletStackTraces = servletStackTraces; info.invalidateSessionOnLogout = invalidateSessionOnLogout; info.defaultCookieVersion = defaultCookieVersion; info.sessionPersistenceManager = sessionPersistenceManager; info.principalVersusRolesMap.putAll(principalVersusRolesMap); info.ignoreFlush = ignoreFlush; info.authorizationManager = authorizationManager; info.authenticationMechanisms.putAll(authenticationMechanisms); info.servletExtensions.addAll(servletExtensions); info.jaspiAuthenticationMechanism = jaspiAuthenticationMechanism; info.securityContextFactory = securityContextFactory; info.serverName = serverName; info.metricsCollector = metricsCollector; info.sessionConfigWrapper = sessionConfigWrapper; info.eagerFilterInit = eagerFilterInit; info.disableCachingForSecuredPages = disableCachingForSecuredPages; info.exceptionHandler = exceptionHandler; info.escapeErrorMessage = escapeErrorMessage; info.sessionListeners.addAll(sessionListeners); info.lifecycleInterceptors.addAll(lifecycleInterceptors); info.authenticationMode = authenticationMode; info.defaultMultipartConfig = defaultMultipartConfig; info.contentTypeCacheSize = contentTypeCacheSize; info.sessionIdGenerator = sessionIdGenerator; info.sendCustomReasonPhraseOnError = sendCustomReasonPhraseOnError; info.changeSessionIdOnLogin = changeSessionIdOnLogin; info.crawlerSessionManagerConfig = crawlerSessionManagerConfig; info.securityDisabled = securityDisabled; info.useCachedAuthenticationMechanism = useCachedAuthenticationMechanism; info.checkOtherSessionManagers = checkOtherSessionManagers; info.defaultRequestEncoding = defaultRequestEncoding; info.defaultResponseEncoding = defaultResponseEncoding; info.preCompressedResources.putAll(preCompressedResources); info.containerMajorVersion = containerMajorVersion; info.containerMinorVersion = containerMinorVersion; info.deploymentCompleteListeners.addAll(deploymentCompleteListeners); return info; } }
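The DeploymentInfo class above is configured entirely through fluent setters, so a deployment descriptor can be assembled and checked in a few chained calls. The following is a minimal sketch and is not part of the recorded commit: the deployment name, context path, welcome page and init parameter are invented values, and only methods that appear in the class above are used.

import io.undertow.servlet.api.DeploymentInfo;

public class DeploymentInfoSketch {

    public static DeploymentInfo build(ClassLoader loader) {
        DeploymentInfo info = new DeploymentInfo()
                .setDeploymentName("my-app.war")       // mandatory: validate() rejects a null deployment name
                .setContextPath("/my-app")             // an empty path is normalised to "/" by setContextPath
                .setClassLoader(loader)                // mandatory: validate() rejects a null class loader
                .setDefaultSessionTimeout(30 * 60)     // value is in seconds
                .addWelcomePage("index.html")
                .addInitParameter("greeting", "hello");
        info.validate();                               // throws if a mandatory field is still missing
        return info;
    }
}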
servlet/src/main/java/io/undertow/servlet/api/DeploymentInfo.java
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.servlet.api; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executor; import javax.servlet.DispatcherType; import javax.servlet.MultipartConfigElement; import javax.servlet.ServletContextListener; import javax.servlet.descriptor.JspConfigDescriptor; import io.undertow.security.api.AuthenticationMechanism; import io.undertow.security.api.AuthenticationMechanismFactory; import io.undertow.security.api.AuthenticationMode; import io.undertow.security.api.NotificationReceiver; import io.undertow.security.api.SecurityContextFactory; import io.undertow.security.idm.IdentityManager; import io.undertow.server.HandlerWrapper; import io.undertow.server.handlers.resource.ResourceManager; import io.undertow.server.session.SecureRandomSessionIdGenerator; import io.undertow.server.session.SessionIdGenerator; import io.undertow.server.session.SessionListener; import io.undertow.servlet.ServletExtension; import io.undertow.servlet.UndertowServletMessages; import io.undertow.servlet.core.DefaultAuthorizationManager; import io.undertow.servlet.core.InMemorySessionManagerFactory; import io.undertow.servlet.util.DefaultClassIntrospector; import io.undertow.util.ImmediateAuthenticationMechanismFactory; /** * Represents a servlet deployment. 
* * @author Stuart Douglas */ public class DeploymentInfo implements Cloneable { private String deploymentName; private String displayName; private String contextPath; private ClassLoader classLoader; private ResourceManager resourceManager = ResourceManager.EMPTY_RESOURCE_MANAGER; private ClassIntrospecter classIntrospecter = DefaultClassIntrospector.INSTANCE; private int majorVersion = 4; private int minorVersion = 0; private int containerMajorVersion = 4; private int containerMinorVersion = 0; private Executor executor; private Executor asyncExecutor; private Path tempDir; private JspConfigDescriptor jspConfigDescriptor; private DefaultServletConfig defaultServletConfig; private SessionManagerFactory sessionManagerFactory = new InMemorySessionManagerFactory(); private LoginConfig loginConfig; private IdentityManager identityManager; private ConfidentialPortManager confidentialPortManager; private boolean allowNonStandardWrappers = false; private int defaultSessionTimeout = 60 * 30; private ConcurrentMap<String, Object> servletContextAttributeBackingMap; private ServletSessionConfig servletSessionConfig; private String hostName = "localhost"; private boolean denyUncoveredHttpMethods = false; private ServletStackTraces servletStackTraces = ServletStackTraces.LOCAL_ONLY; private boolean invalidateSessionOnLogout = false; private int defaultCookieVersion = 0; private SessionPersistenceManager sessionPersistenceManager; private String defaultEncoding; private String defaultRequestEncoding; private String defaultResponseEncoding; private String urlEncoding = null; private boolean ignoreFlush = false; private AuthorizationManager authorizationManager = DefaultAuthorizationManager.INSTANCE; private AuthenticationMechanism jaspiAuthenticationMechanism; private SecurityContextFactory securityContextFactory; private String serverName = "Undertow"; private MetricsCollector metricsCollector = null; private SessionConfigWrapper sessionConfigWrapper = null; private boolean eagerFilterInit = false; private boolean disableCachingForSecuredPages = true; private boolean escapeErrorMessage = true; private boolean sendCustomReasonPhraseOnError = false; private boolean useCachedAuthenticationMechanism = true; private AuthenticationMode authenticationMode = AuthenticationMode.PRO_ACTIVE; private ExceptionHandler exceptionHandler; private final Map<String, ServletInfo> servlets = new HashMap<>(); private final Map<String, FilterInfo> filters = new HashMap<>(); private final List<FilterMappingInfo> filterServletNameMappings = new ArrayList<>(); private final List<FilterMappingInfo> filterUrlMappings = new ArrayList<>(); private final List<ListenerInfo> listeners = new ArrayList<>(); private final List<ServletContainerInitializerInfo> servletContainerInitializers = new ArrayList<>(); private final List<ThreadSetupHandler> threadSetupActions = new ArrayList<>(); private final Map<String, String> initParameters = new HashMap<>(); private final Map<String, Object> servletContextAttributes = new HashMap<>(); private final Map<String, String> localeCharsetMapping = new HashMap<>(); private final List<String> welcomePages = new ArrayList<>(); private final List<ErrorPage> errorPages = new ArrayList<>(); private final List<MimeMapping> mimeMappings = new ArrayList<>(); private final List<SecurityConstraint> securityConstraints = new ArrayList<>(); private final Set<String> securityRoles = new HashSet<>(); private final List<NotificationReceiver> notificationReceivers = new ArrayList<>(); private final Map<String, 
AuthenticationMechanismFactory> authenticationMechanisms = new HashMap<>(); private final List<LifecycleInterceptor> lifecycleInterceptors = new ArrayList<>(); private final List<SessionListener> sessionListeners = new ArrayList<>(); /** * additional servlet extensions */ private final List<ServletExtension> servletExtensions = new ArrayList<>(); /** * map of additional roles that should be applied to the given principal. */ private final Map<String, Set<String>> principalVersusRolesMap = new HashMap<>(); /** * Wrappers that are applied before the servlet initial handler, and before any servlet related object have been * created. If a wrapper wants to bypass servlet entirely it should register itself here. */ private final List<HandlerWrapper> initialHandlerChainWrappers = new ArrayList<>(); /** * Handler chain wrappers that are applied outside all other handlers, including security but after the initial * servlet handler. */ private final List<HandlerWrapper> outerHandlerChainWrappers = new ArrayList<>(); /** * Handler chain wrappers that are applied just before the servlet request is dispatched. At this point the security * handlers have run, and any security information is attached to the request. */ private final List<HandlerWrapper> innerHandlerChainWrappers = new ArrayList<>(); /** * A handler chain wrapper to wrap the initial stages of the security handlers, if this is set it is assumed it * is taking over the responsibility of setting the {@link io.undertow.security.api.SecurityContext} that can handle authentication and the * remaining Undertow handlers specific to authentication will be skipped. */ private HandlerWrapper initialSecurityWrapper = null; /** * Handler chain wrappers that are applied just before the authentication mechanism is called. Theses handlers are * always called, even if authentication is not required */ private final List<HandlerWrapper> securityWrappers = new ArrayList<>(); /** * Multipart config that will be applied to all servlets that do not have an explicit config */ private MultipartConfigElement defaultMultipartConfig; /** * Cache of common content types, to prevent allocations when parsing the charset */ private int contentTypeCacheSize = 100; private boolean changeSessionIdOnLogin = true; private SessionIdGenerator sessionIdGenerator = new SecureRandomSessionIdGenerator(); /** * Config for the {@link io.undertow.servlet.handlers.CrawlerSessionManagerHandler} */ private CrawlerSessionManagerConfig crawlerSessionManagerConfig; private boolean securityDisabled; private boolean checkOtherSessionManagers = true; private final List<ServletContextListener> deploymentCompleteListeners = new ArrayList<>(); /** * A map of content encoding to file extension for pre compressed resource (e.g. 
gzip -> .gz) */ private final Map<String, String> preCompressedResources = new HashMap<>(); public void validate() { if (deploymentName == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("deploymentName"); } if (contextPath == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("contextName"); } if (classLoader == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("classLoader"); } if (resourceManager == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("resourceManager"); } if (classIntrospecter == null) { throw UndertowServletMessages.MESSAGES.paramCannotBeNull("classIntrospecter"); } for (final ServletInfo servlet : this.servlets.values()) { servlet.validate(); } for (final FilterInfo filter : this.filters.values()) { filter.validate(); } for (FilterMappingInfo mapping : this.filterServletNameMappings) { if (!this.filters.containsKey(mapping.getFilterName())) { throw UndertowServletMessages.MESSAGES.filterNotFound(mapping.getFilterName(), mapping.getMappingType() + " - " + mapping.getMapping()); } } for (FilterMappingInfo mapping : this.filterUrlMappings) { if (!this.filters.containsKey(mapping.getFilterName())) { throw UndertowServletMessages.MESSAGES.filterNotFound(mapping.getFilterName(), mapping.getMappingType() + " - " + mapping.getMapping()); } } } public String getDeploymentName() { return deploymentName; } public DeploymentInfo setDeploymentName(final String deploymentName) { this.deploymentName = deploymentName; return this; } public String getDisplayName() { return displayName; } public DeploymentInfo setDisplayName(final String displayName) { this.displayName = displayName; return this; } public String getContextPath() { return contextPath; } public DeploymentInfo setContextPath(final String contextPath) { if(contextPath != null && contextPath.isEmpty()) { this.contextPath = "/"; //we represent the root context as / instead of "", but both work } else { this.contextPath = contextPath; } return this; } public ClassLoader getClassLoader() { return classLoader; } public DeploymentInfo setClassLoader(final ClassLoader classLoader) { this.classLoader = classLoader; return this; } public ResourceManager getResourceManager() { return resourceManager; } public DeploymentInfo setResourceManager(final ResourceManager resourceManager) { this.resourceManager = resourceManager; return this; } public ClassIntrospecter getClassIntrospecter() { return classIntrospecter; } public DeploymentInfo setClassIntrospecter(final ClassIntrospecter classIntrospecter) { this.classIntrospecter = classIntrospecter; return this; } public boolean isAllowNonStandardWrappers() { return allowNonStandardWrappers; } public DeploymentInfo setAllowNonStandardWrappers(final boolean allowNonStandardWrappers) { this.allowNonStandardWrappers = allowNonStandardWrappers; return this; } public int getDefaultSessionTimeout() { return defaultSessionTimeout; } /** * @param defaultSessionTimeout The default session timeout, in seconds */ public DeploymentInfo setDefaultSessionTimeout(final int defaultSessionTimeout) { this.defaultSessionTimeout = defaultSessionTimeout; return this; } public String getDefaultEncoding() { return defaultEncoding; } /** * Sets the default encoding that will be used for servlet responses * * @param defaultEncoding The default encoding */ public DeploymentInfo setDefaultEncoding(String defaultEncoding) { this.defaultEncoding = defaultEncoding; return this; } public String getUrlEncoding() { return urlEncoding; } /** * Sets the URL 
encoding. This will only take effect if the {@link io.undertow.UndertowOptions#DECODE_URL} * parameter has been set to false. This allows multiple deployments in the same server to use a different URL encoding * * @param urlEncoding The encoding to use */ public DeploymentInfo setUrlEncoding(String urlEncoding) { this.urlEncoding = urlEncoding; return this; } public DeploymentInfo addServlet(final ServletInfo servlet) { servlets.put(servlet.getName(), servlet); return this; } public DeploymentInfo addServlets(final ServletInfo... servlets) { for (final ServletInfo servlet : servlets) { addServlet(servlet); } return this; } public DeploymentInfo addServlets(final Collection<ServletInfo> servlets) { for (final ServletInfo servlet : servlets) { addServlet(servlet); } return this; } public Map<String, ServletInfo> getServlets() { return Collections.unmodifiableMap(servlets); } public DeploymentInfo addFilter(final FilterInfo filter) { filters.put(filter.getName(), filter); return this; } public DeploymentInfo addFilters(final FilterInfo... filters) { for (final FilterInfo filter : filters) { addFilter(filter); } return this; } public DeploymentInfo addFilters(final Collection<FilterInfo> filters) { for (final FilterInfo filter : filters) { addFilter(filter); } return this; } public Map<String, FilterInfo> getFilters() { return Collections.unmodifiableMap(filters); } public DeploymentInfo addFilterUrlMapping(final String filterName, final String mapping, DispatcherType dispatcher) { filterUrlMappings.add(new FilterMappingInfo(filterName, FilterMappingInfo.MappingType.URL, mapping, dispatcher)); return this; } public DeploymentInfo addFilterServletNameMapping(final String filterName, final String mapping, DispatcherType dispatcher) { filterServletNameMappings.add(new FilterMappingInfo(filterName, FilterMappingInfo.MappingType.SERVLET, mapping, dispatcher)); return this; } public DeploymentInfo insertFilterUrlMapping(final int pos, final String filterName, final String mapping, DispatcherType dispatcher) { filterUrlMappings.add(pos, new FilterMappingInfo(filterName, FilterMappingInfo.MappingType.URL, mapping, dispatcher)); return this; } public DeploymentInfo insertFilterServletNameMapping(final int pos, final String filterName, final String mapping, DispatcherType dispatcher) { filterServletNameMappings.add(pos, new FilterMappingInfo(filterName, FilterMappingInfo.MappingType.SERVLET, mapping, dispatcher)); return this; } public List<FilterMappingInfo> getFilterMappings() { final ArrayList<FilterMappingInfo> ret = new ArrayList<>(filterUrlMappings); ret.addAll(filterServletNameMappings); return Collections.unmodifiableList(ret); } public DeploymentInfo addListener(final ListenerInfo listener) { listeners.add(listener); return this; } public DeploymentInfo addListeners(final ListenerInfo... 
listeners) { this.listeners.addAll(Arrays.asList(listeners)); return this; } public DeploymentInfo addListeners(final Collection<ListenerInfo> listeners) { this.listeners.addAll(listeners); return this; } public List<ListenerInfo> getListeners() { return listeners; } public int getMajorVersion() { return majorVersion; } public DeploymentInfo setMajorVersion(final int majorVersion) { this.majorVersion = majorVersion; return this; } public int getMinorVersion() { return minorVersion; } public DeploymentInfo setMinorVersion(final int minorVersion) { this.minorVersion = minorVersion; return this; } public DeploymentInfo addServletContainerInitalizer(final ServletContainerInitializerInfo servletContainerInitializer) { servletContainerInitializers.add(servletContainerInitializer); return this; } public DeploymentInfo addServletContainerInitalizers(final ServletContainerInitializerInfo... servletContainerInitializer) { servletContainerInitializers.addAll(Arrays.asList(servletContainerInitializer)); return this; } public DeploymentInfo addServletContainerInitalizers(final List<ServletContainerInitializerInfo> servletContainerInitializer) { servletContainerInitializers.addAll(servletContainerInitializer); return this; } public List<ServletContainerInitializerInfo> getServletContainerInitializers() { return servletContainerInitializers; } @Deprecated public DeploymentInfo addThreadSetupAction(final ThreadSetupAction action) { threadSetupActions.add(new LegacyThreadSetupActionWrapper(action)); return this; } public DeploymentInfo addThreadSetupAction(final ThreadSetupHandler action) { threadSetupActions.add(action); return this; } public List<ThreadSetupHandler> getThreadSetupActions() { return threadSetupActions; } public boolean isEagerFilterInit() { return eagerFilterInit; } public DeploymentInfo setEagerFilterInit(boolean eagerFilterInit) { this.eagerFilterInit = eagerFilterInit; return this; } public DeploymentInfo addInitParameter(final String name, final String value) { initParameters.put(name, value); return this; } public Map<String, String> getInitParameters() { return Collections.unmodifiableMap(initParameters); } public DeploymentInfo addServletContextAttribute(final String name, final Object value) { servletContextAttributes.put(name, value); return this; } public Map<String, Object> getServletContextAttributes() { return Collections.unmodifiableMap(servletContextAttributes); } public DeploymentInfo addWelcomePage(final String welcomePage) { this.welcomePages.add(welcomePage); return this; } public DeploymentInfo addWelcomePages(final String... welcomePages) { this.welcomePages.addAll(Arrays.asList(welcomePages)); return this; } public DeploymentInfo addWelcomePages(final Collection<String> welcomePages) { this.welcomePages.addAll(welcomePages); return this; } public List<String> getWelcomePages() { return Collections.unmodifiableList(welcomePages); } public DeploymentInfo addErrorPage(final ErrorPage errorPage) { this.errorPages.add(errorPage); return this; } public DeploymentInfo addErrorPages(final ErrorPage... errorPages) { this.errorPages.addAll(Arrays.asList(errorPages)); return this; } public DeploymentInfo addErrorPages(final Collection<ErrorPage> errorPages) { this.errorPages.addAll(errorPages); return this; } public List<ErrorPage> getErrorPages() { return Collections.unmodifiableList(errorPages); } public DeploymentInfo addMimeMapping(final MimeMapping mimeMappings) { this.mimeMappings.add(mimeMappings); return this; } public DeploymentInfo addMimeMappings(final MimeMapping... 
mimeMappings) { this.mimeMappings.addAll(Arrays.asList(mimeMappings)); return this; } public DeploymentInfo addMimeMappings(final Collection<MimeMapping> mimeMappings) { this.mimeMappings.addAll(mimeMappings); return this; } public List<MimeMapping> getMimeMappings() { return Collections.unmodifiableList(mimeMappings); } public DeploymentInfo addSecurityConstraint(final SecurityConstraint securityConstraint) { this.securityConstraints.add(securityConstraint); return this; } public DeploymentInfo addSecurityConstraints(final SecurityConstraint... securityConstraints) { this.securityConstraints.addAll(Arrays.asList(securityConstraints)); return this; } public DeploymentInfo addSecurityConstraints(final Collection<SecurityConstraint> securityConstraints) { this.securityConstraints.addAll(securityConstraints); return this; } public List<SecurityConstraint> getSecurityConstraints() { return Collections.unmodifiableList(securityConstraints); } public Executor getExecutor() { return executor; } /** * Sets the executor that will be used to run servlet invocations. If this is null then the XNIO worker pool will be * used. * <p> * Individual servlets may use a different executor * <p> * If this is null then the current executor is used, which is generally the XNIO worker pool * * @param executor The executor * @see ServletInfo#executor */ public DeploymentInfo setExecutor(final Executor executor) { this.executor = executor; return this; } public Executor getAsyncExecutor() { return asyncExecutor; } /** * Sets the executor that is used to run async tasks. * <p> * If this is null then {@link #executor} is used, if this is also null then the default is used * * @param asyncExecutor The executor */ public DeploymentInfo setAsyncExecutor(final Executor asyncExecutor) { this.asyncExecutor = asyncExecutor; return this; } public File getTempDir() { if(tempDir == null) { return null; } return tempDir.toFile(); } public Path getTempPath() { return tempDir; } public DeploymentInfo setTempDir(final File tempDir) { this.tempDir = tempDir != null ? 
tempDir.toPath() : null; return this; } public DeploymentInfo setTempDir(final Path tempDir) { this.tempDir = tempDir; return this; } public boolean isIgnoreFlush() { return ignoreFlush; } public DeploymentInfo setIgnoreFlush(boolean ignoreFlush) { this.ignoreFlush = ignoreFlush; return this; } public JspConfigDescriptor getJspConfigDescriptor() { return jspConfigDescriptor; } public DeploymentInfo setJspConfigDescriptor(JspConfigDescriptor jspConfigDescriptor) { this.jspConfigDescriptor = jspConfigDescriptor; return this; } public DefaultServletConfig getDefaultServletConfig() { return defaultServletConfig; } public DeploymentInfo setDefaultServletConfig(final DefaultServletConfig defaultServletConfig) { this.defaultServletConfig = defaultServletConfig; return this; } public DeploymentInfo addLocaleCharsetMapping(final String locale, final String charset) { localeCharsetMapping.put(locale, charset); return this; } public Map<String, String> getLocaleCharsetMapping() { return localeCharsetMapping; } public SessionManagerFactory getSessionManagerFactory() { return sessionManagerFactory; } public DeploymentInfo setSessionManagerFactory(final SessionManagerFactory sessionManagerFactory) { this.sessionManagerFactory = sessionManagerFactory; return this; } public LoginConfig getLoginConfig() { return loginConfig; } public DeploymentInfo setLoginConfig(LoginConfig loginConfig) { this.loginConfig = loginConfig; return this; } public IdentityManager getIdentityManager() { return identityManager; } public DeploymentInfo setIdentityManager(IdentityManager identityManager) { this.identityManager = identityManager; return this; } public ConfidentialPortManager getConfidentialPortManager() { return confidentialPortManager; } public DeploymentInfo setConfidentialPortManager(ConfidentialPortManager confidentialPortManager) { this.confidentialPortManager = confidentialPortManager; return this; } public DeploymentInfo addSecurityRole(final String role) { this.securityRoles.add(role); return this; } public DeploymentInfo addSecurityRoles(final String... roles) { this.securityRoles.addAll(Arrays.asList(roles)); return this; } public DeploymentInfo addSecurityRoles(final Collection<String> roles) { this.securityRoles.addAll(roles); return this; } public Set<String> getSecurityRoles() { return Collections.unmodifiableSet(securityRoles); } /** * Adds an outer handler wrapper. This handler will be run after the servlet initial handler, * but before any other handlers. These are only run on REQUEST invocations, they * are not invoked on a FORWARD or INCLUDE. * * @param wrapper The wrapper */ public DeploymentInfo addOuterHandlerChainWrapper(final HandlerWrapper wrapper) { outerHandlerChainWrappers.add(wrapper); return this; } public List<HandlerWrapper> getOuterHandlerChainWrappers() { return Collections.unmodifiableList(outerHandlerChainWrappers); } /** * Adds an inner handler chain wrapper. 
This handler will be run after the security handler, * but before any other servlet handlers, and will be run for every request * * @param wrapper The wrapper */ public DeploymentInfo addInnerHandlerChainWrapper(final HandlerWrapper wrapper) { innerHandlerChainWrappers.add(wrapper); return this; } public List<HandlerWrapper> getInnerHandlerChainWrappers() { return Collections.unmodifiableList(innerHandlerChainWrappers); } public DeploymentInfo addInitialHandlerChainWrapper(final HandlerWrapper wrapper) { initialHandlerChainWrappers.add(wrapper); return this; } public List<HandlerWrapper> getInitialHandlerChainWrappers() { return Collections.unmodifiableList(initialHandlerChainWrappers); } /** * Sets the initial handler wrapper that will take over responsibility for establishing * a security context that will handle authentication for the request. * * Undertow specific authentication mechanisms will not be installed but Undertow handlers will * still make the decision as to if authentication is required and will subsequently * call {@link io.undertow.security.api.SecurityContext#authenticate()} as required. * * @param wrapper the {@link HandlerWrapper} to handle the initial security context installation. * @return {@code this} to allow chaining. */ public DeploymentInfo setInitialSecurityWrapper(final HandlerWrapper wrapper) { this.initialSecurityWrapper = wrapper; return this; } public HandlerWrapper getInitialSecurityWrapper() { return initialSecurityWrapper; } /** * Adds a security handler. These are invoked before the authentication mechanism, and are always invoked * even if authentication is not required. * @param wrapper * @return */ public DeploymentInfo addSecurityWrapper(final HandlerWrapper wrapper) { securityWrappers.add(wrapper); return this; } public List<HandlerWrapper> getSecurityWrappers() { return Collections.unmodifiableList(securityWrappers); } public DeploymentInfo addNotificationReceiver(final NotificationReceiver notificationReceiver) { this.notificationReceivers.add(notificationReceiver); return this; } public DeploymentInfo addNotificactionReceivers(final NotificationReceiver... notificationReceivers) { this.notificationReceivers.addAll(Arrays.asList(notificationReceivers)); return this; } public DeploymentInfo addNotificationReceivers(final Collection<NotificationReceiver> notificationReceivers) { this.notificationReceivers.addAll(notificationReceivers); return this; } public List<NotificationReceiver> getNotificationReceivers() { return Collections.unmodifiableList(notificationReceivers); } public ConcurrentMap<String, Object> getServletContextAttributeBackingMap() { return servletContextAttributeBackingMap; } /** * Sets the map that will be used by the ServletContext implementation to store attributes. * <p> * This should usuablly be null, in which case Undertow will create a new map. This is only * used in situations where you want multiple deployments to share the same servlet context * attributes. 
* * @param servletContextAttributeBackingMap * The backing map */ public DeploymentInfo setServletContextAttributeBackingMap(final ConcurrentMap<String, Object> servletContextAttributeBackingMap) { this.servletContextAttributeBackingMap = servletContextAttributeBackingMap; return this; } public ServletSessionConfig getServletSessionConfig() { return servletSessionConfig; } public DeploymentInfo setServletSessionConfig(final ServletSessionConfig servletSessionConfig) { this.servletSessionConfig = servletSessionConfig; return this; } /** * @return the host name */ public String getHostName() { return hostName; } public DeploymentInfo setHostName(final String hostName) { this.hostName = hostName; return this; } public boolean isDenyUncoveredHttpMethods() { return denyUncoveredHttpMethods; } public DeploymentInfo setDenyUncoveredHttpMethods(final boolean denyUncoveredHttpMethods) { this.denyUncoveredHttpMethods = denyUncoveredHttpMethods; return this; } public ServletStackTraces getServletStackTraces() { return servletStackTraces; } public DeploymentInfo setServletStackTraces(ServletStackTraces servletStackTraces) { this.servletStackTraces = servletStackTraces; return this; } public boolean isInvalidateSessionOnLogout() { return invalidateSessionOnLogout; } public DeploymentInfo setInvalidateSessionOnLogout(boolean invalidateSessionOnLogout) { this.invalidateSessionOnLogout = invalidateSessionOnLogout; return this; } public int getDefaultCookieVersion() { return defaultCookieVersion; } public DeploymentInfo setDefaultCookieVersion(int defaultCookieVersion) { this.defaultCookieVersion = defaultCookieVersion; return this; } public SessionPersistenceManager getSessionPersistenceManager() { return sessionPersistenceManager; } public DeploymentInfo setSessionPersistenceManager(SessionPersistenceManager sessionPersistenceManager) { this.sessionPersistenceManager = sessionPersistenceManager; return this; } public AuthorizationManager getAuthorizationManager() { return authorizationManager; } public DeploymentInfo setAuthorizationManager(AuthorizationManager authorizationManager) { this.authorizationManager = authorizationManager; return this; } public DeploymentInfo addPrincipalVsRoleMapping(final String principal, final String mapping) { Set<String> set = principalVersusRolesMap.get(principal); if (set == null) { principalVersusRolesMap.put(principal, set = new HashSet<>()); } set.add(mapping); return this; } public DeploymentInfo addPrincipalVsRoleMappings(final String principal, final String... mappings) { Set<String> set = principalVersusRolesMap.get(principal); if (set == null) { principalVersusRolesMap.put(principal, set = new HashSet<>()); } set.addAll(Arrays.asList(mappings)); return this; } public DeploymentInfo addPrincipalVsRoleMappings(final String principal, final Collection<String> mappings) { Set<String> set = principalVersusRolesMap.get(principal); if (set == null) { principalVersusRolesMap.put(principal, set = new HashSet<>()); } set.addAll(mappings); return this; } public Map<String, Set<String>> getPrincipalVersusRolesMap() { return Collections.unmodifiableMap(principalVersusRolesMap); } /** * Removes all configured authentication mechanisms from the deployment. * * @return this deployment info */ public DeploymentInfo clearLoginMethods() { if(loginConfig != null) { loginConfig.getAuthMethods().clear(); } return this; } /** * Adds an authentication mechanism directly to the deployment. This mechanism will be first in the list. 
* * In general you should just use {@link #addAuthenticationMechanism(String, io.undertow.security.api.AuthenticationMechanismFactory)} * and allow the user to configure the methods they want by name. * * This method is essentially a convenience method, if is the same as registering a factory under the provided name that returns * and authentication mechanism, and then adding it to the login config list. * * If you want your mechanism to be the only one in the deployment you should first invoke {@link #clearLoginMethods()}. * * @param name The authentication mechanism name * @param mechanism The mechanism * @return this deployment info */ public DeploymentInfo addFirstAuthenticationMechanism(final String name, final AuthenticationMechanism mechanism) { authenticationMechanisms.put(name, new ImmediateAuthenticationMechanismFactory(mechanism)); if(loginConfig == null) { loginConfig = new LoginConfig(null); } loginConfig.addFirstAuthMethod(new AuthMethodConfig(name)); return this; } /** * Adds an authentication mechanism directly to the deployment. This mechanism will be last in the list. * * In general you should just use {@link #addAuthenticationMechanism(String, io.undertow.security.api.AuthenticationMechanismFactory)} * and allow the user to configure the methods they want by name. * * This method is essentially a convenience method, if is the same as registering a factory under the provided name that returns * and authentication mechanism, and then adding it to the login config list. * * If you want your mechanism to be the only one in the deployment you should first invoke {@link #clearLoginMethods()}. * * @param name The authentication mechanism name * @param mechanism The mechanism * @return */ public DeploymentInfo addLastAuthenticationMechanism(final String name, final AuthenticationMechanism mechanism) { authenticationMechanisms.put(name, new ImmediateAuthenticationMechanismFactory(mechanism)); if(loginConfig == null) { loginConfig = new LoginConfig(null); } loginConfig.addLastAuthMethod(new AuthMethodConfig(name)); return this; } /** * Adds an authentication mechanism. The name is case insenstive, and will be converted to uppercase internally. * * @param name The name * @param factory The factory * @return */ public DeploymentInfo addAuthenticationMechanism(final String name, final AuthenticationMechanismFactory factory) { authenticationMechanisms.put(name.toUpperCase(Locale.US), factory); return this; } public Map<String, AuthenticationMechanismFactory> getAuthenticationMechanisms() { return Collections.unmodifiableMap(authenticationMechanisms); } /** * Returns true if the specified mechanism is present in the login config * @param mechanismName The mechanism name * @return true if the mechanism is enabled */ public boolean isAuthenticationMechanismPresent(final String mechanismName) { if(loginConfig != null) { for(AuthMethodConfig method : loginConfig.getAuthMethods()) { if(method.getName().equalsIgnoreCase(mechanismName)) { return true; } } } return false; } /** * Adds an additional servlet extension to the deployment. Servlet extensions are generally discovered * using META-INF/services entries, however this may not be practical in all environments. 
* @param servletExtension The servlet extension * @return this */ public DeploymentInfo addServletExtension(final ServletExtension servletExtension) { this.servletExtensions.add(servletExtension); return this; } public List<ServletExtension> getServletExtensions() { return servletExtensions; } public AuthenticationMechanism getJaspiAuthenticationMechanism() { return jaspiAuthenticationMechanism; } public DeploymentInfo setJaspiAuthenticationMechanism(AuthenticationMechanism jaspiAuthenticationMechanism) { this.jaspiAuthenticationMechanism = jaspiAuthenticationMechanism; return this; } public SecurityContextFactory getSecurityContextFactory() { return this.securityContextFactory; } public DeploymentInfo setSecurityContextFactory(final SecurityContextFactory securityContextFactory) { this.securityContextFactory = securityContextFactory; return this; } public String getServerName() { return serverName; } public DeploymentInfo setServerName(String serverName) { this.serverName = serverName; return this; } public DeploymentInfo setMetricsCollector(MetricsCollector metricsCollector){ this.metricsCollector = metricsCollector; return this; } public MetricsCollector getMetricsCollector() { return metricsCollector; } public SessionConfigWrapper getSessionConfigWrapper() { return sessionConfigWrapper; } public DeploymentInfo setSessionConfigWrapper(SessionConfigWrapper sessionConfigWrapper) { this.sessionConfigWrapper = sessionConfigWrapper; return this; } public boolean isDisableCachingForSecuredPages() { return disableCachingForSecuredPages; } public DeploymentInfo setDisableCachingForSecuredPages(boolean disableCachingForSecuredPages) { this.disableCachingForSecuredPages = disableCachingForSecuredPages; return this; } public DeploymentInfo addLifecycleInterceptor(final LifecycleInterceptor interceptor) { lifecycleInterceptors.add(interceptor); return this; } public List<LifecycleInterceptor> getLifecycleInterceptors() { return Collections.unmodifiableList(lifecycleInterceptors); } /** * Returns the exception handler that is used by this deployment. By default this will simply * log unhandled exceptions */ public ExceptionHandler getExceptionHandler() { return exceptionHandler; } /** * Sets the default exception handler for this deployment * @param exceptionHandler The exception handler * @return */ public DeploymentInfo setExceptionHandler(ExceptionHandler exceptionHandler) { this.exceptionHandler = exceptionHandler; return this; } public boolean isEscapeErrorMessage() { return escapeErrorMessage; } /** * Set if if the message passed to {@link javax.servlet.http.HttpServletResponse#sendError(int, String)} should be escaped. * * If this is false applications must be careful not to use user provided data (such as the URI) in the message * * @param escapeErrorMessage If the error message should be escaped */ public DeploymentInfo setEscapeErrorMessage(boolean escapeErrorMessage) { this.escapeErrorMessage = escapeErrorMessage; return this; } public DeploymentInfo addSessionListener(SessionListener sessionListener) { this.sessionListeners.add(sessionListener); return this; } public List<SessionListener> getSessionListeners() { return Collections.unmodifiableList(sessionListeners); } public AuthenticationMode getAuthenticationMode() { return authenticationMode; } /** * Sets if this deployment should use pro-active authentication and always authenticate if the credentials are present * or constraint driven auth which will only call the authentication mechanisms for protected resources. 
* * Pro active auth means that requests for unprotected resources will still be associated with a user, which may be * useful for access logging. * * * @param authenticationMode The authentication mode to use * @return */ public DeploymentInfo setAuthenticationMode(AuthenticationMode authenticationMode) { this.authenticationMode = authenticationMode; return this; } public MultipartConfigElement getDefaultMultipartConfig() { return defaultMultipartConfig; } public DeploymentInfo setDefaultMultipartConfig(MultipartConfigElement defaultMultipartConfig) { this.defaultMultipartConfig = defaultMultipartConfig; return this; } public int getContentTypeCacheSize() { return contentTypeCacheSize; } public DeploymentInfo setContentTypeCacheSize(int contentTypeCacheSize) { this.contentTypeCacheSize = contentTypeCacheSize; return this; } public SessionIdGenerator getSessionIdGenerator() { return sessionIdGenerator; } public DeploymentInfo setSessionIdGenerator(SessionIdGenerator sessionIdGenerator) { this.sessionIdGenerator = sessionIdGenerator; return this; } public boolean isSendCustomReasonPhraseOnError() { return sendCustomReasonPhraseOnError; } public CrawlerSessionManagerConfig getCrawlerSessionManagerConfig() { return crawlerSessionManagerConfig; } public DeploymentInfo setCrawlerSessionManagerConfig(CrawlerSessionManagerConfig crawlerSessionManagerConfig) { this.crawlerSessionManagerConfig = crawlerSessionManagerConfig; return this; } /** * If this is true then the message parameter of {@link javax.servlet.http.HttpServletResponse#sendError(int, String)} and * {@link javax.servlet.http.HttpServletResponse#setStatus(int, String)} will be used as the HTTP reason phrase in * the response. * * @param sendCustomReasonPhraseOnError If the parameter to sendError should be used as a HTTP reason phrase * @return this */ public DeploymentInfo setSendCustomReasonPhraseOnError(boolean sendCustomReasonPhraseOnError) { this.sendCustomReasonPhraseOnError = sendCustomReasonPhraseOnError; return this; } public boolean isChangeSessionIdOnLogin() { return changeSessionIdOnLogin; } public DeploymentInfo setChangeSessionIdOnLogin(boolean changeSessionIdOnLogin) { this.changeSessionIdOnLogin = changeSessionIdOnLogin; return this; } public boolean isUseCachedAuthenticationMechanism() { return useCachedAuthenticationMechanism; } /** * If this is set to false the the cached authenticated session mechanism won't be installed. If you want FORM and * other auth methods that require caching to work then you need to install another caching based auth method (such * as SSO). * @param useCachedAuthenticationMechanism If Undertow should use its internal authentication cache mechanism * @return this */ public DeploymentInfo setUseCachedAuthenticationMechanism(boolean useCachedAuthenticationMechanism) { this.useCachedAuthenticationMechanism = useCachedAuthenticationMechanism; return this; } public boolean isSecurityDisabled() { return securityDisabled; } public DeploymentInfo setSecurityDisabled(boolean securityDisabled) { this.securityDisabled = securityDisabled; return this; } public boolean isCheckOtherSessionManagers() { return checkOtherSessionManagers; } /** * If this is true then when an existing invalid session id is found all other deployments in the container will have their * session managers checked to see if it represents a valid session. If it does then the session id will be re-used. 
*/ public DeploymentInfo setCheckOtherSessionManagers(boolean checkOtherSessionManagers) { this.checkOtherSessionManagers = checkOtherSessionManagers; return this; } public String getDefaultRequestEncoding() { return defaultRequestEncoding; } public DeploymentInfo setDefaultRequestEncoding(String defaultRequestEncoding) { this.defaultRequestEncoding = defaultRequestEncoding; return this; } public String getDefaultResponseEncoding() { return defaultResponseEncoding; } public DeploymentInfo setDefaultResponseEncoding(String defaultResponseEncoding) { this.defaultResponseEncoding = defaultResponseEncoding; return this; } /** * Adds a pre compressed resource encoding and maps it to a file extension * * * @param encoding The content encoding * @param extension The file extension * @return this builder */ public DeploymentInfo addPreCompressedResourceEncoding(String encoding, String extension) { preCompressedResources.put(encoding, extension); return this; } public Map<String, String> getPreCompressedResources() { return Collections.unmodifiableMap(preCompressedResources); } public int getContainerMajorVersion() { return containerMajorVersion; } public DeploymentInfo setContainerMajorVersion(int containerMajorVersion) { this.containerMajorVersion = containerMajorVersion; return this; } public int getContainerMinorVersion() { return containerMinorVersion; } public DeploymentInfo setContainerMinorVersion(int containerMinorVersion) { this.containerMinorVersion = containerMinorVersion; return this; } /** * Add's a listener that is only invoked once all other deployment steps have been completed * * The listeners <code>contextDestroyed</code> method will be called after all undeployment steps are undertaken * * @param servletContextListener * @return */ public DeploymentInfo addDeploymentCompleteListener(ServletContextListener servletContextListener) { deploymentCompleteListeners.add(servletContextListener); return this; } public List<ServletContextListener> getDeploymentCompleteListeners() { return Collections.unmodifiableList(deploymentCompleteListeners); } @Override public DeploymentInfo clone() { final DeploymentInfo info = new DeploymentInfo() .setClassLoader(classLoader) .setContextPath(contextPath) .setResourceManager(resourceManager) .setMajorVersion(majorVersion) .setMinorVersion(minorVersion) .setDeploymentName(deploymentName) .setClassIntrospecter(classIntrospecter); for (Map.Entry<String, ServletInfo> e : servlets.entrySet()) { info.addServlet(e.getValue().clone()); } for (Map.Entry<String, FilterInfo> e : filters.entrySet()) { info.addFilter(e.getValue().clone()); } info.displayName = displayName; info.filterUrlMappings.addAll(filterUrlMappings); info.filterServletNameMappings.addAll(filterServletNameMappings); info.listeners.addAll(listeners); info.servletContainerInitializers.addAll(servletContainerInitializers); info.threadSetupActions.addAll(threadSetupActions); info.initParameters.putAll(initParameters); info.servletContextAttributes.putAll(servletContextAttributes); info.welcomePages.addAll(welcomePages); info.errorPages.addAll(errorPages); info.mimeMappings.addAll(mimeMappings); info.executor = executor; info.asyncExecutor = asyncExecutor; info.tempDir = tempDir; info.jspConfigDescriptor = jspConfigDescriptor; info.defaultServletConfig = defaultServletConfig; info.localeCharsetMapping.putAll(localeCharsetMapping); info.sessionManagerFactory = sessionManagerFactory; if (loginConfig != null) { info.loginConfig = loginConfig.clone(); } info.identityManager = identityManager; 
info.confidentialPortManager = confidentialPortManager; info.defaultEncoding = defaultEncoding; info.urlEncoding = urlEncoding; info.securityConstraints.addAll(securityConstraints); info.outerHandlerChainWrappers.addAll(outerHandlerChainWrappers); info.innerHandlerChainWrappers.addAll(innerHandlerChainWrappers); info.initialSecurityWrapper = initialSecurityWrapper; info.securityWrappers.addAll(securityWrappers); info.initialHandlerChainWrappers.addAll(initialHandlerChainWrappers); info.securityRoles.addAll(securityRoles); info.notificationReceivers.addAll(notificationReceivers); info.allowNonStandardWrappers = allowNonStandardWrappers; info.defaultSessionTimeout = defaultSessionTimeout; info.servletContextAttributeBackingMap = servletContextAttributeBackingMap; info.servletSessionConfig = servletSessionConfig; info.hostName = hostName; info.denyUncoveredHttpMethods = denyUncoveredHttpMethods; info.servletStackTraces = servletStackTraces; info.invalidateSessionOnLogout = invalidateSessionOnLogout; info.defaultCookieVersion = defaultCookieVersion; info.sessionPersistenceManager = sessionPersistenceManager; info.principalVersusRolesMap.putAll(principalVersusRolesMap); info.ignoreFlush = ignoreFlush; info.authorizationManager = authorizationManager; info.authenticationMechanisms.putAll(authenticationMechanisms); info.servletExtensions.addAll(servletExtensions); info.jaspiAuthenticationMechanism = jaspiAuthenticationMechanism; info.securityContextFactory = securityContextFactory; info.serverName = serverName; info.metricsCollector = metricsCollector; info.sessionConfigWrapper = sessionConfigWrapper; info.eagerFilterInit = eagerFilterInit; info.disableCachingForSecuredPages = disableCachingForSecuredPages; info.exceptionHandler = exceptionHandler; info.escapeErrorMessage = escapeErrorMessage; info.sessionListeners.addAll(sessionListeners); info.lifecycleInterceptors.addAll(lifecycleInterceptors); info.authenticationMode = authenticationMode; info.defaultMultipartConfig = defaultMultipartConfig; info.contentTypeCacheSize = contentTypeCacheSize; info.sessionIdGenerator = sessionIdGenerator; info.sendCustomReasonPhraseOnError = sendCustomReasonPhraseOnError; info.changeSessionIdOnLogin = changeSessionIdOnLogin; info.crawlerSessionManagerConfig = crawlerSessionManagerConfig; info.securityDisabled = securityDisabled; info.useCachedAuthenticationMechanism = useCachedAuthenticationMechanism; info.checkOtherSessionManagers = checkOtherSessionManagers; info.defaultRequestEncoding = defaultRequestEncoding; info.defaultResponseEncoding = defaultResponseEncoding; info.preCompressedResources.putAll(preCompressedResources); info.containerMajorVersion = containerMajorVersion; info.containerMinorVersion = containerMinorVersion; info.deploymentCompleteListeners.addAll(deploymentCompleteListeners); return info; } }
UNDERTOW-1346 Make collection fields of DeploymentInfo modifiable
servlet/src/main/java/io/undertow/servlet/api/DeploymentInfo.java
UNDERTOW-1346 Make collection fields of DeploymentInfo modifiable
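The commit recorded here changes only the collection getters of DeploymentInfo: the second copy of the file above (the pre-change contents) wraps fields such as lifecycleInterceptors, sessionListeners, preCompressedResources and deploymentCompleteListeners in Collections.unmodifiable* views, while the first copy (the post-change contents) returns the backing collections directly. A hedged sketch of what that means for callers follows; the class name is invented and the listener argument is an arbitrary SessionListener used only for illustration.

import io.undertow.server.session.SessionListener;
import io.undertow.servlet.api.DeploymentInfo;

public class Undertow1346Sketch {

    static void registerListener(DeploymentInfo info, SessionListener listener) {
        // Pre-change: getSessionListeners() returned Collections.unmodifiableList(sessionListeners),
        // so mutating the returned list threw UnsupportedOperationException.
        // Post-change: the live backing list is returned and this addition takes effect.
        info.getSessionListeners().add(listener);

        // The builder-style method behaves identically in both versions.
        info.addSessionListener(listener);
    }
}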
Java
apache-2.0
a6849543c1c4dcbe05e907c17d0d802666422c0a
0
droolsjbpm/droolsjbpm-tools,bbrodt/droolsjbpm-tools,etirelli/droolsjbpm-tools,mbiarnes/droolsjbpm-tools,pleacu/droolsjbpm-tools,pleacu/droolsjbpm-tools,droolsjbpm/droolsjbpm-tools,ChallenHB/droolsjbpm-tools,ChallenHB/droolsjbpm-tools,bbrodt/droolsjbpm-tools,bbrodt/droolsjbpm-tools,etirelli/droolsjbpm-tools,mbiarnes/droolsjbpm-tools
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.ide.common.client.modeldriven.dt; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.drools.ide.common.client.modeldriven.SuggestionCompletionEngine; import org.drools.ide.common.client.modeldriven.brl.BaseSingleFieldConstraint; import org.drools.ide.common.client.modeldriven.brl.PortableObject; /** * This is a decision table model for a guided editor. It is not template or XLS * based. (template could be done relatively easily by taking a template, as a * String, and then String[][] data and driving the SheetListener interface in * the decision tables module). * * This works by taking the column definitions, and combining them with the * table of data to produce rule models. * * * @author Michael Neale */ public class GuidedDecisionTable implements PortableObject { private static final long serialVersionUID = 510l; /** * Number of internal elements before ( used for offsets in serialization ) */ public static final int INTERNAL_ELEMENTS = 2; /** * The name - obviously. */ private String tableName; private String parentName; // metadata defined for table ( will be represented as a column per table // row of DATA private RowNumberCol rowNumberCol; private DescriptionCol descriptionCol; private List<MetadataCol> metadataCols; private List<AttributeCol> attributeCols = new ArrayList<AttributeCol>(); private List<ConditionCol> conditionCols = new ArrayList<ConditionCol>(); private List<ActionCol> actionCols = new ArrayList<ActionCol>(); /** * First column is always row number. Second column is description. * Subsequent ones follow the above column definitions: attributeCols, then * conditionCols, then actionCols, in that order, left to right. */ private String[][] data = new String[0][0]; /** * The width to display the description column. */ private int descriptionWidth = -1; private String groupField; public GuidedDecisionTable() { } /** * This will return a list of valid values. if there is no such * "enumeration" of values, then it will return an empty array. */ public String[] getValueList(DTColumnConfig col, SuggestionCompletionEngine sce) { if (col instanceof AttributeCol) { AttributeCol at = (AttributeCol) col; if ("no-loop".equals(at.attr) || "enabled".equals(at.attr)) { return new String[] { "true", "false" }; } } else if (col instanceof ConditionCol) { // conditions: if its a formula etc, just return String[0], // otherwise check with the sce ConditionCol c = (ConditionCol) col; if (c.getConstraintValueType() == BaseSingleFieldConstraint.TYPE_RET_VALUE || c.getConstraintValueType() == BaseSingleFieldConstraint.TYPE_PREDICATE) { return new String[0]; } else { if (c.getValueList() != null && !"".equals(c.getValueList())) { return c.getValueList().split(","); } else { String[] r = sce.getEnumValues(c.getFactType(), c.getFactField()); return (r != null) ? 
r : new String[0]; } } } else if (col instanceof ActionSetFieldCol) { ActionSetFieldCol c = (ActionSetFieldCol) col; if (c.getValueList() != null && !"".equals(c.getValueList())) { return c.getValueList().split(","); } else { String[] r = sce.getEnumValues( getBoundFactType(c.getBoundName()), c.getFactField()); return (r != null) ? r : new String[0]; } } else if (col instanceof ActionInsertFactCol) { ActionInsertFactCol c = (ActionInsertFactCol) col; if (c.getValueList() != null && !"".equals(c.getValueList())) { return c.getValueList().split(","); } else { String[] r = sce.getEnumValues(c.getFactType(), c.getFactField()); return (r != null) ? r : new String[0]; } } return new String[0]; } public String getType(DTColumnConfig col, SuggestionCompletionEngine sce) { String type = null; if (col instanceof AttributeCol) { AttributeCol at = (AttributeCol) col; type = at.attr; } else if (col instanceof ConditionCol) { ConditionCol c = (ConditionCol) col; type = sce.getFieldType(c.getFactType(), c.getFactField()); } else if (col instanceof ActionSetFieldCol) { ActionSetFieldCol c = (ActionSetFieldCol) col; type = sce.getFieldType(getBoundFactType(c.getBoundName()), c.getFactField()); } else if (col instanceof ActionInsertFactCol) { ActionInsertFactCol c = (ActionInsertFactCol) col; type = sce.getFieldType(c.getFactType(), c.getFactField()); } return type; } private String getBoundFactType(String boundName) { for (Iterator<ConditionCol> iterator = getConditionCols().iterator(); iterator .hasNext();) { ConditionCol c = iterator.next(); if (c.getBoundName().equals(boundName)) { return c.getFactType(); } } return null; } public boolean isNumeric(DTColumnConfig col, SuggestionCompletionEngine sce) { if (col instanceof AttributeCol) { AttributeCol at = (AttributeCol) col; return "salience".equals(at.attr) || "duration".equals(at.attr); } else { return isDataType(col, sce, SuggestionCompletionEngine.TYPE_NUMERIC); } } public boolean isBoolean(DTColumnConfig col, SuggestionCompletionEngine sce) { if (col instanceof AttributeCol) { AttributeCol at = (AttributeCol) col; return "enabled".equals(at.attr) || "no-loop".equals(at.attr) || "auto-focus".equals(at.attr) || "lock-on-active".equals(at.attr); } else { return isDataType(col, sce, SuggestionCompletionEngine.TYPE_BOOLEAN); } } public boolean isDate(DTColumnConfig col, SuggestionCompletionEngine sce) { if (col instanceof AttributeCol) { AttributeCol at = (AttributeCol) col; return "date-effective".equals(at.attr) || "date-expires".equals(at.attr); } else { return isDataType(col, sce, SuggestionCompletionEngine.TYPE_DATE); } } private boolean isDataType(DTColumnConfig col, SuggestionCompletionEngine sce, String dataType) { if (col instanceof RowNumberCol) { throw new IllegalArgumentException( "Only ConditionCol and Actions permitted. Consider using one of the public is<DataType> methods."); } if (col instanceof DescriptionCol) { throw new IllegalArgumentException( "Only ConditionCol and Actions permitted. Consider using one of the public is<DataType> methods."); } if (col instanceof MetadataCol) { throw new IllegalArgumentException( "Only ConditionCol and Actions permitted. Consider using one of the public is<DataType> methods."); } if (col instanceof AttributeCol) { throw new IllegalArgumentException( "Only ConditionCol and Actions permitted. 
Consider using one of the public is<DataType> methods."); } if (col instanceof ConditionCol) { ConditionCol c = (ConditionCol) col; if (c.getConstraintValueType() == BaseSingleFieldConstraint.TYPE_LITERAL) { if (c.getOperator() == null || "".equals(c.getOperator())) { return false; } String ft = sce.getFieldType(c.getFactType(), c.getFactField()); if (ft != null && ft.equals(dataType)) { return true; } } } else if (col instanceof ActionSetFieldCol) { ActionSetFieldCol c = (ActionSetFieldCol) col; String ft = sce.getFieldType(getBoundFactType(c.getBoundName()), c.getFactField()); if (ft != null && ft.equals(dataType)) { return true; } } else if (col instanceof ActionInsertFactCol) { ActionInsertFactCol c = (ActionInsertFactCol) col; String ft = sce.getFieldType(c.getFactType(), c.getFactField()); if (ft != null && ft.equals(dataType)) { return true; } } return false; } public RowNumberCol getRowNumberCol() { // De-serialising old models sets this field to null if (this.rowNumberCol == null) { this.rowNumberCol = new RowNumberCol(); } return this.rowNumberCol; } public void setRowNumberCol(RowNumberCol rowNumberCol) { this.rowNumberCol = rowNumberCol; } public DescriptionCol getDescriptionCol() { // De-serialising old models sets this field to null if (this.descriptionCol == null) { this.descriptionCol = new DescriptionCol(); } return this.descriptionCol; } public void setDescriptionCol(DescriptionCol descriptionCol) { this.descriptionCol = descriptionCol; } public void setMetadataCols(List<MetadataCol> metadataCols) { this.metadataCols = metadataCols; } public List<MetadataCol> getMetadataCols() { if (null == metadataCols) { metadataCols = new ArrayList<MetadataCol>(); } return metadataCols; } /** * Locate index of attribute name if it exists * * @param attributeName * Name of metadata we are looking for * @return index of attribute name or -1 if not found */ public int getMetadataColIndex(String attributeName) { for (int i = 0; metadataCols != null && i < metadataCols.size(); i++) { if (attributeName.equals(metadataCols.get(i).attr)) { return i; } } return -1; } /** * Update all rows of metadata with value it attribute is present * * @param attributeName * Name of metadata we are looking for * @return true if values update, false if not */ public boolean updateMetadata(String attributeName, String newValue) { // see if metaData exists for int metaIndex = getMetadataColIndex(attributeName); if (metaIndex < 0) return false; for (int i = 0; i < getData().length; i++) { String[] row = getData()[i]; row[GuidedDecisionTable.INTERNAL_ELEMENTS + metaIndex] = newValue; } return true; } public void setGroupField(String groupField) { this.groupField = groupField; } public String getGroupField() { return groupField; } public void setTableName(String tableName) { this.tableName = tableName; } public String getTableName() { return tableName; } public void setParentName(String parentName) { this.parentName = parentName; } public String getParentName() { return parentName; } public void setAttributeCols(List<AttributeCol> attributeCols) { this.attributeCols = attributeCols; } public List<AttributeCol> getAttributeCols() { return attributeCols; } public void setConditionCols(List<ConditionCol> conditionCols) { this.conditionCols = conditionCols; } public List<ConditionCol> getConditionCols() { return conditionCols; } public void setActionCols(List<ActionCol> actionCols) { this.actionCols = actionCols; } public List<ActionCol> getActionCols() { return actionCols; } public void setData(String[][] data) { 
this.data = data; } public String[][] getData() { return data; } public void setDescriptionWidth(int descriptionWidth) { this.descriptionWidth = descriptionWidth; } public int getDescriptionWidth() { return descriptionWidth; } }
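The getValueList logic in the new_contents above resolves a column's drop-down values by preferring an explicit comma-separated valueList and otherwise falling back to the suggestion-completion engine's enum values, returning an empty array when neither is available. Below is a minimal sketch of that fallback, assuming a simplified enum-lookup function in place of the real SuggestionCompletionEngine; the names used here are illustrative stand-ins, not the project's API.

```java
// Minimal sketch of the value-list fallback in getValueList: prefer an
// explicit comma-separated list, otherwise ask an enum oracle, otherwise
// return an empty array. 'enumOracle' is a simplified stand-in for
// SuggestionCompletionEngine.getEnumValues(factType, factField).
import java.util.function.BiFunction;

public class ValueListSketch {

    static String[] resolveValueList(String explicitList,
                                     String factType,
                                     String factField,
                                     BiFunction<String, String, String[]> enumOracle) {
        if (explicitList != null && !"".equals(explicitList)) {
            return explicitList.split(",");          // explicit list wins
        }
        String[] fromEnum = enumOracle.apply(factType, factField);
        return (fromEnum != null) ? fromEnum : new String[0];
    }

    public static void main(String[] args) {
        BiFunction<String, String, String[]> oracle =
                (type, field) -> "Driver.rating".equals(type + "." + field)
                        ? new String[] { "LOW", "MEDIUM", "HIGH" } : null;

        // Explicit list takes precedence over the oracle.
        System.out.println(resolveValueList("yes,no", "Driver", "rating", oracle).length); // 2
        // No explicit list: fall back to the oracle.
        System.out.println(resolveValueList("", "Driver", "rating", oracle).length);       // 3
        // Unknown field: empty array, never null.
        System.out.println(resolveValueList(null, "Driver", "age", oracle).length);        // 0
    }
}
```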
drools-ide-common/src/main/java/org/drools/ide/common/client/modeldriven/dt/GuidedDecisionTable.java
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.ide.common.client.modeldriven.dt; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.drools.ide.common.client.modeldriven.SuggestionCompletionEngine; import org.drools.ide.common.client.modeldriven.brl.BaseSingleFieldConstraint; import org.drools.ide.common.client.modeldriven.brl.PortableObject; /** * This is a decision table model for a guided editor. It is not template or XLS * based. (template could be done relatively easily by taking a template, as a * String, and then String[][] data and driving the SheetListener interface in * the decision tables module). * * This works by taking the column definitions, and combining them with the * table of data to produce rule models. * * * @author Michael Neale */ public class GuidedDecisionTable implements PortableObject { private static final long serialVersionUID = 510l; /** * Number of internal elements before ( used for offsets in serialization ) */ public static final int INTERNAL_ELEMENTS = 2; /** * The name - obviously. */ private String tableName; private String parentName; // metadata defined for table ( will be represented as a column per table // row of DATA private RowNumberCol rowNumberCol; private DescriptionCol descriptionCol; private List<MetadataCol> metadataCols; private List<AttributeCol> attributeCols = new ArrayList<AttributeCol>(); private List<ConditionCol> conditionCols = new ArrayList<ConditionCol>(); private List<ActionCol> actionCols = new ArrayList<ActionCol>(); /** * First column is always row number. Second column is description. * Subsequent ones follow the above column definitions: attributeCols, then * conditionCols, then actionCols, in that order, left to right. */ private String[][] data = new String[0][0]; /** * The width to display the description column. */ private int descriptionWidth = -1; private String groupField; public GuidedDecisionTable() { } /** * This will return a list of valid values. if there is no such * "enumeration" of values, then it will return an empty array. */ public String[] getValueList(DTColumnConfig col, SuggestionCompletionEngine sce) { if (col instanceof AttributeCol) { AttributeCol at = (AttributeCol) col; if ("no-loop".equals(at.attr) || "enabled".equals(at.attr)) { return new String[] { "true", "false" }; } } else if (col instanceof ConditionCol) { // conditions: if its a formula etc, just return String[0], // otherwise check with the sce ConditionCol c = (ConditionCol) col; if (c.getConstraintValueType() == BaseSingleFieldConstraint.TYPE_RET_VALUE || c.getConstraintValueType() == BaseSingleFieldConstraint.TYPE_PREDICATE) { return new String[0]; } else { if (c.getValueList() != null && !"".equals(c.getValueList())) { return c.getValueList().split(","); } else { String[] r = sce.getEnumValues(c.getFactType(), c.getFactField()); return (r != null) ? 
r : new String[0]; } } } else if (col instanceof ActionSetFieldCol) { ActionSetFieldCol c = (ActionSetFieldCol) col; if (c.getValueList() != null && !"".equals(c.getValueList())) { return c.getValueList().split(","); } else { String[] r = sce.getEnumValues( getBoundFactType(c.getBoundName()), c.getFactField()); return (r != null) ? r : new String[0]; } } else if (col instanceof ActionInsertFactCol) { ActionInsertFactCol c = (ActionInsertFactCol) col; if (c.getValueList() != null && !"".equals(c.getValueList())) { return c.getValueList().split(","); } else { String[] r = sce.getEnumValues(c.getFactType(), c.getFactField()); return (r != null) ? r : new String[0]; } } return new String[0]; } public String getType(DTColumnConfig col, SuggestionCompletionEngine sce) { String type = null; if (col instanceof AttributeCol) { AttributeCol at = (AttributeCol) col; type = at.attr; } else if (col instanceof ConditionCol) { ConditionCol c = (ConditionCol) col; type = sce.getFieldType(c.getFactType(), c.getFactField()); } else if (col instanceof ActionSetFieldCol) { ActionSetFieldCol c = (ActionSetFieldCol) col; type = sce.getFieldType(getBoundFactType(c.getBoundName()), c.getFactField()); } else if (col instanceof ActionInsertFactCol) { ActionInsertFactCol c = (ActionInsertFactCol) col; type = sce.getFieldType(c.getFactType(), c.getFactField()); } return type; } private String getBoundFactType(String boundName) { for (Iterator<ConditionCol> iterator = getConditionCols().iterator(); iterator .hasNext();) { ConditionCol c = iterator.next(); if (c.getBoundName().equals(boundName)) { return c.getFactType(); } } return null; } public boolean isNumeric(DTColumnConfig col, SuggestionCompletionEngine sce) { if (col instanceof AttributeCol) { AttributeCol at = (AttributeCol) col; return "salience".equals(at.attr); } else if (col instanceof ConditionCol) { ConditionCol c = (ConditionCol) col; if (c.getConstraintValueType() == BaseSingleFieldConstraint.TYPE_LITERAL) { if (c.getOperator() == null || "".equals(c.getOperator())) { return false; } String ft = sce.getFieldType(c.getFactType(), c.getFactField()); if (ft != null && ft.equals(SuggestionCompletionEngine.TYPE_NUMERIC)) { return true; } } } else if (col instanceof ActionSetFieldCol) { ActionSetFieldCol c = (ActionSetFieldCol) col; String ft = sce.getFieldType(getBoundFactType(c.getBoundName()), c.getFactField()); if (ft != null && ft.equals(SuggestionCompletionEngine.TYPE_NUMERIC)) { return true; } } else if (col instanceof ActionInsertFactCol) { ActionInsertFactCol c = (ActionInsertFactCol) col; String ft = sce.getFieldType(c.getFactType(), c.getFactField()); if (ft != null && ft.equals(SuggestionCompletionEngine.TYPE_NUMERIC)) { return true; } } // we can reuse text filter from guided editor to enforce this for data // entry. 
return false; } public RowNumberCol getRowNumberCol() { // De-serialising old models sets this field to null if (this.rowNumberCol == null) { this.rowNumberCol = new RowNumberCol(); } return this.rowNumberCol; } public void setRowNumberCol(RowNumberCol rowNumberCol) { this.rowNumberCol = rowNumberCol; } public DescriptionCol getDescriptionCol() { // De-serialising old models sets this field to null if (this.descriptionCol == null) { this.descriptionCol = new DescriptionCol(); } return this.descriptionCol; } public void setDescriptionCol(DescriptionCol descriptionCol) { this.descriptionCol = descriptionCol; } public void setMetadataCols(List<MetadataCol> metadataCols) { this.metadataCols = metadataCols; } public List<MetadataCol> getMetadataCols() { if (null == metadataCols) { metadataCols = new ArrayList<MetadataCol>(); } return metadataCols; } /** * Locate index of attribute name if it exists * * @param attributeName * Name of metadata we are looking for * @return index of attribute name or -1 if not found */ public int getMetadataColIndex(String attributeName) { for (int i = 0; metadataCols != null && i < metadataCols.size(); i++) { if (attributeName.equals(metadataCols.get(i).attr)) { return i; } } return -1; } /** * Update all rows of metadata with value it attribute is present * * @param attributeName * Name of metadata we are looking for * @return true if values update, false if not */ public boolean updateMetadata(String attributeName, String newValue) { // see if metaData exists for int metaIndex = getMetadataColIndex(attributeName); if (metaIndex < 0) return false; for (int i = 0; i < getData().length; i++) { String[] row = getData()[i]; row[GuidedDecisionTable.INTERNAL_ELEMENTS + metaIndex] = newValue; } return true; } public void setGroupField(String groupField) { this.groupField = groupField; } public String getGroupField() { return groupField; } public void setTableName(String tableName) { this.tableName = tableName; } public String getTableName() { return tableName; } public void setParentName(String parentName) { this.parentName = parentName; } public String getParentName() { return parentName; } public void setAttributeCols(List<AttributeCol> attributeCols) { this.attributeCols = attributeCols; } public List<AttributeCol> getAttributeCols() { return attributeCols; } public void setConditionCols(List<ConditionCol> conditionCols) { this.conditionCols = conditionCols; } public List<ConditionCol> getConditionCols() { return conditionCols; } public void setActionCols(List<ActionCol> actionCols) { this.actionCols = actionCols; } public List<ActionCol> getActionCols() { return actionCols; } public void setData(String[][] data) { this.data = data; } public String[][] getData() { return data; } public void setDescriptionWidth(int descriptionWidth) { this.descriptionWidth = descriptionWidth; } public int getDescriptionWidth() { return descriptionWidth; } }
More integration and fix some existing bugs
drools-ide-common/src/main/java/org/drools/ide/common/client/modeldriven/dt/GuidedDecisionTable.java
More integration and fix some existing bugs
Java
apache-2.0
44c1289c08c9a757d86ea6fca644dd5d87176a8c
0
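Comparing the old_contents and new_contents of this record, the visible change is that the duplicated per-column-type branches inside isNumeric are replaced by thin public wrappers (isNumeric, isBoolean, isDate) over a single private isDataType(col, sce, dataType) check that rejects structural columns and compares the engine-reported field type against the requested one. The sketch below illustrates that consolidation pattern only; Column, TypeOracle and the type constants are simplified stand-ins, not the real DTColumnConfig or SuggestionCompletionEngine classes.

```java
// Minimal sketch of the type-check consolidation: the public per-type checks
// all delegate to one shared lookup-and-compare helper.
import java.util.HashMap;
import java.util.Map;

public class TypeCheckSketch {

    static final String TYPE_NUMERIC = "Numeric";
    static final String TYPE_BOOLEAN = "Boolean";
    static final String TYPE_DATE    = "Date";

    /** Simplified stand-in for a condition/action column. */
    static class Column {
        final String factType;
        final String factField;
        Column(String factType, String factField) {
            this.factType = factType;
            this.factField = factField;
        }
    }

    /** Simplified stand-in for the suggestion-completion engine. */
    static class TypeOracle {
        private final Map<String, String> fieldTypes = new HashMap<>();
        void register(String factType, String factField, String type) {
            fieldTypes.put(factType + "." + factField, type);
        }
        String getFieldType(String factType, String factField) {
            return fieldTypes.get(factType + "." + factField);
        }
    }

    // Public per-type checks stay small and readable...
    static boolean isNumeric(Column col, TypeOracle oracle) {
        return isDataType(col, oracle, TYPE_NUMERIC);
    }
    static boolean isBoolean(Column col, TypeOracle oracle) {
        return isDataType(col, oracle, TYPE_BOOLEAN);
    }
    static boolean isDate(Column col, TypeOracle oracle) {
        return isDataType(col, oracle, TYPE_DATE);
    }

    // ...while the shared helper does the single lookup-and-compare.
    private static boolean isDataType(Column col, TypeOracle oracle, String dataType) {
        String ft = oracle.getFieldType(col.factType, col.factField);
        return ft != null && ft.equals(dataType);
    }

    public static void main(String[] args) {
        TypeOracle oracle = new TypeOracle();
        oracle.register("Driver", "age", TYPE_NUMERIC);
        oracle.register("Driver", "approved", TYPE_BOOLEAN);

        System.out.println(isNumeric(new Column("Driver", "age"), oracle));   // true
        System.out.println(isBoolean(new Column("Driver", "age"), oracle));   // false
        System.out.println(isDate(new Column("Driver", "approved"), oracle)); // false
    }
}
```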
ajordens/clouddriver,spinnaker/clouddriver,ajordens/clouddriver,spinnaker/clouddriver,ajordens/clouddriver,spinnaker/clouddriver,ajordens/clouddriver
/* * Copyright 2018 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.clouddriver.controllers; import com.netflix.spinnaker.clouddriver.model.Function; import com.netflix.spinnaker.clouddriver.model.FunctionProvider; import com.netflix.spinnaker.kork.web.exceptions.NotFoundException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.access.prepost.PostAuthorize; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.*; @RestController public class FunctionController { private final List<FunctionProvider> functionProviders; private HashMap<String, String> functionMap = new HashMap<String, String>(); @Autowired public FunctionController(Optional<List<FunctionProvider>> functionProviders) { this.functionProviders = functionProviders.orElse(Collections.emptyList()); } @PostAuthorize("@authorizationSupport.filterForAccounts(returnObject)") @RequestMapping(value = "/functions", method = RequestMethod.GET) @ResponseBody public List<Function> list( @RequestParam(value = "functionName", required = false) String functionName, @RequestParam(value = "region", required = false) String region, @RequestParam(value = "account", required = false) String account) { if (functionName == null || functionName.isEmpty()) { return functionProviders.stream() .map(FunctionProvider::getAllFunctions) .flatMap(Collection::stream) .collect(Collectors.toList()); } else { try { List<Function> myFunction = functionProviders.stream() .map( functionProvider -> functionProvider.getFunction(account, region, functionName)) .filter(function -> function != null) .collect(Collectors.toList()); return myFunction; } catch (NotFoundException e) { throw new NotFoundException(functionName + "does not exist"); } } } @PreAuthorize("hasPermission(#application, 'APPLICATION', 'READ')") @PostAuthorize("@authorizationSupport.filterForAccounts(returnObject)") @RequestMapping(value = "/applications/{application}/functions", method = RequestMethod.GET) List<Function> list(@PathVariable String application) { List<Function> appFunctions = functionProviders.stream() .map(functionProvider -> functionProvider.getApplicationFunctions(application)) .flatMap(Collection::stream) .collect(Collectors.toList()); return appFunctions; } }
clouddriver-web/src/main/groovy/com/netflix/spinnaker/clouddriver/controllers/FunctionController.java
/* * Copyright 2018 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.clouddriver.controllers; import com.netflix.spinnaker.clouddriver.model.Function; import com.netflix.spinnaker.clouddriver.model.FunctionProvider; import com.netflix.spinnaker.kork.web.exceptions.NotFoundException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.access.prepost.PostAuthorize; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.*; @RestController public class FunctionController { private final List<FunctionProvider> functionProviders; private HashMap<String, String> functionMap = new HashMap<String, String>(); @Autowired public FunctionController(Optional<List<FunctionProvider>> functionProviders) { this.functionProviders = functionProviders.orElse(Collections.emptyList()); } @PostAuthorize("@authorizationSupport.filterForAccounts(returnObject)") @RequestMapping(value = "/functions", method = RequestMethod.GET) @ResponseBody public List<Function> list( @RequestParam(value = "functionName", required = false) String functionName, @RequestParam(value = "region", required = false) String region, @RequestParam(value = "account", required = false) String account) { if (functionName == null || functionName.isEmpty()) { return functionProviders.stream() .map(FunctionProvider::getAllFunctions) .flatMap(Collection::stream) .collect(Collectors.toList()); } else { try { List<Function> myFunction = functionProviders.stream() .map( functionProvider -> functionProvider.getFunction(account, region, functionName)) .collect(Collectors.toList()); return myFunction; } catch (NotFoundException e) { throw new NotFoundException(functionName + "does not exist"); } } } @PreAuthorize("hasPermission(#application, 'APPLICATION', 'READ')") @PostAuthorize("@authorizationSupport.filterForAccounts(returnObject)") @RequestMapping(value = "/applications/{application}/functions", method = RequestMethod.GET) List<Function> list(@PathVariable String application) { List<Function> appFunctions = functionProviders.stream() .map(functionProvider -> functionProvider.getApplicationFunctions(application)) .flatMap(Collection::stream) .collect(Collectors.toList()); return appFunctions; } }
fix (aws): Filter out null values in list of lambda providers (#4899) Co-authored-by: smaniyedath <c96ca018fcef7fd98bf9fed93e549310bf2e238a@intuit.com>
clouddriver-web/src/main/groovy/com/netflix/spinnaker/clouddriver/controllers/FunctionController.java
fix (aws): Filter out null values in list of lambda providers (#4899)
Java
apache-2.0
53db22f8543ea4004ee40c90c876e038f00e3789
0
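The functional change in this clouddriver record is the added `.filter(function -> function != null)` in the stream that queries each FunctionProvider, so providers that cannot resolve the requested function no longer contribute null entries to the response. A minimal sketch of that pattern follows; Provider and the nested Function class are simplified stand-ins for the real clouddriver types, and Objects::nonNull is used here in place of the inline lambda from the commit.

```java
// Minimal sketch of the null-filtering fix: when several providers are asked
// for the same named resource, providers that cannot resolve it return null,
// and those nulls are dropped before the list is returned to the caller.
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

public class NullFilterSketch {

    /** Simplified stand-in for the model object returned by a provider. */
    static class Function {
        final String name, account, region;
        Function(String name, String account, String region) {
            this.name = name; this.account = account; this.region = region;
        }
    }

    /** Simplified stand-in for a provider that may not know the function. */
    interface Provider {
        Function getFunction(String account, String region, String functionName);
    }

    static List<Function> lookup(List<Provider> providers,
                                 String account, String region, String functionName) {
        return providers.stream()
                .map(p -> p.getFunction(account, region, functionName))
                .filter(Objects::nonNull)   // the fix: drop providers that returned null
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        Provider aws   = (a, r, n) -> "billing-fn".equals(n) ? new Function(n, a, r) : null;
        Provider other = (a, r, n) -> null;   // never resolves anything

        List<Function> result =
                lookup(Arrays.asList(aws, other), "prod", "us-east-1", "billing-fn");
        System.out.println(result.size());    // 1 -- the null from 'other' was filtered out
    }
}
```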
ontop/ontop,clarkparsia/ontop,eschwert/ontop,eschwert/ontop,ontop/ontop,srapisarda/ontop,srapisarda/ontop,ghxiao/ontop-spatial,ghxiao/ontop-spatial,ontop/ontop,ontop/ontop,ghxiao/ontop-spatial,ConstantB/ontop-spatial,ConstantB/ontop-spatial,ConstantB/ontop-spatial,clarkparsia/ontop,srapisarda/ontop,eschwert/ontop,ConstantB/ontop-spatial,srapisarda/ontop,eschwert/ontop,ghxiao/ontop-spatial,clarkparsia/ontop,ontop/ontop
package it.unibz.krdb.obda.owlrefplatform.core.sql; import it.unibz.krdb.obda.model.AlgebraOperatorPredicate; import it.unibz.krdb.obda.model.Atom; import it.unibz.krdb.obda.model.BooleanOperationPredicate; import it.unibz.krdb.obda.model.CQIE; import it.unibz.krdb.obda.model.Constant; import it.unibz.krdb.obda.model.DataTypePredicate; import it.unibz.krdb.obda.model.DatalogProgram; import it.unibz.krdb.obda.model.Function; import it.unibz.krdb.obda.model.NewLiteral; import it.unibz.krdb.obda.model.NumericalOperationPredicate; import it.unibz.krdb.obda.model.OBDAException; import it.unibz.krdb.obda.model.OBDAQueryModifiers.OrderCondition; import it.unibz.krdb.obda.model.Predicate; import it.unibz.krdb.obda.model.Predicate.COL_TYPE; import it.unibz.krdb.obda.model.URIConstant; import it.unibz.krdb.obda.model.ValueConstant; import it.unibz.krdb.obda.model.Variable; import it.unibz.krdb.obda.model.impl.OBDAVocabulary; import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.DatalogNormalizer; import it.unibz.krdb.obda.owlrefplatform.core.queryevaluation.JDBCUtility; import it.unibz.krdb.obda.owlrefplatform.core.queryevaluation.SQLDialectAdapter; import it.unibz.krdb.obda.owlrefplatform.core.srcquerygeneration.SQLQueryGenerator; import it.unibz.krdb.sql.DBMetadata; import it.unibz.krdb.sql.DataDefinition; import it.unibz.krdb.sql.TableDefinition; import it.unibz.krdb.sql.ViewDefinition; import java.sql.Types; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import org.slf4j.LoggerFactory; public class SQLGenerator implements SQLQueryGenerator { private static final long serialVersionUID = 7477161929752147045L; /** * Operator symbols */ private static final String EQ_OPERATOR = "%s = %s"; private static final String NEQ_OPERATOR = "%s <> %s"; private static final String GT_OPERATOR = "%s > %s"; private static final String GTE_OPERATOR = "%s >= %s"; private static final String LT_OPERATOR = "%s < %s"; private static final String LTE_OPERATOR = "%s <= %s"; private static final String AND_OPERATOR = "%s AND %s"; private static final String OR_OPERATOR = "%s OR %s"; private static final String NOT_OPERATOR = "NOT %s"; private static final String IS_NULL_OPERATOR = "%s IS NULL"; private static final String IS_NOT_NULL_OPERATOR = "%s IS NOT NULL"; private static final String ADD_OPERATOR = "%s + %s"; private static final String SUBSTRACT_OPERATOR = "%s - %s"; private static final String MULTIPLY_OPERATOR = "%s * %s"; private static final String INDENT = " "; private static final String IS_TRUE_OPERATOR = "%s IS TRUE"; //private static final String IS_TRUE_BOOL = "%s"; //private static final String IS_TRUE_INT = "%s > 0"; //private static final String IS_TRUE_DOUBLE = "%s > 0"; //private static final String IS_TRUE_STRING = "LENGTH(%s) > 0"; /** * Formatting template */ private static final String VIEW_NAME = "QVIEW%s"; private final DBMetadata metadata; private final JDBCUtility jdbcutil; private final SQLDialectAdapter sqladapter; private static final org.slf4j.Logger log = LoggerFactory .getLogger(SQLGenerator.class); public SQLGenerator(DBMetadata metadata, JDBCUtility jdbcutil, SQLDialectAdapter sqladapter) { this.metadata = metadata; this.jdbcutil = jdbcutil; this.sqladapter = sqladapter; } /*** * Generates and SQL query ready to be executed by Quest. Each query is a * SELECT FROM WHERE query. 
To know more about each of these see the inner * method descriptions. */ @Override public String generateSourceQuery(DatalogProgram query, List<String> signature) throws OBDAException { String indent = " "; if (query.getQueryModifiers().hasModifiers()) { final String outerViewName = "SUB_QVIEW"; String subquery = generateQuery(query, signature, indent); String modifier = ""; List<OrderCondition> conditions = query.getQueryModifiers() .getSortConditions(); if (!conditions.isEmpty()) { modifier += sqladapter.sqlOrderBy(conditions, outerViewName) + "\n"; } long limit = query.getQueryModifiers().getLimit(); long offset = query.getQueryModifiers().getOffset(); if (limit != -1 || offset != -1) { modifier += sqladapter.sqlSlice(limit, offset) + "\n"; } String sql = "SELECT *\n"; sql += "FROM (\n"; sql += subquery + "\n"; sql += ") " + outerViewName + "\n"; sql += modifier; return sql; } else { return generateQuery(query, signature, ""); } } /*** * Main method. Generates the full query, taking into account * limit/offset/order by. * * @param query * @param signature * @param indent * @return * @throws OBDAException */ private String generateQuery(DatalogProgram query, List<String> signature, String indent) throws OBDAException { boolean distinct = query.getQueryModifiers().isDistinct(); int numberOfQueries = query.getRules().size(); List<String> queriesStrings = new LinkedList<String>(); /* Main loop, constructing the SPJ query for each CQ */ for (CQIE cq : query.getRules()) { /* * Here we normalize so that the form of the CQ is as close to the * form of a normal SQL algebra as possible, particularly, no shared * variables, only joins by means of equality. Also, equalities in * nested expressions (JOINS) are kept at their respective levels to * generate correct ON and wHERE clauses. */ log.debug("Before pushing equalities: \n{}", cq); DatalogNormalizer.pushEqualities(cq, false); log.debug("Before folding Joins: \n{}", cq); DatalogNormalizer.foldJoinTrees(cq, false); log.debug("Before pulling out equalities: \n{}", cq); DatalogNormalizer.pullOutEqualities(cq); log.debug("Before pulling out Left Join Conditions: \n{}", cq); DatalogNormalizer.pullOutLeftJoinConditions(cq); log.debug("Before pulling up nested references: \n{}", cq); DatalogNormalizer.pullUpNestedReferences(cq, false); log.debug("Before adding trivial equalities: \n{}, cq);", cq); DatalogNormalizer.addMinimalEqualityToLeftJoin(cq); log.debug("Normalized CQ: \n{}", cq); Predicate headPredicate = cq.getHead().getFunctionSymbol(); if (!headPredicate.getName().toString().equals("ans1")) { // not a target query, skip it. continue; } QueryAliasIndex index = new QueryAliasIndex(cq); boolean innerdistincts = false; if (distinct && numberOfQueries == 1) { innerdistincts = true; } String FROM = getFROM(cq, index); String WHERE = getWHERE(cq, index); String SELECT = getSelectClause(signature, cq, index, innerdistincts); String querystr = SELECT + FROM + WHERE; queriesStrings.add(querystr); } Iterator<String> queryStringIterator = queriesStrings.iterator(); StringBuffer result = new StringBuffer(); if (queryStringIterator.hasNext()) { result.append(queryStringIterator.next()); } String UNION = null; if (distinct) UNION = "UNION"; else UNION = "UNION ALL"; while (queryStringIterator.hasNext()) { result.append("\n"); result.append(UNION); result.append("\n\n"); result.append(queryStringIterator.next()); } return result.toString(); } /*** * Returns a string with boolean conditions formed with the boolean atoms * found in the atoms list. 
*/ private LinkedHashSet<String> getBooleanConditionsString( List<Function> atoms, QueryAliasIndex index) { LinkedHashSet<String> conditions = new LinkedHashSet<String>(); for (int atomidx = 0; atomidx < atoms.size(); atomidx++) { NewLiteral innerAtom = atoms.get(atomidx); Function innerAtomAsFunction = (Function) innerAtom; if (innerAtomAsFunction.isDataFunction() || innerAtomAsFunction.isAlgebraFunction()) continue; /* This is a boolean atom */ String condition = getSQLCondition(innerAtomAsFunction, index); conditions.add(condition); } return conditions; } /*** * Returns the SQL for an atom representing an SQL condition (booleans) * * @param atom * @param index * @return */ private String getSQLCondition(Function atom, QueryAliasIndex index) { Predicate functionSymbol = atom.getFunctionSymbol(); if (isUnary(atom)) { // For unary boolean operators, e.g., NOT, IS NULL, IS NOT NULL. // added also for IS TRUE String expressionFormat = getBooleanOperatorString(functionSymbol); NewLiteral term = atom.getTerm(0); String column = getSQLString(term, index, false); if (expressionFormat.contains("NOT %s") ) { // find data type of term and evaluate accordingly //int type = 8; if (term instanceof Function) { Function f = (Function) term; if (!f.isDataTypeFunction()) return String.format(expressionFormat, column); } int type = getVariableDataType(term, index); if (type == Types.INTEGER) return String.format("NOT %s > 0", column); if (type == Types.DOUBLE) return String.format("NOT %s > 0", column); if (type == Types.BOOLEAN) return String.format("NOT %s", column); if (type == Types.VARCHAR) return String.format("NOT LENGTH(%s) > 0", column); return "0;"; } if (expressionFormat.contains("IS TRUE")) { // find data type of term and evaluate accordingly //int type = 8; int type = getVariableDataType(term, index); if (type == Types.INTEGER) return String.format("%s > 0", column); if (type == Types.DOUBLE) return String.format("%s > 0", column); if (type == Types.BOOLEAN) return String.format("%s", column); if (type == Types.VARCHAR) return String.format("LENGTH(%s) > 0", column); return "1;"; } return String.format(expressionFormat, column); } else if (isBinary(atom)) { if (atom.isBooleanFunction()) { // For binary boolean operators, e.g., AND, OR, EQ, GT, LT, etc. 
_ String expressionFormat = getBooleanOperatorString(functionSymbol); NewLiteral left = atom.getTerm(0); NewLiteral right = atom.getTerm(1); String leftOp = getSQLString(left, index, true); String rightOp = getSQLString(right, index, true); return String.format("(" + expressionFormat + ")", leftOp, rightOp); } else if (atom.isArithmeticFunction()) { // For numerical operators, e.g., MUTLIPLY, SUBSTRACT, ADDITION String expressionFormat = getNumericalOperatorString(functionSymbol); NewLiteral left = atom.getTerm(0); NewLiteral right = atom.getTerm(1); String leftOp = getSQLString(left, index, true); String rightOp = getSQLString(right, index, true); return String.format("(" + expressionFormat + ")", leftOp, rightOp); } else { throw new RuntimeException("The binary function " + functionSymbol.toString() + " is not supported yet!"); } } else { if (functionSymbol == OBDAVocabulary.SPARQL_REGEX) { boolean caseinSensitive = false; if (atom.getArity() == 3) { if (atom.getTerm(2).toString().contains("i")) { caseinSensitive = true; } } NewLiteral p1 = atom.getTerm(0); NewLiteral p2 = atom.getTerm(1); String column = getSQLString(p1, index, false); String pattern = getSQLString(p2, index, false); return sqladapter.sqlRegex(column, pattern, caseinSensitive); } else { throw new RuntimeException("The builtin function " + functionSymbol.toString() + " is not supported yet!"); } } } /*** * Returns the table definition for these atoms. By default, a list of atoms * represents JOIN or LEFT JOIN of all the atoms, left to right. All boolean * atoms in the list are considered conditions in the ON clause of the JOIN. * * <p> * If the list is a LeftJoin, then it can only have 2 data atoms, and it HAS * to have 2 data atoms. * * <p> * If process boolean operators is enabled, all boolean conditions will be * added to the ON clause of the first JOIN. * * @param atoms * @param index * @param isTopLevel * indicates if the list of atoms is actually the main body of * the conjunctive query. If it is, no JOIN is generated, but a * cross product with WHERE clause. Moreover, the isLeftJoin * argument will be ignored. * * @return */ private String getTableDefinitions(List<Function> inneratoms, QueryAliasIndex index, boolean isTopLevel, boolean isLeftJoin, String indent) { /* * We now collect the view definitions for each data atom each * condition, and each each nested Join/LeftJoin */ List<String> tableDefinitions = new LinkedList<String>(); for (int atomidx = 0; atomidx < inneratoms.size(); atomidx++) { NewLiteral innerAtom = inneratoms.get(atomidx); Function innerAtomAsFunction = (Function) innerAtom; String definition = getTableDefinition(innerAtomAsFunction, index, indent + INDENT); if (!definition.isEmpty()) { tableDefinitions.add(definition); } } /* * Now we generate the table definition, this will be either a comma * separated list for TOP level (FROM clause) or a Join/LeftJoin * (possibly nested if there are more than 2 table definitions in the * current list) in case this method was called recursively. 
*/ StringBuffer tableDefinitionsString = new StringBuffer(); int size = tableDefinitions.size(); if (isTopLevel) { if (size == 0) throw new RuntimeException("No table definitions"); Iterator<String> tableDefinitionsIterator = tableDefinitions .iterator(); tableDefinitionsString.append(indent); tableDefinitionsString.append(tableDefinitionsIterator.next()); while (tableDefinitionsIterator.hasNext()) { tableDefinitionsString.append(",\n"); tableDefinitionsString.append(indent); tableDefinitionsString.append(tableDefinitionsIterator.next()); } } else { /* * This is actually a Join or LeftJoin, so we form the JOINs/LEFT * JOINs and the ON clauses */ String JOIN_KEYWORD = null; if (isLeftJoin) { JOIN_KEYWORD = "LEFT OUTER JOIN"; } else { JOIN_KEYWORD = "JOIN"; } String JOIN = "\n" + indent + "(\n" + indent + "%s\n" + indent + JOIN_KEYWORD + "\n" + indent + "%s\n" + indent + ")"; if (size == 0) { throw new RuntimeException( "Cannot generate definition for empty data"); } if (size == 1) { return tableDefinitions.get(0); } /* * To form the JOIN we will cycle through each data definition, * nesting the JOINs as we go. The conditions in the ON clause will * go on the TOP level only. */ String currentJoin = String.format(JOIN, tableDefinitions.get(size - 2), tableDefinitions.get(size - 1)); tableDefinitions.remove(size - 1); tableDefinitions.remove(size - 2); int currentSize = tableDefinitions.size(); while (currentSize > 0) { currentJoin = String.format(JOIN, tableDefinitions.get(currentSize - 1), currentJoin); tableDefinitions.remove(currentSize - 1); currentSize = tableDefinitions.size(); } tableDefinitions.add(currentJoin); tableDefinitionsString.append(currentJoin); /* * If there are ON conditions we add them now. We need to remove the * last parenthesis ')' and replace it with ' ON %s)' where %s are * all the conditions */ String conditions = getConditionsString(inneratoms, index, true, indent); if (conditions.length() > 0 && tableDefinitionsString.lastIndexOf(")") != -1) { int lastidx = tableDefinitionsString.lastIndexOf(")"); tableDefinitionsString.delete(lastidx, tableDefinitionsString.length()); // tableDefinitionsString.deleteCharAt(tableDefinitionsString // .length() - 1); String ON_CLAUSE = String.format("ON\n%s\n " + indent + ")", conditions); tableDefinitionsString.append(ON_CLAUSE); } } return tableDefinitionsString.toString(); } /*** * Returns the table definition for the given atom. If the atom is a simple * table or view, then it returns the value as defined by the * QueryAliasIndex. If the atom is a Join or Left Join, it will call * getTableDefinitions on the nested term list. 
* * @param atom * @param index * @return */ private String getTableDefinition(Function atom, QueryAliasIndex index, String indent) { Predicate predicate = atom.getPredicate(); if (predicate instanceof BooleanOperationPredicate || predicate instanceof NumericalOperationPredicate || predicate instanceof DataTypePredicate) { // These don't participate in the FROM clause return ""; } else if (predicate instanceof AlgebraOperatorPredicate) { List<Function> innerTerms = new LinkedList<Function>(); for (NewLiteral innerTerm : atom.getTerms()) innerTerms.add((Function) innerTerm); if (predicate == OBDAVocabulary.SPARQL_JOIN) { return getTableDefinitions(innerTerms, index, false, false, indent + INDENT); } else if (predicate == OBDAVocabulary.SPARQL_LEFTJOIN) { return getTableDefinitions(innerTerms, index, false, true, indent + INDENT); } } /* * This is a data atom */ String def = index.getViewDefinition(atom); return def; } private String getFROM(CQIE query, QueryAliasIndex index) { List<Function> atoms = new LinkedList<Function>(); for (Function atom : query.getBody()) atoms.add((Function) atom); String tableDefinitions = getTableDefinitions(atoms, index, true, false, ""); return "\n FROM \n" + tableDefinitions; } /*** * Generates all the conditions on the given atoms, e.g., shared variables * and boolean conditions. This string can then be used to form a WHERE or * an ON clause. * * <p> * The method assumes that no variable in this list (or nested ones) referes * to an upper level one. * * @param atoms * @param index * @return */ private String getConditionsString(List<Function> atoms, QueryAliasIndex index, boolean processShared, String indent) { LinkedHashSet<String> equalityConditions = new LinkedHashSet<String>(); // if (processShared) equalityConditions.addAll(getConditionsSharedVariablesAndConstants( atoms, index, processShared)); LinkedHashSet<String> booleanConditions = getBooleanConditionsString( atoms, index); LinkedHashSet<String> conditions = new LinkedHashSet<String>(); conditions.addAll(equalityConditions); conditions.addAll(booleanConditions); /* * Collecting all the conditions in a single string for the ON or WHERE * clause */ StringBuffer conditionsString = new StringBuffer(); Iterator<String> conditionsIterator = conditions.iterator(); if (conditionsIterator.hasNext()) { conditionsString.append(indent); conditionsString.append(conditionsIterator.next()); } while (conditionsIterator.hasNext()) { conditionsString.append(" AND\n"); conditionsString.append(indent); conditionsString.append(conditionsIterator.next()); } return conditionsString.toString(); } /*** * Returns the set of variables that participate data atoms (either in this * atom directly or in nested ones). This will recursively collect the * variables references in in this atom, exlcuding those on the right side * of left joins. * * @param atom * @return */ private Set<Variable> getVariableReferencesWithLeftJoin(Function atom) { if (atom.isDataFunction()) return atom.getVariables(); if (atom.isBooleanFunction()) return new HashSet<Variable>(); /* * we have an alebra opertaor (join or left join) if its a join, we need * to collect all the varaibles of each nested atom., if its a left * join, only of the first data/algebra atom (the left atom). 
*/ boolean isLeftJoin = false; boolean foundFirstDataAtom = false; if (atom.getFunctionSymbol() == OBDAVocabulary.SPARQL_LEFTJOIN) isLeftJoin = true; LinkedHashSet<Variable> innerVariables = new LinkedHashSet<Variable>(); for (NewLiteral t : atom.getTerms()) { if (isLeftJoin && foundFirstDataAtom) break; Function asFunction = (Function) t; if (asFunction.isBooleanFunction()) continue; innerVariables.addAll(getVariableReferencesWithLeftJoin(asFunction .asAtom())); foundFirstDataAtom = true; } return innerVariables; } /*** * Returns a list of equality conditions that reflect the semantics of the * shared variables in the list of atoms. * <p> * The method assumes that no variables are shared across deeper levels of * nesting (through Join or LeftJoin atoms), it will not call itself * recursively. Nor across upper levels. * * <p> * When generating equalities recursively, we will also generate a minimal * number of equalities. E.g., if we have A(x), Join(R(x,y), Join(R(y, * x),B(x)) * */ private LinkedHashSet<String> getConditionsSharedVariablesAndConstants( List<Function> atoms, QueryAliasIndex index, boolean processShared) { LinkedHashSet<String> equalities = new LinkedHashSet<String>(); Set<Variable> currentLevelVariables = new LinkedHashSet<Variable>(); if (processShared) for (Function atom : atoms) { currentLevelVariables .addAll(getVariableReferencesWithLeftJoin(atom)); // if (atom.isDataFunction()) { // currentLevelVariables.addAll(atom.getReferencedVariables()); // } else if (atom.isAlgebraFunction()) { // currentLevelVariables.addAll(atom.getReferencedVariables()); // } } /* * For each variable we collect all the columns that shold be equated * (due to repeated positions of the variable). then we form atoms of * the form "COL1 = COL2" */ for (Variable var : currentLevelVariables) { Set<String> references = index.getColumnReferences(var); if (references.size() < 2) { // No need for equality continue; } Iterator<String> referenceIterator = references.iterator(); String leftColumnReference = referenceIterator.next(); while (referenceIterator.hasNext()) { String rightColumnReference = referenceIterator.next(); String equality = String.format("(%s = %s)", leftColumnReference, rightColumnReference); equalities.add(equality); leftColumnReference = rightColumnReference; } } for (Function atom : atoms) { if (!atom.isDataFunction()) continue; for (int idx = 0; idx < atom.getArity(); idx++) { NewLiteral l = atom.getTerm(idx); if (l instanceof Constant) { String value = getSQLString(l, index, false); String columnReference = index .getColumnReference(atom, idx); equalities.add(String.format("(%s = %s)", columnReference, value)); } } } return equalities; } // return variable SQL data type private int getVariableDataType (NewLiteral term, QueryAliasIndex idx) { Function f = (Function) term; if (f.isDataTypeFunction()) { Predicate p = f.getPredicate(); if (p.toString() == OBDAVocabulary.XSD_BOOLEAN_URI) return Types.BOOLEAN; if (p.toString() == OBDAVocabulary.XSD_INT_URI) return Types.INTEGER; if (p.toString() == OBDAVocabulary.XSD_INTEGER_URI) return Types.INTEGER; if (p.toString() == OBDAVocabulary.XSD_DOUBLE_URI) return Types.DOUBLE; if (p.toString() == OBDAVocabulary.XSD_STRING_URI) return Types.VARCHAR; if (p.toString() == OBDAVocabulary.RDFS_LITERAL_URI) return Types.VARCHAR; } // Return varchar for unknown return 12; } // private Set<Variable> getMandatoryColumnsOnJoinsAndLeftJoinsRecursively( // Atom atom) { // if (atom.isDataFunction()) { // return atom.getReferencedVariables(); // } else if 
(atom.isBooleanFunction()) // return new HashSet<Variable>(); // /* atom is an alebra function */ // Predicate pred = atom.getFunctionSymbol(); // boolean isLeftJoin = true; // if (pred == OBDAVocabulary.SPARQL_JOIN) { // isLeftJoin = false; // } // // /* If its a normal join, all nexted variables are required variables, if // its // * // */ // // } private String getWHERE(CQIE query, QueryAliasIndex index) { List<Function> atoms = new LinkedList<Function>(); for (Function atom : query.getBody()) atoms.add((Function) atom); String conditions = getConditionsString(atoms, index, false, ""); if (conditions.length() == 0) return ""; return "\nWHERE \n" + conditions; } /** * produces the select clause of the sql query for the given CQIE * * @param q * the query * @return the sql select clause */ private String getSelectClause(List<String> signature, CQIE query, QueryAliasIndex index, boolean distinct) throws OBDAException { /* * If the head has size 0 this is a boolean query. */ List<NewLiteral> headterms = query.getHead().getTerms(); StringBuilder sb = new StringBuilder(); sb.append("SELECT "); if (distinct) sb.append("DISTINCT "); if (headterms.size() == 0) { sb.append("true as x"); return sb.toString(); } Iterator<NewLiteral> hit = headterms.iterator(); int hpos = 0; while (hit.hasNext()) { NewLiteral ht = hit.next(); String typeColumn = getTypeColumnForSELECT(ht, signature, hpos); String langColumn = getLangColumnForSELECT(ht, signature, hpos, index); String mainColumn = getMainColumnForSELECT(ht, signature, hpos, index); sb.append("\n "); sb.append(typeColumn); sb.append(", "); sb.append(langColumn); sb.append(", "); sb.append(mainColumn); if (hit.hasNext()) { sb.append(", "); } hpos++; } return sb.toString(); } private String getMainColumnForSELECT(NewLiteral ht, List<String> signature, int hpos, QueryAliasIndex index) { String mainColumn = null; String mainTemplate = "%s AS %s"; if (ht instanceof URIConstant) { URIConstant uc = (URIConstant) ht; mainColumn = jdbcutil.getSQLLexicalForm(uc.getURI().toString()); } else if (ht == OBDAVocabulary.NULL) { mainColumn = "NULL"; } else if (ht instanceof Function) { /* * if it's a function we need to get the nested value if its a * datatype function or we need to do the CONCAT if its URI(....). */ Function ov = (Function) ht; Predicate function = ov.getFunctionSymbol(); String functionString = function.toString(); /* * Adding the column(s) with the actual value(s) */ if (function instanceof DataTypePredicate) { /* * Case where we have a typing function in the head (this is the * case for all literal columns */ NewLiteral term = ov.getTerms().get(0); String termStr = null; if (term instanceof ValueConstant) { termStr = jdbcutil.getSQLLexicalForm((ValueConstant) term); } else { termStr = getSQLString(term, index, false); } mainColumn = termStr; } else if (functionString.equals(OBDAVocabulary.QUEST_URI)) { /*** * New template based URI building functions */ mainColumn = getSQLStringForURIFunction(ov, index); } else if (functionString.equals(OBDAVocabulary.QUEST_BNODE)) { /*** * New template based URI building functions */ mainColumn = getSQLStringForBNodeFunction(ov, index); } else { throw new IllegalArgumentException( "Error generating SQL query. Contact the developers. 
Found an invalid function during translation: " + ov.toString()); } } else { throw new RuntimeException("Cannot generate SELECT for term: " + ht.toString()); } /* * If the we have a column we need to still CAST to VARCHAR */ if (mainColumn.charAt(0) != '\'' && mainColumn.charAt(0) != '(') mainColumn = sqladapter.sqlCast(mainColumn, Types.VARCHAR); return String.format(mainTemplate, mainColumn, sqladapter.sqlQuote(signature.get(hpos))); } private String getLangColumnForSELECT(NewLiteral ht, List<String> signature, int hpos, QueryAliasIndex index) { String langStr = "%s AS \"%sLang\""; if (ht instanceof Function) { Function ov = (Function) ht; Predicate function = ov.getFunctionSymbol(); if (function == OBDAVocabulary.RDFS_LITERAL && ov.getTerms().size() > 1) { /* * Case for rdf:literal s with a language, we need to select 2 * terms from ".., rdf:literal(?x,"en"), * * and signature "name" * we will generate a select with the * projection of 2 columns * * , 'en' as nameqlang, view.colforx as name, */ String lang = null; NewLiteral langTerm = ov.getTerms().get(1); if (langTerm == OBDAVocabulary.NULL) lang = "NULL"; if (langTerm instanceof ValueConstant) { lang = jdbcutil.getSQLLexicalForm((ValueConstant) langTerm); } else { lang = getSQLString(langTerm, index, false); } return (String.format(langStr, lang, signature.get(hpos))); } } return (String.format(langStr, "NULL", signature.get(hpos))); } private String getTypeColumnForSELECT(NewLiteral ht, List<String> signature, int hpos) { String typeStr = "%s AS \"%sQuestType\""; if (ht instanceof Function) { Function ov = (Function) ht; Predicate function = ov.getFunctionSymbol(); String functionString = function.toString(); /* * Adding the ColType column to the projection (used in the result * set to know the type of constant) */ if (functionString.equals(OBDAVocabulary.XSD_BOOLEAN.getName() .toString())) { return (String.format(typeStr, 9, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_DATETIME .getName().toString())) { return (String.format(typeStr, 8, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_DECIMAL .getName().toString())) { return (String.format(typeStr, 5, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_DOUBLE .getName().toString())) { return (String.format(typeStr, 6, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_INTEGER .getName().toString())) { return (String.format(typeStr, 4, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_STRING .getName().toString())) { return (String.format(typeStr, 7, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.RDFS_LITERAL .getName().toString())) { return (String.format(typeStr, 3, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.QUEST_URI)) { return (String.format(typeStr, 1, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.QUEST_BNODE)) { return (String.format(typeStr, 2, signature.get(hpos))); } } else if (ht instanceof URIConstant) { return (String.format(typeStr, 1, signature.get(hpos))); } else if (ht == OBDAVocabulary.NULL) { return (String.format(typeStr, 0, signature.get(hpos))); } throw new RuntimeException("Cannot generate SELECT for term: " + ht.toString()); } /*** * Returns the SQL that builds a URI String out of an atom of the form * uri("htttp:...", x, y,...) 
* * @param ov * @param index * @return */ public String getSQLStringForURIFunction(Function ov, QueryAliasIndex index) { /* * The first inner term determines the form of the result */ NewLiteral t = ov.getTerms().get(0); if (t instanceof ValueConstant) { /* * The function is actually a template. The first parameter is a * string of the form http://.../.../ with place holders of the form * {}. The rest are variables or constants that should be put in * place of the palce holders. We need to tokenize and form the * CONCAT */ ValueConstant c = (ValueConstant) t; if (c.getValue().equals("{}")) { return getSQLString(ov.getTerms().get(1), index, false); } else { StringTokenizer tokenizer = new StringTokenizer(c.getValue(), "{}"); String functionString = jdbcutil.getSQLLexicalForm(tokenizer.nextToken()); List<String> vex = new LinkedList<String>(); /* * New we concat the rest of the function, note that if there is only 1 element * there is nothing to concatenate */ if (ov.getTerms().size() > 1) { for (int termIndex = 1; termIndex < ov.getTerms().size(); termIndex++) { NewLiteral currentTerm = ov.getTerms().get(termIndex); String repl = "REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(" + "REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(" + sqladapter.sqlCast(getSQLString(currentTerm, index, false), Types.VARCHAR) + ",' ', '%20')," + "'!', '%21')," + "'@', '%40'),"+ "'#', '%23')," + "'$', '%24'),"+ "'&', '%26'),"+ "'*', '%42'), "+ "'(', '%28'), "+ "')', '%29'), "+ "'[', '%5B'), "+ "']', '%5D'), "+ "',', '%2C'), "+ "';', '%3B'), "+ "':', '%3A'), "+ "'?', '%3F'), "+ "'=', '%3D'), "+ "'+', '%2B'), "+ "'''', '%22'), "+ "'/', '%2F')"; vex.add(repl); if (tokenizer.hasMoreTokens()) { vex.add(jdbcutil.getSQLLexicalForm(tokenizer.nextToken())); } // termIndex += 1; } } String[] params = new String[vex.size() + 1]; int i = 0; params[i] = functionString; i += 1; for (String param : vex) { params[i] = param; i += 1; } return sqladapter.strconcat(params); } } else if (t instanceof Variable) { /* * The function is of the form uri(x), we need to simply return the * value of X */ return getSQLString(((Variable) t), index, false); } else if (t instanceof URIConstant) { URIConstant uc = (URIConstant) t; /* * The function is of the form uri("http://some.uri/"), i.e., a * concrete URI, we return the string representing that URI. */ return jdbcutil.getSQLLexicalForm(uc.getURI().toString()); } /* * Unsupported case */ throw new IllegalArgumentException( "Error, cannot generate URI constructor clause for a term. Contact the authors. Term: " + ov.toString()); } /*** * Returns the SQL that builds a URI String out of an atom of the form * uri("http:...", x, y,...) * * @param ov * @param index * @return */ public String getSQLStringForBNodeFunction(Function ov, QueryAliasIndex index) { /* * The first inner term determines the form of the result */ NewLiteral t = ov.getTerms().get(0); if (t instanceof ValueConstant) { /* * The function is actually a template. The first parameter is a * string of the form http://.../.../ with place holders of the form * {}. The rest are variables or constants that should be put in * place of the palce holders. 
We need to tokenize and form the * CONCAT */ ValueConstant c = (ValueConstant) t; StringTokenizer tokenizer = new StringTokenizer(c.toString(), "{}"); String functionString = jdbcutil.getSQLLexicalForm(tokenizer .nextToken()); List<String> vex = new LinkedList<String>(); int termIndex = 1; do { NewLiteral currentTerm = ov.getTerms().get(termIndex); vex.add(getSQLString(currentTerm, index, false)); if (tokenizer.hasMoreTokens()) { vex.add(jdbcutil.getSQLLexicalForm(tokenizer.nextToken())); } termIndex += 1; } while (tokenizer.hasMoreElements() || termIndex < ov.getTerms().size()); String[] params = new String[vex.size() + 1]; int i = 0; params[i] = functionString; i += 1; for (String param : vex) { params[i] = param; i += 1; } return sqladapter.strconcat(params); } else if (t instanceof Variable) { /* * The function is of the form uri(x), we need to simply return the * value of X */ return getSQLString(((Variable) t), index, false); } else if (t instanceof URIConstant) { URIConstant uc = (URIConstant) t; /* * The function is of the form uri("http://some.uri/"), i.e., a * concrete URI, we return the string representing that URI. */ return jdbcutil.getSQLLexicalForm(uc.getURI().toString()); } /* * Unsupported case */ throw new IllegalArgumentException( "Error, cannot generate URI constructor clause for a term. Contact the authors. Term: " + ov.toString()); } /** * Determines if it is a unary function. */ private boolean isUnary(Function fun) { return (fun.getArity() == 1) ? true : false; } /** * Determines if it is a binary function. */ private boolean isBinary(Function fun) { return (fun.getArity() == 2) ? true : false; } /*** * Generates the SQL string that forms or retrieves the given term. The * function takes as input either: a constant (value or URI), a variable, or * a Function (i.e., uri(), eq(..), ISNULL(..), etc)). * <p> * If the input is a constant, it will return the SQL that generates the * string representing that constant. * <p> * If its a variable, it returns the column references to the position where * the variable first appears. * <p> * If its a function uri(..) it returns the SQL string concatenation that * builds the result of uri(...) * <p> * If its a boolean comparison, it returns the corresponding SQL comparison. 
* * @param term * @param index * @param useBrackets * @return */ public String getSQLString(NewLiteral term, QueryAliasIndex index, boolean useBrackets) { if (term == null) { return ""; } if (term instanceof ValueConstant) { ValueConstant ct = (ValueConstant) term; return jdbcutil.getSQLLexicalForm(ct); } else if (term instanceof URIConstant) { URIConstant uc = (URIConstant) term; return jdbcutil.getSQLLexicalForm(uc.toString()); } else if (term instanceof Variable) { Variable var = (Variable) term; LinkedHashSet<String> posList = index.getColumnReferences(var); if (posList == null || posList.size() == 0) { throw new RuntimeException( "Unbound variable found in WHERE clause: " + term); } return posList.iterator().next(); } /* If its not constant, or variable its a function */ Function function = (Function) term; Predicate functionSymbol = function.getFunctionSymbol(); NewLiteral term1 = function.getTerms().get(0); if (functionSymbol instanceof DataTypePredicate) { if (functionSymbol.getType(0) == COL_TYPE.UNSUPPORTED) { throw new RuntimeException("Unsupported type in the query: " + function); } /* atoms of the form integer(x) */ return getSQLString(term1, index, false); } else if (functionSymbol instanceof BooleanOperationPredicate) { /* atoms of the form EQ(x,y) */ String expressionFormat = getBooleanOperatorString(functionSymbol); if (isUnary(function)) { // for unary functions, e.g., NOT, IS NULL, IS NOT NULL // also added for IS TRUE if (expressionFormat.contains("IS TRUE")) { // find data type of term and evaluate accordingly //int type = 8; String column = getSQLString(term1, index, false); int type = getVariableDataType(term1, index); if (type == Types.INTEGER) return String.format("%s > 0", column); if (type == Types.DOUBLE) return String.format("%s > 0", column); if (type == Types.BOOLEAN) return String.format("%s", column); if (type == Types.VARCHAR) return String.format("LENGTH(%s) > 0", column); return "1"; } String op = getSQLString(term1, index, true); return String.format(expressionFormat, op); } else if (isBinary(function)) { // for binary functions, e.g., AND, OR, EQ, NEQ, GT, etc. 
String leftOp = getSQLString(term1, index, true); NewLiteral term2 = function.getTerms().get(1); String rightOp = getSQLString(term2, index, true); String result = String .format(expressionFormat, leftOp, rightOp); if (useBrackets) { return String.format("(%s)", result); } else { return result; } } else { throw new RuntimeException( "Cannot translate boolean function: " + functionSymbol); } } else if (functionSymbol instanceof NumericalOperationPredicate) { String expressionFormat = getNumericalOperatorString(functionSymbol); String leftOp = getSQLString(term1, index, true); NewLiteral term2 = function.getTerms().get(1); String rightOp = getSQLString(term2, index, true); String result = String.format(expressionFormat, leftOp, rightOp); if (useBrackets) { return String.format("(%s)", result); } else { return result; } } else { String functionName = functionSymbol.toString(); if (functionName.equals(OBDAVocabulary.QUEST_CAST_STR)) { String columnName = getSQLString(function.getTerm(0), index, false); String datatype = ((Constant) function.getTerm(1)).getValue(); int sqlDatatype = -1; if (datatype.equals(OBDAVocabulary.XSD_STRING_URI)) { sqlDatatype = Types.VARCHAR; } return sqladapter.sqlCast(columnName, sqlDatatype); } else if (functionName.equals(OBDAVocabulary.SPARQL_STR_URI)) { String columnName = getSQLString(function.getTerm(0), index, false); return sqladapter.sqlCast(columnName, Types.VARCHAR); } } /* * The atom must be of the form uri("...", x, y) */ String functionName = function.getFunctionSymbol().toString(); if (functionName.equals(OBDAVocabulary.QUEST_URI) || functionName.equals(OBDAVocabulary.QUEST_BNODE)) { return getSQLStringForURIFunction(function, index); } else { throw new RuntimeException("Unexpected function in the query: " + functionSymbol); } } /*** * Returns the SQL string for the boolean operator, including placeholders * for the terms to be used, e.g., %s = %s, %s IS NULL, etc. 
* * @param functionSymbol * @return */ private String getBooleanOperatorString(Predicate functionSymbol) { String operator = null; if (functionSymbol.equals(OBDAVocabulary.EQ)) { operator = EQ_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.NEQ)) { operator = NEQ_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.GT)) { operator = GT_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.GTE)) { operator = GTE_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.LT)) { operator = LT_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.LTE)) { operator = LTE_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.AND)) { operator = AND_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.OR)) { operator = OR_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.NOT)) { operator = NOT_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.IS_NULL)) { operator = IS_NULL_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.IS_NOT_NULL)) { operator = IS_NOT_NULL_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.IS_TRUE)) { operator = IS_TRUE_OPERATOR; } else { throw new RuntimeException("Unknown boolean operator: " + functionSymbol); } return operator; } private String getNumericalOperatorString(Predicate functionSymbol) { String operator = null; if (functionSymbol.equals(OBDAVocabulary.ADD)) { operator = ADD_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.SUBSTRACT)) { operator = SUBSTRACT_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.MULTIPLY)) { operator = MULTIPLY_OPERATOR; } else { throw new RuntimeException("Unknown numerical operator: " + functionSymbol); } return operator; } /*** * Utility class to resolve "database" atoms to view definitions ready to be * used in a FROM clause, and variables, to column references defined over * the existing view definitons of a query. * * * @author mariano * */ public class QueryAliasIndex { Map<Function, String> viewNames = new HashMap<Function, String>(); Map<Function, String> tableNames = new HashMap<Function, String>(); Map<Function, DataDefinition> dataDefinitions = new HashMap<Function, DataDefinition>(); Map<Variable, LinkedHashSet<String>> columnReferences = new HashMap<Variable, LinkedHashSet<String>>(); int dataTableCount = 0; boolean isEmpty = false; public QueryAliasIndex(CQIE query) { List<Atom> body = query.getBody(); generateViews(body); } private void generateViews(List<Atom> atoms) { for (Function atom : atoms) { /* * Thios wil call recursively if necessary */ generateViewsIndexVariables(atom); } } /*** * We assiciate each atom to a view definition. This will be * <p> * "tablename" as "viewX" or * <p> * (some nested sql view) as "viewX" * * <p> * View definitions are only done for data atoms. Join/LeftJoin and * boolean atoms are not associated to view definitions. * * @param atom */ private void generateViewsIndexVariables(Function atom) { if (atom.getFunctionSymbol() instanceof BooleanOperationPredicate) { return; } else if (atom.getFunctionSymbol() instanceof AlgebraOperatorPredicate) { List<NewLiteral> lit = atom.getTerms(); for (NewLiteral subatom : lit) { if (subatom instanceof Function) { generateViewsIndexVariables((Function) subatom); } } } Predicate tablePredicate = atom.getFunctionSymbol(); String tableName = tablePredicate.toString(); DataDefinition def = metadata.getDefinition(tableName); if (def == null) { /* * There is no definition for this atom, its not a database * predicate, the query is empty. 
*/ isEmpty = true; return; } dataTableCount += 1; viewNames.put(atom, String.format(VIEW_NAME, dataTableCount)); tableNames.put(atom, tableName); dataDefinitions.put(atom, def); indexVariables(atom); } private void indexVariables(Function atom) { DataDefinition def = dataDefinitions.get(atom); String viewName = viewNames.get(atom); for (int index = 0; index < atom.getTerms().size(); index++) { NewLiteral term = atom.getTerms().get(index); if (!(term instanceof Variable)) continue; LinkedHashSet<String> references = columnReferences.get(term); if (references == null) { references = new LinkedHashSet<String>(); columnReferences.put((Variable) term, references); } String columnName = def.getAttributeName(index + 1); String reference = sqladapter.sqlQualifiedColumn(viewName, columnName); references.add(reference); } } /*** * Returns all the column aliases that correspond to this variable, * across all the DATA atoms in the query (not algebra operators or * boolean conditions. * * @param var * The variable we want the referenced columns. */ public LinkedHashSet<String> getColumnReferences(Variable var) { return columnReferences.get(var); } /*** * Generates the view definition, i.e., "tablename viewname" * * @param atom * @return */ public String getViewDefinition(Function atom) { DataDefinition def = dataDefinitions.get(atom); if (def instanceof TableDefinition) { return sqladapter.sqlTableName(tableNames.get(atom), viewNames.get(atom)); } else if (def instanceof ViewDefinition) { return String.format("(%s) %s", ((ViewDefinition) def).getStatement(), viewNames.get(atom)); } throw new RuntimeException( "Impossible to get data definition for: " + atom + ", type: " + def); } public String getView(Function atom) { return viewNames.get(atom); } public String getColumnReference(Function atom, int column) { String viewName = getView(atom); DataDefinition def = dataDefinitions.get(atom); String columnname = def.getAttributeName(column + 1); return sqladapter.sqlQualifiedColumn(viewName, columnname); } } }
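A minimal plain-Java sketch of the placeholder expansion performed by getSQLStringForURIFunction in the file above: the real method emits an SQL CONCAT over the template fragments and URL-encodes each term through the nested REPLACE chain, while this sketch only reproduces the "{}" tokenization and interleaving. The class name, template and values are hypothetical illustrations, not part of the commit.

import java.util.Arrays;
import java.util.List;
import java.util.StringTokenizer;

public class UriTemplateSketch {

    // Expand a template such as "http://example.org/person/{}" by splitting on the
    // "{}" placeholders and interleaving the literal fragments with the term values,
    // mirroring the CONCAT that getSQLStringForURIFunction builds in SQL.
    static String expand(String template, List<String> values) {
        if (template.equals("{}")) {
            // Degenerate template: the single argument already is the complete URI.
            return values.get(0);
        }
        StringTokenizer tokenizer = new StringTokenizer(template, "{}");
        StringBuilder result = new StringBuilder(tokenizer.nextToken());
        for (String value : values) {
            result.append(value);                      // placeholder value
            if (tokenizer.hasMoreTokens()) {
                result.append(tokenizer.nextToken());  // literal fragment that follows it
            }
        }
        return result.toString();
    }

    public static void main(String[] args) {
        // Prints http://example.org/person/42
        System.out.println(expand("http://example.org/person/{}", Arrays.asList("42")));
    }
}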
obdalib/reformulation-core/src/main/java/it/unibz/krdb/obda/owlrefplatform/core/sql/SQLGenerator.java
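The QueryAliasIndex inner class of SQLGenerator.java drives how data atoms become aliased views and how shared variables become join conditions. The sketch below only illustrates the alias naming pattern (VIEW_NAME = "QVIEW%s") and the pairwise equality format "(%s = %s)" used by getConditionsSharedVariablesAndConstants; the "view.column" rendering and the column name URI are assumptions standing in for SQLDialectAdapter.sqlQualifiedColumn and the actual table metadata.

public class AliasSketch {

    public static void main(String[] args) {
        String viewNamePattern = "QVIEW%s";   // same pattern as SQLGenerator.VIEW_NAME
        int dataTableCount = 0;

        // Each data atom gets the next alias, as generateViewsIndexVariables does.
        String view1 = String.format(viewNamePattern, ++dataTableCount); // QVIEW1
        String view2 = String.format(viewNamePattern, ++dataTableCount); // QVIEW2

        // A variable appearing in both atoms collects one qualified column per
        // occurrence ("view.column" is an assumed rendering of sqlQualifiedColumn) ...
        String ref1 = view1 + "." + "URI";
        String ref2 = view2 + "." + "URI";

        // ... and consecutive references are equated pairwise with the "(%s = %s)" format.
        System.out.println(String.format("(%s = %s)", ref1, ref2)); // (QVIEW1.URI = QVIEW2.URI)
    }
}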
package it.unibz.krdb.obda.owlrefplatform.core.sql; import it.unibz.krdb.obda.model.AlgebraOperatorPredicate; import it.unibz.krdb.obda.model.Atom; import it.unibz.krdb.obda.model.BooleanOperationPredicate; import it.unibz.krdb.obda.model.CQIE; import it.unibz.krdb.obda.model.Constant; import it.unibz.krdb.obda.model.DataTypePredicate; import it.unibz.krdb.obda.model.DatalogProgram; import it.unibz.krdb.obda.model.Function; import it.unibz.krdb.obda.model.NewLiteral; import it.unibz.krdb.obda.model.NumericalOperationPredicate; import it.unibz.krdb.obda.model.OBDAException; import it.unibz.krdb.obda.model.OBDAQueryModifiers.OrderCondition; import it.unibz.krdb.obda.model.Predicate; import it.unibz.krdb.obda.model.Predicate.COL_TYPE; import it.unibz.krdb.obda.model.URIConstant; import it.unibz.krdb.obda.model.ValueConstant; import it.unibz.krdb.obda.model.Variable; import it.unibz.krdb.obda.model.impl.OBDAVocabulary; import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.DatalogNormalizer; import it.unibz.krdb.obda.owlrefplatform.core.queryevaluation.JDBCUtility; import it.unibz.krdb.obda.owlrefplatform.core.queryevaluation.SQLDialectAdapter; import it.unibz.krdb.obda.owlrefplatform.core.srcquerygeneration.SQLQueryGenerator; import it.unibz.krdb.sql.DBMetadata; import it.unibz.krdb.sql.DataDefinition; import it.unibz.krdb.sql.TableDefinition; import it.unibz.krdb.sql.ViewDefinition; import java.sql.Types; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import org.slf4j.LoggerFactory; public class SQLGenerator implements SQLQueryGenerator { private static final long serialVersionUID = 7477161929752147045L; /** * Operator symbols */ private static final String EQ_OPERATOR = "%s = %s"; private static final String NEQ_OPERATOR = "%s <> %s"; private static final String GT_OPERATOR = "%s > %s"; private static final String GTE_OPERATOR = "%s >= %s"; private static final String LT_OPERATOR = "%s < %s"; private static final String LTE_OPERATOR = "%s <= %s"; private static final String AND_OPERATOR = "%s AND %s"; private static final String OR_OPERATOR = "%s OR %s"; private static final String NOT_OPERATOR = "NOT %s"; private static final String IS_NULL_OPERATOR = "%s IS NULL"; private static final String IS_NOT_NULL_OPERATOR = "%s IS NOT NULL"; private static final String ADD_OPERATOR = "%s + %s"; private static final String SUBSTRACT_OPERATOR = "%s - %s"; private static final String MULTIPLY_OPERATOR = "%s * %s"; private static final String INDENT = " "; private static final String IS_TRUE_OPERATOR = "%s IS TRUE"; //private static final String IS_TRUE_BOOL = "%s"; //private static final String IS_TRUE_INT = "%s > 0"; //private static final String IS_TRUE_DOUBLE = "%s > 0"; //private static final String IS_TRUE_STRING = "LENGTH(%s) > 0"; /** * Formatting template */ private static final String VIEW_NAME = "QVIEW%s"; private final DBMetadata metadata; private final JDBCUtility jdbcutil; private final SQLDialectAdapter sqladapter; private static final org.slf4j.Logger log = LoggerFactory .getLogger(SQLGenerator.class); public SQLGenerator(DBMetadata metadata, JDBCUtility jdbcutil, SQLDialectAdapter sqladapter) { this.metadata = metadata; this.jdbcutil = jdbcutil; this.sqladapter = sqladapter; } /*** * Generates and SQL query ready to be executed by Quest. Each query is a * SELECT FROM WHERE query. 
To know more about each of these see the inner * method descriptions. */ @Override public String generateSourceQuery(DatalogProgram query, List<String> signature) throws OBDAException { String indent = " "; if (query.getQueryModifiers().hasModifiers()) { final String outerViewName = "SUB_QVIEW"; String subquery = generateQuery(query, signature, indent); String modifier = ""; List<OrderCondition> conditions = query.getQueryModifiers() .getSortConditions(); if (!conditions.isEmpty()) { modifier += sqladapter.sqlOrderBy(conditions, outerViewName) + "\n"; } long limit = query.getQueryModifiers().getLimit(); long offset = query.getQueryModifiers().getOffset(); if (limit != -1 || offset != -1) { modifier += sqladapter.sqlSlice(limit, offset) + "\n"; } String sql = "SELECT *\n"; sql += "FROM (\n"; sql += subquery + "\n"; sql += ") " + outerViewName + "\n"; sql += modifier; return sql; } else { return generateQuery(query, signature, ""); } } /*** * Main method. Generates the full query, taking into account * limit/offset/order by. * * @param query * @param signature * @param indent * @return * @throws OBDAException */ private String generateQuery(DatalogProgram query, List<String> signature, String indent) throws OBDAException { boolean distinct = query.getQueryModifiers().isDistinct(); int numberOfQueries = query.getRules().size(); List<String> queriesStrings = new LinkedList<String>(); /* Main loop, constructing the SPJ query for each CQ */ for (CQIE cq : query.getRules()) { /* * Here we normalize so that the form of the CQ is as close to the * form of a normal SQL algebra as possible, particularly, no shared * variables, only joins by means of equality. Also, equalities in * nested expressions (JOINS) are kept at their respective levels to * generate correct ON and wHERE clauses. */ log.debug("Before pushing equalities: \n{}", cq); DatalogNormalizer.pushEqualities(cq, false); log.debug("Before folding Joins: \n{}", cq); DatalogNormalizer.foldJoinTrees(cq, false); log.debug("Before pulling out equalities: \n{}", cq); DatalogNormalizer.pullOutEqualities(cq); log.debug("Before pulling out Left Join Conditions: \n{}", cq); DatalogNormalizer.pullOutLeftJoinConditions(cq); log.debug("Before pulling up nested references: \n{}", cq); DatalogNormalizer.pullUpNestedReferences(cq, false); log.debug("Before adding trivial equalities: \n{}, cq);", cq); DatalogNormalizer.addMinimalEqualityToLeftJoin(cq); log.debug("Normalized CQ: \n{}", cq); Predicate headPredicate = cq.getHead().getFunctionSymbol(); if (!headPredicate.getName().toString().equals("ans1")) { // not a target query, skip it. continue; } QueryAliasIndex index = new QueryAliasIndex(cq); boolean innerdistincts = false; if (distinct && numberOfQueries == 1) { innerdistincts = true; } String FROM = getFROM(cq, index); String WHERE = getWHERE(cq, index); String SELECT = getSelectClause(signature, cq, index, innerdistincts); String querystr = SELECT + FROM + WHERE; queriesStrings.add(querystr); } Iterator<String> queryStringIterator = queriesStrings.iterator(); StringBuffer result = new StringBuffer(); if (queryStringIterator.hasNext()) { result.append(queryStringIterator.next()); } String UNION = null; if (distinct) UNION = "UNION"; else UNION = "UNION ALL"; while (queryStringIterator.hasNext()) { result.append("\n"); result.append(UNION); result.append("\n\n"); result.append(queryStringIterator.next()); } return result.toString(); } /*** * Returns a string with boolean conditions formed with the boolean atoms * found in the atoms list. 
*/ private LinkedHashSet<String> getBooleanConditionsString( List<Function> atoms, QueryAliasIndex index) { LinkedHashSet<String> conditions = new LinkedHashSet<String>(); for (int atomidx = 0; atomidx < atoms.size(); atomidx++) { NewLiteral innerAtom = atoms.get(atomidx); Function innerAtomAsFunction = (Function) innerAtom; if (innerAtomAsFunction.isDataFunction() || innerAtomAsFunction.isAlgebraFunction()) continue; /* This is a boolean atom */ String condition = getSQLCondition(innerAtomAsFunction, index); conditions.add(condition); } return conditions; } /*** * Returns the SQL for an atom representing an SQL condition (booleans) * * @param atom * @param index * @return */ private String getSQLCondition(Function atom, QueryAliasIndex index) { Predicate functionSymbol = atom.getFunctionSymbol(); if (isUnary(atom)) { // For unary boolean operators, e.g., NOT, IS NULL, IS NOT NULL. // added also for IS TRUE String expressionFormat = getBooleanOperatorString(functionSymbol); NewLiteral term = atom.getTerm(0); String column = getSQLString(term, index, false); if (expressionFormat.contains("NOT %s") ) { // find data type of term and evaluate accordingly //int type = 8; if (term instanceof Function) { Function f = (Function) term; if (!f.isDataTypeFunction()) return String.format(expressionFormat, column); } int type = getVariableDataType(term, index); if (type == Types.INTEGER) return String.format("NOT %s > 0", column); if (type == Types.DOUBLE) return String.format("NOT %s > 0", column); if (type == Types.BOOLEAN) return String.format("NOT %s", column); if (type == Types.VARCHAR) return String.format("NOT LENGTH(%s) > 0", column); return "0;"; } if (expressionFormat.contains("IS TRUE")) { // find data type of term and evaluate accordingly //int type = 8; int type = getVariableDataType(term, index); if (type == Types.INTEGER) return String.format("%s > 0", column); if (type == Types.DOUBLE) return String.format("%s > 0", column); if (type == Types.BOOLEAN) return String.format("%s", column); if (type == Types.VARCHAR) return String.format("LENGTH(%s) > 0", column); return "1;"; } return String.format(expressionFormat, column); } else if (isBinary(atom)) { if (atom.isBooleanFunction()) { // For binary boolean operators, e.g., AND, OR, EQ, GT, LT, etc. 
_ String expressionFormat = getBooleanOperatorString(functionSymbol); NewLiteral left = atom.getTerm(0); NewLiteral right = atom.getTerm(1); String leftOp = getSQLString(left, index, true); String rightOp = getSQLString(right, index, true); return String.format("(" + expressionFormat + ")", leftOp, rightOp); } else if (atom.isArithmeticFunction()) { // For numerical operators, e.g., MUTLIPLY, SUBSTRACT, ADDITION String expressionFormat = getNumericalOperatorString(functionSymbol); NewLiteral left = atom.getTerm(0); NewLiteral right = atom.getTerm(1); String leftOp = getSQLString(left, index, true); String rightOp = getSQLString(right, index, true); return String.format("(" + expressionFormat + ")", leftOp, rightOp); } else { throw new RuntimeException("The binary function " + functionSymbol.toString() + " is not supported yet!"); } } else { if (functionSymbol == OBDAVocabulary.SPARQL_REGEX) { boolean caseinSensitive = false; if (atom.getArity() == 3) { if (atom.getTerm(2).toString().contains("i")) { caseinSensitive = true; } } NewLiteral p1 = atom.getTerm(0); NewLiteral p2 = atom.getTerm(1); String column = getSQLString(p1, index, false); String pattern = getSQLString(p2, index, false); return sqladapter.sqlRegex(column, pattern, caseinSensitive); } else { throw new RuntimeException("The builtin function " + functionSymbol.toString() + " is not supported yet!"); } } } /*** * Returns the table definition for these atoms. By default, a list of atoms * represents JOIN or LEFT JOIN of all the atoms, left to right. All boolean * atoms in the list are considered conditions in the ON clause of the JOIN. * * <p> * If the list is a LeftJoin, then it can only have 2 data atoms, and it HAS * to have 2 data atoms. * * <p> * If process boolean operators is enabled, all boolean conditions will be * added to the ON clause of the first JOIN. * * @param atoms * @param index * @param isTopLevel * indicates if the list of atoms is actually the main body of * the conjunctive query. If it is, no JOIN is generated, but a * cross product with WHERE clause. Moreover, the isLeftJoin * argument will be ignored. * * @return */ private String getTableDefinitions(List<Function> inneratoms, QueryAliasIndex index, boolean isTopLevel, boolean isLeftJoin, String indent) { /* * We now collect the view definitions for each data atom each * condition, and each each nested Join/LeftJoin */ List<String> tableDefinitions = new LinkedList<String>(); for (int atomidx = 0; atomidx < inneratoms.size(); atomidx++) { NewLiteral innerAtom = inneratoms.get(atomidx); Function innerAtomAsFunction = (Function) innerAtom; String definition = getTableDefinition(innerAtomAsFunction, index, indent + INDENT); if (!definition.isEmpty()) { tableDefinitions.add(definition); } } /* * Now we generate the table definition, this will be either a comma * separated list for TOP level (FROM clause) or a Join/LeftJoin * (possibly nested if there are more than 2 table definitions in the * current list) in case this method was called recursively. 
*/ StringBuffer tableDefinitionsString = new StringBuffer(); int size = tableDefinitions.size(); if (isTopLevel) { if (size == 0) throw new RuntimeException("No table definitions"); Iterator<String> tableDefinitionsIterator = tableDefinitions .iterator(); tableDefinitionsString.append(indent); tableDefinitionsString.append(tableDefinitionsIterator.next()); while (tableDefinitionsIterator.hasNext()) { tableDefinitionsString.append(",\n"); tableDefinitionsString.append(indent); tableDefinitionsString.append(tableDefinitionsIterator.next()); } } else { /* * This is actually a Join or LeftJoin, so we form the JOINs/LEFT * JOINs and the ON clauses */ String JOIN_KEYWORD = null; if (isLeftJoin) { JOIN_KEYWORD = "LEFT OUTER JOIN"; } else { JOIN_KEYWORD = "JOIN"; } String JOIN = "\n" + indent + "(\n" + indent + "%s\n" + indent + JOIN_KEYWORD + "\n" + indent + "%s\n" + indent + ")"; if (size == 0) { throw new RuntimeException( "Cannot generate definition for empty data"); } if (size == 1) { return tableDefinitions.get(0); } /* * To form the JOIN we will cycle through each data definition, * nesting the JOINs as we go. The conditions in the ON clause will * go on the TOP level only. */ String currentJoin = String.format(JOIN, tableDefinitions.get(size - 2), tableDefinitions.get(size - 1)); tableDefinitions.remove(size - 1); tableDefinitions.remove(size - 2); int currentSize = tableDefinitions.size(); while (currentSize > 0) { currentJoin = String.format(JOIN, tableDefinitions.get(currentSize - 1), currentJoin); tableDefinitions.remove(currentSize - 1); currentSize = tableDefinitions.size(); } tableDefinitions.add(currentJoin); tableDefinitionsString.append(currentJoin); /* * If there are ON conditions we add them now. We need to remove the * last parenthesis ')' and replace it with ' ON %s)' where %s are * all the conditions */ String conditions = getConditionsString(inneratoms, index, true, indent); if (conditions.length() > 0 && tableDefinitionsString.lastIndexOf(")") != -1) { int lastidx = tableDefinitionsString.lastIndexOf(")"); tableDefinitionsString.delete(lastidx, tableDefinitionsString.length()); // tableDefinitionsString.deleteCharAt(tableDefinitionsString // .length() - 1); String ON_CLAUSE = String.format("ON\n%s\n " + indent + ")", conditions); tableDefinitionsString.append(ON_CLAUSE); } } return tableDefinitionsString.toString(); } /*** * Returns the table definition for the given atom. If the atom is a simple * table or view, then it returns the value as defined by the * QueryAliasIndex. If the atom is a Join or Left Join, it will call * getTableDefinitions on the nested term list. 
* * @param atom * @param index * @return */ private String getTableDefinition(Function atom, QueryAliasIndex index, String indent) { Predicate predicate = atom.getPredicate(); if (predicate instanceof BooleanOperationPredicate || predicate instanceof NumericalOperationPredicate || predicate instanceof DataTypePredicate) { // These don't participate in the FROM clause return ""; } else if (predicate instanceof AlgebraOperatorPredicate) { List<Function> innerTerms = new LinkedList<Function>(); for (NewLiteral innerTerm : atom.getTerms()) innerTerms.add((Function) innerTerm); if (predicate == OBDAVocabulary.SPARQL_JOIN) { return getTableDefinitions(innerTerms, index, false, false, indent + INDENT); } else if (predicate == OBDAVocabulary.SPARQL_LEFTJOIN) { return getTableDefinitions(innerTerms, index, false, true, indent + INDENT); } } /* * This is a data atom */ String def = index.getViewDefinition(atom); return def; } private String getFROM(CQIE query, QueryAliasIndex index) { List<Function> atoms = new LinkedList<Function>(); for (Function atom : query.getBody()) atoms.add((Function) atom); String tableDefinitions = getTableDefinitions(atoms, index, true, false, ""); return "\n FROM \n" + tableDefinitions; } /*** * Generates all the conditions on the given atoms, e.g., shared variables * and boolean conditions. This string can then be used to form a WHERE or * an ON clause. * * <p> * The method assumes that no variable in this list (or nested ones) referes * to an upper level one. * * @param atoms * @param index * @return */ private String getConditionsString(List<Function> atoms, QueryAliasIndex index, boolean processShared, String indent) { LinkedHashSet<String> equalityConditions = new LinkedHashSet<String>(); // if (processShared) equalityConditions.addAll(getConditionsSharedVariablesAndConstants( atoms, index, processShared)); LinkedHashSet<String> booleanConditions = getBooleanConditionsString( atoms, index); LinkedHashSet<String> conditions = new LinkedHashSet<String>(); conditions.addAll(equalityConditions); conditions.addAll(booleanConditions); /* * Collecting all the conditions in a single string for the ON or WHERE * clause */ StringBuffer conditionsString = new StringBuffer(); Iterator<String> conditionsIterator = conditions.iterator(); if (conditionsIterator.hasNext()) { conditionsString.append(indent); conditionsString.append(conditionsIterator.next()); } while (conditionsIterator.hasNext()) { conditionsString.append(" AND\n"); conditionsString.append(indent); conditionsString.append(conditionsIterator.next()); } return conditionsString.toString(); } /*** * Returns the set of variables that participate data atoms (either in this * atom directly or in nested ones). This will recursively collect the * variables references in in this atom, exlcuding those on the right side * of left joins. * * @param atom * @return */ private Set<Variable> getVariableReferencesWithLeftJoin(Function atom) { if (atom.isDataFunction()) return atom.getVariables(); if (atom.isBooleanFunction()) return new HashSet<Variable>(); /* * we have an alebra opertaor (join or left join) if its a join, we need * to collect all the varaibles of each nested atom., if its a left * join, only of the first data/algebra atom (the left atom). 
*/ boolean isLeftJoin = false; boolean foundFirstDataAtom = false; if (atom.getFunctionSymbol() == OBDAVocabulary.SPARQL_LEFTJOIN) isLeftJoin = true; LinkedHashSet<Variable> innerVariables = new LinkedHashSet<Variable>(); for (NewLiteral t : atom.getTerms()) { if (isLeftJoin && foundFirstDataAtom) break; Function asFunction = (Function) t; if (asFunction.isBooleanFunction()) continue; innerVariables.addAll(getVariableReferencesWithLeftJoin(asFunction .asAtom())); foundFirstDataAtom = true; } return innerVariables; } /*** * Returns a list of equality conditions that reflect the semantics of the * shared variables in the list of atoms. * <p> * The method assumes that no variables are shared across deeper levels of * nesting (through Join or LeftJoin atoms), it will not call itself * recursively. Nor across upper levels. * * <p> * When generating equalities recursively, we will also generate a minimal * number of equalities. E.g., if we have A(x), Join(R(x,y), Join(R(y, * x),B(x)) * */ private LinkedHashSet<String> getConditionsSharedVariablesAndConstants( List<Function> atoms, QueryAliasIndex index, boolean processShared) { LinkedHashSet<String> equalities = new LinkedHashSet<String>(); Set<Variable> currentLevelVariables = new LinkedHashSet<Variable>(); if (processShared) for (Function atom : atoms) { currentLevelVariables .addAll(getVariableReferencesWithLeftJoin(atom)); // if (atom.isDataFunction()) { // currentLevelVariables.addAll(atom.getReferencedVariables()); // } else if (atom.isAlgebraFunction()) { // currentLevelVariables.addAll(atom.getReferencedVariables()); // } } /* * For each variable we collect all the columns that shold be equated * (due to repeated positions of the variable). then we form atoms of * the form "COL1 = COL2" */ for (Variable var : currentLevelVariables) { Set<String> references = index.getColumnReferences(var); if (references.size() < 2) { // No need for equality continue; } Iterator<String> referenceIterator = references.iterator(); String leftColumnReference = referenceIterator.next(); while (referenceIterator.hasNext()) { String rightColumnReference = referenceIterator.next(); String equality = String.format("(%s = %s)", leftColumnReference, rightColumnReference); equalities.add(equality); leftColumnReference = rightColumnReference; } } for (Function atom : atoms) { if (!atom.isDataFunction()) continue; for (int idx = 0; idx < atom.getArity(); idx++) { NewLiteral l = atom.getTerm(idx); if (l instanceof Constant) { String value = getSQLString(l, index, false); String columnReference = index .getColumnReference(atom, idx); equalities.add(String.format("(%s = %s)", columnReference, value)); } } } return equalities; } // return variable SQL data type private int getVariableDataType (NewLiteral term, QueryAliasIndex idx) { Function f = (Function) term; if (f.isDataTypeFunction()) { Predicate p = f.getPredicate(); if (p.toString() == OBDAVocabulary.XSD_BOOLEAN_URI) return Types.BOOLEAN; if (p.toString() == OBDAVocabulary.XSD_INT_URI) return Types.INTEGER; if (p.toString() == OBDAVocabulary.XSD_INTEGER_URI) return Types.INTEGER; if (p.toString() == OBDAVocabulary.XSD_DOUBLE_URI) return Types.DOUBLE; if (p.toString() == OBDAVocabulary.XSD_STRING_URI) return Types.VARCHAR; if (p.toString() == OBDAVocabulary.RDFS_LITERAL_URI) return Types.VARCHAR; } // Return varchar for unknown return 12; } // private Set<Variable> getMandatoryColumnsOnJoinsAndLeftJoinsRecursively( // Atom atom) { // if (atom.isDataFunction()) { // return atom.getReferencedVariables(); // } else if 
(atom.isBooleanFunction()) // return new HashSet<Variable>(); // /* atom is an alebra function */ // Predicate pred = atom.getFunctionSymbol(); // boolean isLeftJoin = true; // if (pred == OBDAVocabulary.SPARQL_JOIN) { // isLeftJoin = false; // } // // /* If its a normal join, all nexted variables are required variables, if // its // * // */ // // } private String getWHERE(CQIE query, QueryAliasIndex index) { List<Function> atoms = new LinkedList<Function>(); for (Function atom : query.getBody()) atoms.add((Function) atom); String conditions = getConditionsString(atoms, index, false, ""); if (conditions.length() == 0) return ""; return "\nWHERE \n" + conditions; } /** * produces the select clause of the sql query for the given CQIE * * @param q * the query * @return the sql select clause */ private String getSelectClause(List<String> signature, CQIE query, QueryAliasIndex index, boolean distinct) throws OBDAException { /* * If the head has size 0 this is a boolean query. */ List<NewLiteral> headterms = query.getHead().getTerms(); StringBuilder sb = new StringBuilder(); sb.append("SELECT "); if (distinct) sb.append("DISTINCT "); if (headterms.size() == 0) { sb.append("true as x"); return sb.toString(); } Iterator<NewLiteral> hit = headterms.iterator(); int hpos = 0; while (hit.hasNext()) { NewLiteral ht = hit.next(); String typeColumn = getTypeColumnForSELECT(ht, signature, hpos); String langColumn = getLangColumnForSELECT(ht, signature, hpos, index); String mainColumn = getMainColumnForSELECT(ht, signature, hpos, index); sb.append("\n "); sb.append(typeColumn); sb.append(", "); sb.append(langColumn); sb.append(", "); sb.append(mainColumn); if (hit.hasNext()) { sb.append(", "); } hpos++; } return sb.toString(); } private String getMainColumnForSELECT(NewLiteral ht, List<String> signature, int hpos, QueryAliasIndex index) { String mainColumn = null; String mainTemplate = "%s AS %s"; if (ht instanceof URIConstant) { URIConstant uc = (URIConstant) ht; mainColumn = jdbcutil.getSQLLexicalForm(uc.getURI().toString()); } else if (ht == OBDAVocabulary.NULL) { mainColumn = "NULL"; } else if (ht instanceof Function) { /* * if it's a function we need to get the nested value if its a * datatype function or we need to do the CONCAT if its URI(....). */ Function ov = (Function) ht; Predicate function = ov.getFunctionSymbol(); String functionString = function.toString(); /* * Adding the column(s) with the actual value(s) */ if (function instanceof DataTypePredicate) { /* * Case where we have a typing function in the head (this is the * case for all literal columns */ NewLiteral term = ov.getTerms().get(0); String termStr = null; if (term instanceof ValueConstant) { termStr = jdbcutil.getSQLLexicalForm((ValueConstant) term); } else { termStr = getSQLString(term, index, false); } mainColumn = termStr; } else if (functionString.equals(OBDAVocabulary.QUEST_URI)) { /*** * New template based URI building functions */ mainColumn = getSQLStringForURIFunction(ov, index); } else if (functionString.equals(OBDAVocabulary.QUEST_BNODE)) { /*** * New template based URI building functions */ mainColumn = getSQLStringForBNodeFunction(ov, index); } else { throw new IllegalArgumentException( "Error generating SQL query. Contact the developers. 
Found an invalid function during translation: " + ov.toString()); } } else { throw new RuntimeException("Cannot generate SELECT for term: " + ht.toString()); } /* * If the we have a column we need to still CAST to VARCHAR */ if (mainColumn.charAt(0) != '\'' && mainColumn.charAt(0) != '(') mainColumn = sqladapter.sqlCast(mainColumn, Types.VARCHAR); return String.format(mainTemplate, mainColumn, sqladapter.sqlQuote(signature.get(hpos))); } private String getLangColumnForSELECT(NewLiteral ht, List<String> signature, int hpos, QueryAliasIndex index) { String langStr = "%s AS \"%sLang\""; if (ht instanceof Function) { Function ov = (Function) ht; Predicate function = ov.getFunctionSymbol(); if (function == OBDAVocabulary.RDFS_LITERAL && ov.getTerms().size() > 1) { /* * Case for rdf:literal s with a language, we need to select 2 * terms from ".., rdf:literal(?x,"en"), * * and signature "name" * we will generate a select with the * projection of 2 columns * * , 'en' as nameqlang, view.colforx as name, */ String lang = null; NewLiteral langTerm = ov.getTerms().get(1); if (langTerm == OBDAVocabulary.NULL) lang = "NULL"; if (langTerm instanceof ValueConstant) { lang = jdbcutil.getSQLLexicalForm((ValueConstant) langTerm); } else { lang = getSQLString(langTerm, index, false); } return (String.format(langStr, lang, signature.get(hpos))); } } return (String.format(langStr, "NULL", signature.get(hpos))); } private String getTypeColumnForSELECT(NewLiteral ht, List<String> signature, int hpos) { String typeStr = "%s AS \"%sQuestType\""; if (ht instanceof Function) { Function ov = (Function) ht; Predicate function = ov.getFunctionSymbol(); String functionString = function.toString(); /* * Adding the ColType column to the projection (used in the result * set to know the type of constant) */ if (functionString.equals(OBDAVocabulary.XSD_BOOLEAN.getName() .toString())) { return (String.format(typeStr, 9, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_DATETIME .getName().toString())) { return (String.format(typeStr, 8, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_DECIMAL .getName().toString())) { return (String.format(typeStr, 5, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_DOUBLE .getName().toString())) { return (String.format(typeStr, 6, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_INTEGER .getName().toString())) { return (String.format(typeStr, 4, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.XSD_STRING .getName().toString())) { return (String.format(typeStr, 7, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.RDFS_LITERAL .getName().toString())) { return (String.format(typeStr, 3, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.QUEST_URI)) { return (String.format(typeStr, 1, signature.get(hpos))); } else if (functionString.equals(OBDAVocabulary.QUEST_BNODE)) { return (String.format(typeStr, 2, signature.get(hpos))); } } else if (ht instanceof URIConstant) { return (String.format(typeStr, 1, signature.get(hpos))); } else if (ht == OBDAVocabulary.NULL) { return (String.format(typeStr, 0, signature.get(hpos))); } throw new RuntimeException("Cannot generate SELECT for term: " + ht.toString()); } /*** * Returns the SQL that builds a URI String out of an atom of the form * uri("htttp:...", x, y,...) 
* * @param ov * @param index * @return */ public String getSQLStringForURIFunction(Function ov, QueryAliasIndex index) { /* * The first inner term determines the form of the result */ NewLiteral t = ov.getTerms().get(0); if (t instanceof ValueConstant) { /* * The function is actually a template. The first parameter is a * string of the form http://.../.../ with place holders of the form * {}. The rest are variables or constants that should be put in * place of the palce holders. We need to tokenize and form the * CONCAT */ ValueConstant c = (ValueConstant) t; if (c.getValue().equals("{}")) { return getSQLString(ov.getTerms().get(1), index, false); } else { StringTokenizer tokenizer = new StringTokenizer(c.getValue(), "{}"); String functionString = jdbcutil.getSQLLexicalForm(tokenizer.nextToken()); List<String> vex = new LinkedList<String>(); /* * New we concat the rest of the function, note that if there is only 1 element * there is nothing to concatenate */ if (ov.getTerms().size() > 1) { for (int termIndex = 1; termIndex < ov.getTerms().size(); termIndex++) { NewLiteral currentTerm = ov.getTerms().get(termIndex); String repl = "REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(" + "REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(" + sqladapter.sqlCast(getSQLString(currentTerm, index, false), Types.VARCHAR) + ",' ', '%20')," + "'!', '%21')," + "'@', '%40'),"+ "'#', '%23')," + "'$', '%24'),"+ "'&', '%26'),"+ "'*', '%42'), "+ "'(', '%28'), "+ "')', '%29'), "+ "'[', '%5B'), "+ "']', '%5D'), "+ "',', '%2C'), "+ "';', '%3B'), "+ "':', '%3A'), "+ "'?', '%3F'), "+ "'=', '%3D'), "+ "'+', '%2B'), "+ "'''', '%22'), "+ "'/', '%2F')"; vex.add(repl); if (tokenizer.hasMoreTokens()) { vex.add(jdbcutil.getSQLLexicalForm(tokenizer.nextToken())); } termIndex += 1; } } String[] params = new String[vex.size() + 1]; int i = 0; params[i] = functionString; i += 1; for (String param : vex) { params[i] = param; i += 1; } return sqladapter.strconcat(params); } } else if (t instanceof Variable) { /* * The function is of the form uri(x), we need to simply return the * value of X */ return getSQLString(((Variable) t), index, false); } else if (t instanceof URIConstant) { URIConstant uc = (URIConstant) t; /* * The function is of the form uri("http://some.uri/"), i.e., a * concrete URI, we return the string representing that URI. */ return jdbcutil.getSQLLexicalForm(uc.getURI().toString()); } /* * Unsupported case */ throw new IllegalArgumentException( "Error, cannot generate URI constructor clause for a term. Contact the authors. Term: " + ov.toString()); } /*** * Returns the SQL that builds a URI String out of an atom of the form * uri("http:...", x, y,...) * * @param ov * @param index * @return */ public String getSQLStringForBNodeFunction(Function ov, QueryAliasIndex index) { /* * The first inner term determines the form of the result */ NewLiteral t = ov.getTerms().get(0); if (t instanceof ValueConstant) { /* * The function is actually a template. The first parameter is a * string of the form http://.../.../ with place holders of the form * {}. The rest are variables or constants that should be put in * place of the palce holders. 
We need to tokenize and form the * CONCAT */ ValueConstant c = (ValueConstant) t; StringTokenizer tokenizer = new StringTokenizer(c.toString(), "{}"); String functionString = jdbcutil.getSQLLexicalForm(tokenizer .nextToken()); List<String> vex = new LinkedList<String>(); int termIndex = 1; do { NewLiteral currentTerm = ov.getTerms().get(termIndex); vex.add(getSQLString(currentTerm, index, false)); if (tokenizer.hasMoreTokens()) { vex.add(jdbcutil.getSQLLexicalForm(tokenizer.nextToken())); } termIndex += 1; } while (tokenizer.hasMoreElements() || termIndex < ov.getTerms().size()); String[] params = new String[vex.size() + 1]; int i = 0; params[i] = functionString; i += 1; for (String param : vex) { params[i] = param; i += 1; } return sqladapter.strconcat(params); } else if (t instanceof Variable) { /* * The function is of the form uri(x), we need to simply return the * value of X */ return getSQLString(((Variable) t), index, false); } else if (t instanceof URIConstant) { URIConstant uc = (URIConstant) t; /* * The function is of the form uri("http://some.uri/"), i.e., a * concrete URI, we return the string representing that URI. */ return jdbcutil.getSQLLexicalForm(uc.getURI().toString()); } /* * Unsupported case */ throw new IllegalArgumentException( "Error, cannot generate URI constructor clause for a term. Contact the authors. Term: " + ov.toString()); } /** * Determines if it is a unary function. */ private boolean isUnary(Function fun) { return (fun.getArity() == 1) ? true : false; } /** * Determines if it is a binary function. */ private boolean isBinary(Function fun) { return (fun.getArity() == 2) ? true : false; } /*** * Generates the SQL string that forms or retrieves the given term. The * function takes as input either: a constant (value or URI), a variable, or * a Function (i.e., uri(), eq(..), ISNULL(..), etc)). * <p> * If the input is a constant, it will return the SQL that generates the * string representing that constant. * <p> * If its a variable, it returns the column references to the position where * the variable first appears. * <p> * If its a function uri(..) it returns the SQL string concatenation that * builds the result of uri(...) * <p> * If its a boolean comparison, it returns the corresponding SQL comparison. 
* * @param term * @param index * @param useBrackets * @return */ public String getSQLString(NewLiteral term, QueryAliasIndex index, boolean useBrackets) { if (term == null) { return ""; } if (term instanceof ValueConstant) { ValueConstant ct = (ValueConstant) term; return jdbcutil.getSQLLexicalForm(ct); } else if (term instanceof URIConstant) { URIConstant uc = (URIConstant) term; return jdbcutil.getSQLLexicalForm(uc.toString()); } else if (term instanceof Variable) { Variable var = (Variable) term; LinkedHashSet<String> posList = index.getColumnReferences(var); if (posList == null || posList.size() == 0) { throw new RuntimeException( "Unbound variable found in WHERE clause: " + term); } return posList.iterator().next(); } /* If its not constant, or variable its a function */ Function function = (Function) term; Predicate functionSymbol = function.getFunctionSymbol(); NewLiteral term1 = function.getTerms().get(0); if (functionSymbol instanceof DataTypePredicate) { if (functionSymbol.getType(0) == COL_TYPE.UNSUPPORTED) { throw new RuntimeException("Unsupported type in the query: " + function); } /* atoms of the form integer(x) */ return getSQLString(term1, index, false); } else if (functionSymbol instanceof BooleanOperationPredicate) { /* atoms of the form EQ(x,y) */ String expressionFormat = getBooleanOperatorString(functionSymbol); if (isUnary(function)) { // for unary functions, e.g., NOT, IS NULL, IS NOT NULL // also added for IS TRUE if (expressionFormat.contains("IS TRUE")) { // find data type of term and evaluate accordingly //int type = 8; String column = getSQLString(term1, index, false); int type = getVariableDataType(term1, index); if (type == Types.INTEGER) return String.format("%s > 0", column); if (type == Types.DOUBLE) return String.format("%s > 0", column); if (type == Types.BOOLEAN) return String.format("%s", column); if (type == Types.VARCHAR) return String.format("LENGTH(%s) > 0", column); return "1"; } String op = getSQLString(term1, index, true); return String.format(expressionFormat, op); } else if (isBinary(function)) { // for binary functions, e.g., AND, OR, EQ, NEQ, GT, etc. 
String leftOp = getSQLString(term1, index, true); NewLiteral term2 = function.getTerms().get(1); String rightOp = getSQLString(term2, index, true); String result = String .format(expressionFormat, leftOp, rightOp); if (useBrackets) { return String.format("(%s)", result); } else { return result; } } else { throw new RuntimeException( "Cannot translate boolean function: " + functionSymbol); } } else if (functionSymbol instanceof NumericalOperationPredicate) { String expressionFormat = getNumericalOperatorString(functionSymbol); String leftOp = getSQLString(term1, index, true); NewLiteral term2 = function.getTerms().get(1); String rightOp = getSQLString(term2, index, true); String result = String.format(expressionFormat, leftOp, rightOp); if (useBrackets) { return String.format("(%s)", result); } else { return result; } } else { String functionName = functionSymbol.toString(); if (functionName.equals(OBDAVocabulary.QUEST_CAST_STR)) { String columnName = getSQLString(function.getTerm(0), index, false); String datatype = ((Constant) function.getTerm(1)).getValue(); int sqlDatatype = -1; if (datatype.equals(OBDAVocabulary.XSD_STRING_URI)) { sqlDatatype = Types.VARCHAR; } return sqladapter.sqlCast(columnName, sqlDatatype); } else if (functionName.equals(OBDAVocabulary.SPARQL_STR_URI)) { String columnName = getSQLString(function.getTerm(0), index, false); return sqladapter.sqlCast(columnName, Types.VARCHAR); } } /* * The atom must be of the form uri("...", x, y) */ String functionName = function.getFunctionSymbol().toString(); if (functionName.equals(OBDAVocabulary.QUEST_URI) || functionName.equals(OBDAVocabulary.QUEST_BNODE)) { return getSQLStringForURIFunction(function, index); } else { throw new RuntimeException("Unexpected function in the query: " + functionSymbol); } } /*** * Returns the SQL string for the boolean operator, including placeholders * for the terms to be used, e.g., %s = %s, %s IS NULL, etc. 
* * @param functionSymbol * @return */ private String getBooleanOperatorString(Predicate functionSymbol) { String operator = null; if (functionSymbol.equals(OBDAVocabulary.EQ)) { operator = EQ_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.NEQ)) { operator = NEQ_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.GT)) { operator = GT_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.GTE)) { operator = GTE_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.LT)) { operator = LT_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.LTE)) { operator = LTE_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.AND)) { operator = AND_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.OR)) { operator = OR_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.NOT)) { operator = NOT_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.IS_NULL)) { operator = IS_NULL_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.IS_NOT_NULL)) { operator = IS_NOT_NULL_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.IS_TRUE)) { operator = IS_TRUE_OPERATOR; } else { throw new RuntimeException("Unknown boolean operator: " + functionSymbol); } return operator; } private String getNumericalOperatorString(Predicate functionSymbol) { String operator = null; if (functionSymbol.equals(OBDAVocabulary.ADD)) { operator = ADD_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.SUBSTRACT)) { operator = SUBSTRACT_OPERATOR; } else if (functionSymbol.equals(OBDAVocabulary.MULTIPLY)) { operator = MULTIPLY_OPERATOR; } else { throw new RuntimeException("Unknown numerical operator: " + functionSymbol); } return operator; } /*** * Utility class to resolve "database" atoms to view definitions ready to be * used in a FROM clause, and variables, to column references defined over * the existing view definitons of a query. * * * @author mariano * */ public class QueryAliasIndex { Map<Function, String> viewNames = new HashMap<Function, String>(); Map<Function, String> tableNames = new HashMap<Function, String>(); Map<Function, DataDefinition> dataDefinitions = new HashMap<Function, DataDefinition>(); Map<Variable, LinkedHashSet<String>> columnReferences = new HashMap<Variable, LinkedHashSet<String>>(); int dataTableCount = 0; boolean isEmpty = false; public QueryAliasIndex(CQIE query) { List<Atom> body = query.getBody(); generateViews(body); } private void generateViews(List<Atom> atoms) { for (Function atom : atoms) { /* * Thios wil call recursively if necessary */ generateViewsIndexVariables(atom); } } /*** * We assiciate each atom to a view definition. This will be * <p> * "tablename" as "viewX" or * <p> * (some nested sql view) as "viewX" * * <p> * View definitions are only done for data atoms. Join/LeftJoin and * boolean atoms are not associated to view definitions. * * @param atom */ private void generateViewsIndexVariables(Function atom) { if (atom.getFunctionSymbol() instanceof BooleanOperationPredicate) { return; } else if (atom.getFunctionSymbol() instanceof AlgebraOperatorPredicate) { List<NewLiteral> lit = atom.getTerms(); for (NewLiteral subatom : lit) { if (subatom instanceof Function) { generateViewsIndexVariables((Function) subatom); } } } Predicate tablePredicate = atom.getFunctionSymbol(); String tableName = tablePredicate.toString(); DataDefinition def = metadata.getDefinition(tableName); if (def == null) { /* * There is no definition for this atom, its not a database * predicate, the query is empty. 
*/ isEmpty = true; return; } dataTableCount += 1; viewNames.put(atom, String.format(VIEW_NAME, dataTableCount)); tableNames.put(atom, tableName); dataDefinitions.put(atom, def); indexVariables(atom); } private void indexVariables(Function atom) { DataDefinition def = dataDefinitions.get(atom); String viewName = viewNames.get(atom); for (int index = 0; index < atom.getTerms().size(); index++) { NewLiteral term = atom.getTerms().get(index); if (!(term instanceof Variable)) continue; LinkedHashSet<String> references = columnReferences.get(term); if (references == null) { references = new LinkedHashSet<String>(); columnReferences.put((Variable) term, references); } String columnName = def.getAttributeName(index + 1); String reference = sqladapter.sqlQualifiedColumn(viewName, columnName); references.add(reference); } } /*** * Returns all the column aliases that correspond to this variable, * across all the DATA atoms in the query (not algebra operators or * boolean conditions. * * @param var * The variable we want the referenced columns. */ public LinkedHashSet<String> getColumnReferences(Variable var) { return columnReferences.get(var); } /*** * Generates the view definition, i.e., "tablename viewname" * * @param atom * @return */ public String getViewDefinition(Function atom) { DataDefinition def = dataDefinitions.get(atom); if (def instanceof TableDefinition) { return sqladapter.sqlTableName(tableNames.get(atom), viewNames.get(atom)); } else if (def instanceof ViewDefinition) { return String.format("(%s) %s", ((ViewDefinition) def).getStatement(), viewNames.get(atom)); } throw new RuntimeException( "Impossible to get data definition for: " + atom + ", type: " + def); } public String getView(Function atom) { return viewNames.get(atom); } public String getColumnReference(Function atom, int column) { String viewName = getView(atom); DataDefinition def = dataDefinitions.get(atom); String columnname = def.getAttributeName(column + 1); return sqladapter.sqlQualifiedColumn(viewName, columnname); } } }
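In both versions of the file, getSelectClause projects three columns per head term: a numeric QuestType code (getTypeColumnForSELECT), a language column (getLangColumnForSELECT), and the value itself (getMainColumnForSELECT). The sketch below assembles that projection for one hypothetical signature variable x bound to a URI term (QuestType code 1); the column expression QVIEW1.URI and the double-quote identifier quoting are assumptions standing in for the alias index and sqladapter.sqlQuote.

public class SelectClauseSketch {

    public static void main(String[] args) {
        String signatureName = "x";                    // hypothetical signature entry
        String mainColumnExpression = "QVIEW1.URI";    // hypothetical column carrying the value

        // Same format strings as getTypeColumnForSELECT, getLangColumnForSELECT and
        // getMainColumnForSELECT; code 1 is the QuestType used for URI terms.
        String typeColumn = String.format("%s AS \"%sQuestType\"", 1, signatureName);
        String langColumn = String.format("%s AS \"%sLang\"", "NULL", signatureName);
        String mainColumn = String.format("%s AS %s", mainColumnExpression, "\"" + signatureName + "\"");

        // SELECT 1 AS "xQuestType", NULL AS "xLang", QVIEW1.URI AS "x"
        System.out.println("SELECT " + typeColumn + ", " + langColumn + ", " + mainColumn);
    }
}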
Bug fix on tokenizing string in URI template function.
obdalib/reformulation-core/src/main/java/it/unibz/krdb/obda/owlrefplatform/core/sql/SQLGenerator.java
Bug fix on tokenizing string in URI template function.
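Comparing new_contents with old_contents, the fix summarized by this subject changes getSQLStringForURIFunction to tokenize the constant's value (c.getValue()) instead of its c.toString() rendering, to special-case the bare "{}" template, and to replace the do/while over the remaining terms with a loop guarded by ov.getTerms().size() > 1. Because a do/while always runs at least once, the old shape evaluated ov.getTerms().get(1) even when only the template argument was present. The self-contained sketch below reproduces just that control-flow change with hypothetical inputs; it is an illustration, not the project's code.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class TemplateLoopSketch {

    // New, guarded shape: concatenate placeholder terms only when they exist.
    static int countConcatenatedTerms(List<String> terms) {
        int concatenated = 0;
        if (terms.size() > 1) {
            for (int termIndex = 1; termIndex < terms.size(); termIndex++) {
                concatenated++;                 // the real code appends terms.get(termIndex) here
            }
        }
        return concatenated;
    }

    public static void main(String[] args) {
        // Template with one placeholder value: one term is concatenated. Prints 1.
        System.out.println(countConcatenatedTerms(Arrays.asList("http://example.org/{}", "42")));

        // Template only, no placeholders: the guard skips the loop entirely and prints 0.
        // The old do/while shape would have evaluated terms.get(1) once regardless and
        // thrown IndexOutOfBoundsException for this input.
        System.out.println(countConcatenatedTerms(Collections.singletonList("http://fixed.example.org/")));
    }
}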
Java
apache-2.0
6306a41563464afba569a8c7aafc5a9c05e831c1
0
akosyakov/intellij-community,izonder/intellij-community,kdwink/intellij-community,FHannes/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,hurricup/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,da1z/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,apixandru/intellij-community,caot/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,blademainer/intellij-community,semonte/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,holmes/intellij-community,robovm/robovm-studio,hurricup/intellij-community,youdonghai/intellij-community,izonder/intellij-community,slisson/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,caot/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,fitermay/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,da1z/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,vladmm/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,signed/intellij-community,kool79/intellij-community,ahb0327/intellij-community,holmes/intellij-community,jagguli/intellij-commun
ity,petteyg/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,kdwink/intellij-community,clumsy/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,semonte/intellij-community,apixandru/intellij-community,fitermay/intellij-community,holmes/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,kool79/intellij-community,petteyg/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,adedayo/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,ryano144/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,signed/intellij-community,blademainer/intellij-community,izonder/intellij-community,hurricup/intellij-community,holmes/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,signed/intellij-community,kdwink/intellij-community,diorcety/intellij-community,allotria/intellij-community,dslomov/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,ibinti/intellij-community,holmes/intellij-community,ibinti/intellij-community,apixandru/intellij-community,blademainer/intellij-community,samthor/intellij-community,apixandru/intellij-community,supersven/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,clumsy/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,idea4b
sd/idea4bsd,fengbaicanhe/intellij-community,adedayo/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,da1z/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,signed/intellij-community,holmes/intellij-community,suncycheng/intellij-community,allotria/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,FHannes/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,hurricup/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,robovm/robovm-studio,salguarnieri/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,jagguli/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,blademainer/intellij-community,blademainer/intellij-community,amith01994/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,izonder/intellij-community,amith01994/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,jagguli/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,retomerz/intellij-community,kool79/intellij-community,slisson/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,supersven/
intellij-community,kool79/intellij-community,caot/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,hurricup/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,signed/intellij-community,apixandru/intellij-community,dslomov/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,petteyg/intellij-community,signed/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,samthor/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,allotria/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,semonte/intellij-community,semonte/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,supersven/intellij-community,signed/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,ryano144/intellij-community,semonte/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,kool79/intellij-community,signed/intellij-community,amith01994/intellij-community,fnouama/intellij-community,xfournet/intellij-community,caot/intellij-community,petteyg/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,da1z/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,da1z/intellij-community,hurricup/intellij-community,clumsy/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,ryano144/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,adedayo/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,semonte/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,asedunov/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,allotria/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,hurricup/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,robovm/robovm-studio,FHannes/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,fitermay/intellij-community,asedunov/intellij-community,retomerz/intellij-community,asedunov/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,diorcety/intellij-community,ftomassetti/intellij-commun
ity,vladmm/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,FHannes/intellij-community,slisson/intellij-community,adedayo/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,izonder/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,slisson/intellij-community,izonder/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,izonder/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,blademainer/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,xfournet/intellij-community,izonder/intellij-community,supersven/intellij-community,amith01994/intellij-community,jagguli/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,xfournet/intellij-community,kool79/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,caot/intellij-community,da1z/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,fnouama/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,fitermay/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,izonder/intellij-community,caot/intellij-community,slisson/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,petteyg/intellij-community,signed/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,supersven/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,signed/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,diorcety/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,caot/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,izonder/intellij-community,tmpgit/intellij-community,slisson/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,kool79/intellij-community,hurricup/intellij-community,jagguli/intellij-community,fnouama/intellij-community,caot/in
tellij-community,orekyuu/intellij-community,allotria/intellij-community,jagguli/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,kdwink/intellij-community,asedunov/intellij-community,robovm/robovm-studio,asedunov/intellij-community,slisson/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,da1z/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,vladmm/intellij-community,samthor/intellij-community,clumsy/intellij-community,semonte/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,samthor/intellij-community,hurricup/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,izonder/intellij-community,clumsy/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,samthor/intellij-community,holmes/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,supersven/intellij-community,holmes/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,gnuhub/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,ryano144/intellij-community,holmes/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,samthor/intellij-community,nicolargo/intellij-community
package com.intellij.remoteServer.impl.runtime.ui; import com.intellij.ide.DataManager; import com.intellij.ide.actions.ContextHelpAction; import com.intellij.ide.util.treeView.AbstractTreeNode; import com.intellij.ide.util.treeView.NodeDescriptor; import com.intellij.ide.util.treeView.NodeRenderer; import com.intellij.ide.util.treeView.TreeVisitor; import com.intellij.remoteServer.impl.runtime.ui.tree.ServersTreeStructure; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Splitter; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.EmptyRunnable; import com.intellij.remoteServer.configuration.RemoteServer; import com.intellij.remoteServer.impl.runtime.log.LoggingHandlerImpl; import com.intellij.remoteServer.impl.runtime.ui.tree.DeploymentNode; import com.intellij.remoteServer.impl.runtime.ui.tree.ServerNode; import com.intellij.remoteServer.impl.runtime.ui.tree.TreeBuilderBase; import com.intellij.remoteServer.runtime.ConnectionStatus; import com.intellij.remoteServer.runtime.ServerConnection; import com.intellij.remoteServer.runtime.ServerConnectionListener; import com.intellij.remoteServer.runtime.ServerConnectionManager; import com.intellij.ui.DoubleClickListener; import com.intellij.ui.ScrollPaneFactory; import com.intellij.ui.SideBorder; import com.intellij.ui.components.panels.Wrapper; import com.intellij.ui.treeStructure.Tree; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import javax.swing.*; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.DefaultTreeModel; import java.awt.*; import java.awt.event.MouseEvent; import java.util.HashMap; import java.util.Map; import java.util.Set; /** * Created by IntelliJ IDEA. 
* User: michael.golubev */ public class ServersToolWindowContent extends JPanel implements Disposable { public static final DataKey<ServersToolWindowContent> KEY = DataKey.create("serversToolWindowContent"); @NonNls private static final String PLACE_TOOLBAR = "ServersToolWindowContent#Toolbar"; @NonNls private static final String SERVERS_TOOL_WINDOW_TOOLBAR = "RemoteServersViewToolbar"; @NonNls private static final String HELP_ID = "Application_Servers_tool_window"; private static final String MESSAGE_CARD = "message"; private static final String EMPTY_SELECTION_MESSAGE = "Select a server or deployment in the tree to view details"; private final Tree myTree; private final CardLayout myPropertiesPanelLayout; private final JPanel myPropertiesPanel; private final JLabel myMessageLabel; private final Map<String, JComponent> myLogComponents = new HashMap<String, JComponent>(); private final DefaultTreeModel myTreeModel; private TreeBuilderBase myBuilder; private AbstractTreeNode<?> myLastSelection; private final Project myProject; public ServersToolWindowContent(@NotNull Project project) { super(new BorderLayout()); myProject = project; myTreeModel = new DefaultTreeModel(new DefaultMutableTreeNode()); myTree = new Tree(myTreeModel); myTree.setRootVisible(false); myTree.setShowsRootHandles(true); myTree.setCellRenderer(new NodeRenderer()); myTree.setLineStyleAngled(); getMainPanel().add(createToolbar(), BorderLayout.WEST); Splitter splitter = new Splitter(false, 0.3f); splitter.setFirstComponent(ScrollPaneFactory.createScrollPane(myTree, SideBorder.LEFT)); myPropertiesPanelLayout = new CardLayout(); myPropertiesPanel = new JPanel(myPropertiesPanelLayout); myMessageLabel = new JLabel(EMPTY_SELECTION_MESSAGE, SwingConstants.CENTER); myPropertiesPanel.add(MESSAGE_CARD, new Wrapper(myMessageLabel)); splitter.setSecondComponent(myPropertiesPanel); getMainPanel().add(splitter, BorderLayout.CENTER); setupBuilder(project); for (RemoteServersViewContributor contributor : RemoteServersViewContributor.EP_NAME.getExtensions()) { contributor.setupTree(myProject, myTree, myBuilder); } myTree.addTreeSelectionListener(new TreeSelectionListener() { @Override public void valueChanged(TreeSelectionEvent e) { onSelectionChanged(); } }); new DoubleClickListener() { @Override protected boolean onDoubleClick(MouseEvent event) { Set<ServersTreeStructure.RemoteServerNode> nodes = getSelectedRemoteServerNodes(); if (nodes.size() == 1) { RemoteServer<?> server = nodes.iterator().next().getValue(); ServerConnectionManager.getInstance().getOrCreateConnection(server).computeDeployments(EmptyRunnable.INSTANCE); return true; } return false; } }.installOn(myTree); } private void onSelectionChanged() { Set<AbstractTreeNode> nodes = myBuilder.getSelectedElements(AbstractTreeNode.class); if (nodes.size() != 1) { showMessageLabel(EMPTY_SELECTION_MESSAGE); myLastSelection = null; return; } AbstractTreeNode<?> node = nodes.iterator().next(); if (Comparing.equal(node, myLastSelection)) { return; } myLastSelection = node; if (node instanceof ServersTreeStructure.LogProvidingNode) { ServersTreeStructure.LogProvidingNode logNode = (ServersTreeStructure.LogProvidingNode)node; LoggingHandlerImpl loggingHandler = logNode.getLoggingHandler(); if (loggingHandler != null) { String cardName = logNode.getLogId(); JComponent oldComponent = myLogComponents.get(cardName); JComponent logComponent = loggingHandler.getConsole().getComponent(); if (!logComponent.equals(oldComponent)) { myLogComponents.put(cardName, logComponent); if (oldComponent != 
null) { myPropertiesPanel.remove(oldComponent); } myPropertiesPanel.add(cardName, logComponent); } myPropertiesPanelLayout.show(myPropertiesPanel, cardName); } } else if (node instanceof ServersTreeStructure.RemoteServerNode) { updateServerDetails((ServersTreeStructure.RemoteServerNode)node); } else { showMessageLabel(""); } } private void updateServerDetails(ServersTreeStructure.RemoteServerNode node) { RemoteServer<?> server = ((ServersTreeStructure.RemoteServerNode)node).getValue(); ServerConnection connection = ServerConnectionManager.getInstance().getConnection(server); if (connection == null || connection.getStatus() == ConnectionStatus.DISCONNECTED) { showMessageLabel("Double-click on the server node to connect"); } else { showMessageLabel(connection.getStatusText()); } } private void showMessageLabel(final String text) { myMessageLabel.setText(UIUtil.toHtml(text)); myPropertiesPanelLayout.show(myPropertiesPanel, MESSAGE_CARD); } private void setupBuilder(final @NotNull Project project) { ServersTreeStructure structure = new ServersTreeStructure(project); myBuilder = new TreeBuilderBase(myTree, structure, myTreeModel) { @Override protected boolean isAutoExpandNode(NodeDescriptor nodeDescriptor) { return nodeDescriptor instanceof ServersTreeStructure.RemoteServerNode || nodeDescriptor instanceof ServersTreeStructure.DeploymentNodeImpl; } }; Disposer.register(this, myBuilder); project.getMessageBus().connect().subscribe(ServerConnectionListener.TOPIC, new ServerConnectionListener() { @Override public void onConnectionCreated(@NotNull ServerConnection<?> connection) { getBuilder().queueUpdate(); } @Override public void onConnectionStatusChanged(@NotNull ServerConnection<?> connection) { getBuilder().queueUpdate(); updateSelectedServerDetails(); } @Override public void onDeploymentsChanged(@NotNull ServerConnection<?> connection) { getBuilder().queueUpdate(); updateSelectedServerDetails(); } }); } private void updateSelectedServerDetails() { if (myLastSelection instanceof ServersTreeStructure.RemoteServerNode) { updateServerDetails((ServersTreeStructure.RemoteServerNode)myLastSelection); } } private JComponent createToolbar() { DefaultActionGroup group = new DefaultActionGroup(); group.add(ActionManager.getInstance().getAction(SERVERS_TOOL_WINDOW_TOOLBAR)); group.add(new Separator()); group.add(new ContextHelpAction(HELP_ID)); ActionToolbar actionToolBar = ActionManager.getInstance().createActionToolbar(PLACE_TOOLBAR, group, false); myTree.putClientProperty(DataManager.CLIENT_PROPERTY_DATA_PROVIDER, new DataProvider() { @Override public Object getData(@NonNls String dataId) { if (KEY.getName().equals(dataId)) { return ServersToolWindowContent.this; } for (RemoteServersViewContributor contributor : RemoteServersViewContributor.EP_NAME.getExtensions()) { Object data = contributor.getData(dataId, ServersToolWindowContent.this); if (data != null) { return data; } } return null; } }); actionToolBar.setTargetComponent(myTree); return actionToolBar.getComponent(); } public JPanel getMainPanel() { return this; } public Set<ServerNode> getSelectedServerNodes() { return myBuilder.getSelectedElements(ServerNode.class); } public Set<DeploymentNode> getSelectedDeploymentNodes() { return myBuilder.getSelectedElements(DeploymentNode.class); } public Set<ServersTreeStructure.RemoteServerNode> getSelectedRemoteServerNodes() { return myBuilder.getSelectedElements(ServersTreeStructure.RemoteServerNode.class); } @Override public void dispose() { } public TreeBuilderBase getBuilder() { return myBuilder; } 
@NotNull public Project getProject() { return myProject; } public void select(@NotNull final ServerConnection<?> connection) { myBuilder.select(ServersTreeStructure.RemoteServerNode.class, new TreeVisitor<ServersTreeStructure.RemoteServerNode>() { @Override public boolean visit(@NotNull ServersTreeStructure.RemoteServerNode node) { return node.getValue().equals(connection.getServer()); } }, null, false); } public void select(@NotNull final ServerConnection<?> connection, @NotNull final String deploymentName) { myBuilder.getUi().queueUpdate(connection).doWhenDone(new Runnable() { @Override public void run() { myBuilder.select(ServersTreeStructure.DeploymentNodeImpl.class, new TreeVisitor<ServersTreeStructure.DeploymentNodeImpl>() { @Override public boolean visit(@NotNull ServersTreeStructure.DeploymentNodeImpl node) { AbstractTreeNode parent = node.getParent(); return parent instanceof ServersTreeStructure.RemoteServerNode && ((ServersTreeStructure.RemoteServerNode)parent).getValue().equals(connection.getServer()) && node.getValue().getName().equals(deploymentName); } }, null, false); } }); } }
platform/remote-servers/impl/src/com/intellij/remoteServer/impl/runtime/ui/ServersToolWindowContent.java
package com.intellij.remoteServer.impl.runtime.ui; import com.intellij.ide.DataManager; import com.intellij.ide.actions.ContextHelpAction; import com.intellij.ide.util.treeView.AbstractTreeNode; import com.intellij.ide.util.treeView.NodeDescriptor; import com.intellij.ide.util.treeView.NodeRenderer; import com.intellij.ide.util.treeView.TreeVisitor; import com.intellij.remoteServer.impl.runtime.ui.tree.ServersTreeStructure; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Splitter; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.EmptyRunnable; import com.intellij.remoteServer.configuration.RemoteServer; import com.intellij.remoteServer.impl.runtime.log.LoggingHandlerImpl; import com.intellij.remoteServer.impl.runtime.ui.tree.DeploymentNode; import com.intellij.remoteServer.impl.runtime.ui.tree.ServerNode; import com.intellij.remoteServer.impl.runtime.ui.tree.TreeBuilderBase; import com.intellij.remoteServer.runtime.ConnectionStatus; import com.intellij.remoteServer.runtime.ServerConnection; import com.intellij.remoteServer.runtime.ServerConnectionListener; import com.intellij.remoteServer.runtime.ServerConnectionManager; import com.intellij.ui.DoubleClickListener; import com.intellij.ui.ScrollPaneFactory; import com.intellij.ui.SideBorder; import com.intellij.ui.components.panels.Wrapper; import com.intellij.ui.treeStructure.Tree; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import javax.swing.*; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.DefaultTreeModel; import java.awt.*; import java.awt.event.MouseEvent; import java.util.HashMap; import java.util.Map; import java.util.Set; /** * Created by IntelliJ IDEA. 
* User: michael.golubev */ public class ServersToolWindowContent extends JPanel implements Disposable { public static final DataKey<ServersToolWindowContent> KEY = DataKey.create("serversToolWindowContent"); @NonNls private static final String PLACE_TOOLBAR = "ServersToolWindowContent#Toolbar"; @NonNls private static final String SERVERS_TOOL_WINDOW_TOOLBAR = "RemoteServersViewToolbar"; @NonNls private static final String HELP_ID = "Application_Servers_tool_window"; private static final String MESSAGE_CARD = "message"; private static final String EMPTY_SELECTION_MESSAGE = "Select a server or deployment in the tree to view details"; private final Tree myTree; private final CardLayout myPropertiesPanelLayout; private final JPanel myPropertiesPanel; private final JLabel myMessageLabel; private final Map<String, JComponent> myLogComponents = new HashMap<String, JComponent>(); private final DefaultTreeModel myTreeModel; private TreeBuilderBase myBuilder; private AbstractTreeNode<?> myLastSelection; private final Project myProject; public ServersToolWindowContent(@NotNull Project project) { super(new BorderLayout()); myProject = project; myTreeModel = new DefaultTreeModel(new DefaultMutableTreeNode()); myTree = new Tree(myTreeModel); myTree.setRootVisible(false); myTree.setShowsRootHandles(true); myTree.setCellRenderer(new NodeRenderer()); myTree.setLineStyleAngled(); getMainPanel().add(createToolbar(), BorderLayout.WEST); Splitter splitter = new Splitter(false, 0.3f); splitter.setFirstComponent(ScrollPaneFactory.createScrollPane(myTree, SideBorder.LEFT)); myPropertiesPanelLayout = new CardLayout(); myPropertiesPanel = new JPanel(myPropertiesPanelLayout); myMessageLabel = new JLabel(EMPTY_SELECTION_MESSAGE, SwingConstants.CENTER); myPropertiesPanel.add(MESSAGE_CARD, new Wrapper(myMessageLabel)); splitter.setSecondComponent(myPropertiesPanel); getMainPanel().add(splitter, BorderLayout.CENTER); setupBuilder(project); for (RemoteServersViewContributor contributor : RemoteServersViewContributor.EP_NAME.getExtensions()) { contributor.setupTree(myProject, myTree, myBuilder); } myTree.addTreeSelectionListener(new TreeSelectionListener() { @Override public void valueChanged(TreeSelectionEvent e) { onSelectionChanged(); } }); new DoubleClickListener() { @Override protected boolean onDoubleClick(MouseEvent event) { Set<ServersTreeStructure.RemoteServerNode> nodes = getSelectedRemoteServerNodes(); if (nodes.size() == 1) { RemoteServer<?> server = nodes.iterator().next().getValue(); ServerConnectionManager.getInstance().getOrCreateConnection(server).computeDeployments(EmptyRunnable.INSTANCE); return true; } return false; } }.installOn(myTree); } private void onSelectionChanged() { Set<AbstractTreeNode> nodes = myBuilder.getSelectedElements(AbstractTreeNode.class); if (nodes.size() != 1) { myMessageLabel.setText(EMPTY_SELECTION_MESSAGE); myPropertiesPanelLayout.show(myPropertiesPanel, MESSAGE_CARD); myLastSelection = null; return; } AbstractTreeNode<?> node = nodes.iterator().next(); if (Comparing.equal(node, myLastSelection)) { return; } myLastSelection = node; if (node instanceof ServersTreeStructure.LogProvidingNode) { ServersTreeStructure.LogProvidingNode logNode = (ServersTreeStructure.LogProvidingNode)node; LoggingHandlerImpl loggingHandler = logNode.getLoggingHandler(); if (loggingHandler != null) { String cardName = logNode.getLogId(); JComponent oldComponent = myLogComponents.get(cardName); JComponent logComponent = loggingHandler.getConsole().getComponent(); if (!logComponent.equals(oldComponent)) 
{ myLogComponents.put(cardName, logComponent); if (oldComponent != null) { myPropertiesPanel.remove(oldComponent); } myPropertiesPanel.add(cardName, logComponent); } myPropertiesPanelLayout.show(myPropertiesPanel, cardName); } } else if (node instanceof ServersTreeStructure.RemoteServerNode) { updateServerDetails((ServersTreeStructure.RemoteServerNode)node); } else { myMessageLabel.setText(""); myPropertiesPanelLayout.show(myPropertiesPanel, MESSAGE_CARD); } } private void updateServerDetails(ServersTreeStructure.RemoteServerNode node) { RemoteServer<?> server = ((ServersTreeStructure.RemoteServerNode)node).getValue(); ServerConnection connection = ServerConnectionManager.getInstance().getConnection(server); if (connection == null || connection.getStatus() == ConnectionStatus.DISCONNECTED) { myMessageLabel.setText("Double-click on the server node to connect"); } else { myMessageLabel.setText(connection.getStatusText()); } myPropertiesPanelLayout.show(myPropertiesPanel, MESSAGE_CARD); } private void setupBuilder(final @NotNull Project project) { ServersTreeStructure structure = new ServersTreeStructure(project); myBuilder = new TreeBuilderBase(myTree, structure, myTreeModel) { @Override protected boolean isAutoExpandNode(NodeDescriptor nodeDescriptor) { return nodeDescriptor instanceof ServersTreeStructure.RemoteServerNode || nodeDescriptor instanceof ServersTreeStructure.DeploymentNodeImpl; } }; Disposer.register(this, myBuilder); project.getMessageBus().connect().subscribe(ServerConnectionListener.TOPIC, new ServerConnectionListener() { @Override public void onConnectionCreated(@NotNull ServerConnection<?> connection) { getBuilder().queueUpdate(); } @Override public void onConnectionStatusChanged(@NotNull ServerConnection<?> connection) { getBuilder().queueUpdate(); if (myLastSelection instanceof ServersTreeStructure.RemoteServerNode) { updateServerDetails((ServersTreeStructure.RemoteServerNode)myLastSelection); } } @Override public void onDeploymentsChanged(@NotNull ServerConnection<?> connection) { getBuilder().queueUpdate(); } }); } private JComponent createToolbar() { DefaultActionGroup group = new DefaultActionGroup(); group.add(ActionManager.getInstance().getAction(SERVERS_TOOL_WINDOW_TOOLBAR)); group.add(new Separator()); group.add(new ContextHelpAction(HELP_ID)); ActionToolbar actionToolBar = ActionManager.getInstance().createActionToolbar(PLACE_TOOLBAR, group, false); myTree.putClientProperty(DataManager.CLIENT_PROPERTY_DATA_PROVIDER, new DataProvider() { @Override public Object getData(@NonNls String dataId) { if (KEY.getName().equals(dataId)) { return ServersToolWindowContent.this; } for (RemoteServersViewContributor contributor : RemoteServersViewContributor.EP_NAME.getExtensions()) { Object data = contributor.getData(dataId, ServersToolWindowContent.this); if (data != null) { return data; } } return null; } }); actionToolBar.setTargetComponent(myTree); return actionToolBar.getComponent(); } public JPanel getMainPanel() { return this; } public Set<ServerNode> getSelectedServerNodes() { return myBuilder.getSelectedElements(ServerNode.class); } public Set<DeploymentNode> getSelectedDeploymentNodes() { return myBuilder.getSelectedElements(DeploymentNode.class); } public Set<ServersTreeStructure.RemoteServerNode> getSelectedRemoteServerNodes() { return myBuilder.getSelectedElements(ServersTreeStructure.RemoteServerNode.class); } @Override public void dispose() { } public TreeBuilderBase getBuilder() { return myBuilder; } @NotNull public Project getProject() { return myProject; } 
public void select(@NotNull final ServerConnection<?> connection) { myBuilder.select(ServersTreeStructure.RemoteServerNode.class, new TreeVisitor<ServersTreeStructure.RemoteServerNode>() { @Override public boolean visit(@NotNull ServersTreeStructure.RemoteServerNode node) { return node.getValue().equals(connection.getServer()); } }, null, false); } public void select(@NotNull final ServerConnection<?> connection, @NotNull final String deploymentName) { myBuilder.getUi().queueUpdate(connection).doWhenDone(new Runnable() { @Override public void run() { myBuilder.select(ServersTreeStructure.DeploymentNodeImpl.class, new TreeVisitor<ServersTreeStructure.DeploymentNodeImpl>() { @Override public boolean visit(@NotNull ServersTreeStructure.DeploymentNodeImpl node) { AbstractTreeNode parent = node.getParent(); return parent instanceof ServersTreeStructure.RemoteServerNode && ((ServersTreeStructure.RemoteServerNode)parent).getValue().equals(connection.getServer()) && node.getValue().getName().equals(deploymentName); } }, null, false); } }); } }
remote servers view: wrap long error message
platform/remote-servers/impl/src/com/intellij/remoteServer/impl/runtime/ui/ServersToolWindowContent.java
remote servers view: wrap long error message
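Illustrative note (not part of the record above): the "remote servers view: wrap long error message" change routes all status text in ServersToolWindowContent through a single showMessageLabel(String) helper that passes the text through UIUtil.toHtml before setting it on the JLabel, so long connection errors wrap instead of being clipped. The standalone Swing sketch below is hypothetical; its toHtml helper only mimics the role UIUtil.toHtml plays in the patch, and it simply demonstrates why HTML-wrapped label text wraps while plain label text does not.

import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.WindowConstants;
import java.awt.GridLayout;

public class HtmlLabelWrapDemo {

    // Hypothetical stand-in for UIUtil.toHtml: wrapping the text in <html> tags
    // switches the label to Swing's HTML renderer, which wraps to the component width.
    private static String toHtml(String text) {
        return "<html><body>" + text + "</body></html>";
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            String longMessage = "Unable to connect to the remote server: the host is unreachable "
                    + "or the credentials are invalid. Check the server configuration and try again.";

            JFrame frame = new JFrame("JLabel wrapping");
            frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
            frame.setLayout(new GridLayout(2, 1, 0, 10));

            // Plain label: the whole message stays on one line and is clipped with "...".
            frame.add(new JLabel(longMessage, SwingConstants.CENTER));

            // HTML label: the same message wraps across several lines.
            frame.add(new JLabel(toHtml(longMessage), SwingConstants.CENTER));

            frame.setSize(320, 220);
            frame.setLocationRelativeTo(null);
            frame.setVisible(true);
        });
    }
}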
Java
apache-2.0
762d90bfc80426f82e37a172e1408b4588822fe7
0
smartnews/presto,smartnews/presto,smartnews/presto,smartnews/presto,smartnews/presto
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.tests.product.launcher.env.common; import com.google.common.collect.ImmutableMap; import io.trino.tests.product.launcher.docker.DockerFiles; import io.trino.tests.product.launcher.env.DockerContainer; import io.trino.tests.product.launcher.env.Environment; import io.trino.tests.product.launcher.testcontainers.PortBinder; import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; import javax.inject.Inject; import java.time.Duration; import static io.trino.tests.product.launcher.docker.ContainerUtil.forSelectedPorts; import static io.trino.tests.product.launcher.env.EnvironmentContainers.HADOOP; import static io.trino.tests.product.launcher.env.common.Hadoop.CONTAINER_HADOOP_INIT_D; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static org.testcontainers.utility.MountableFile.forHostPath; public class Minio implements EnvironmentExtender { private final DockerFiles dockerFiles; public static final String MINIO_CONTAINER_NAME = "minio"; private static final String MINIO_ACCESS_KEY = "minio-access-key"; private static final String MINIO_SECRET_KEY = "minio-secret-key"; private static final String MINIO_RELEASE = "RELEASE.2021-07-15T22-27-34Z"; private static final int MINIO_PORT = 9080; // minio uses 9000 by default, which conflicts with hadoop private static final int MINIO_CONSOLE_PORT = 9001; private final PortBinder portBinder; @Inject public Minio(DockerFiles dockerFiles, PortBinder portBinder) { this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); this.portBinder = requireNonNull(portBinder, "portBinder is null"); } @Override public void extendEnvironment(Environment.Builder builder) { builder.addContainer(createMinioContainer()); builder.configureContainers(container -> { if (container.getLogicalName().equals(HADOOP)) { container.withCopyFileToContainer( forHostPath(dockerFiles.getDockerFilesHostPath("common/minio/apply-minio-config.sh")), CONTAINER_HADOOP_INIT_D + "apply-minio-config.sh"); } }); } private DockerContainer createMinioContainer() { DockerContainer container = new DockerContainer("minio/minio:" + MINIO_RELEASE, MINIO_CONTAINER_NAME) .withEnv(ImmutableMap.<String, String>builder() .put("MINIO_ACCESS_KEY", MINIO_ACCESS_KEY) .put("MINIO_SECRET_KEY", MINIO_SECRET_KEY) .buildOrThrow()) .withCommand("server", "--address", format("0.0.0.0:%d", MINIO_PORT), "--console-address", format("0.0.0.0:%d", MINIO_CONSOLE_PORT), "/data") .withStartupCheckStrategy(new IsRunningStartupCheckStrategy()) .waitingFor(forSelectedPorts(MINIO_PORT)) .withStartupTimeout(Duration.ofMinutes(1)); portBinder.exposePort(container, MINIO_PORT); portBinder.exposePort(container, MINIO_CONSOLE_PORT); return container; } }
testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/common/Minio.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.tests.product.launcher.env.common; import com.google.common.collect.ImmutableMap; import io.trino.tests.product.launcher.docker.DockerFiles; import io.trino.tests.product.launcher.env.DockerContainer; import io.trino.tests.product.launcher.env.Environment; import io.trino.tests.product.launcher.testcontainers.PortBinder; import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy; import javax.inject.Inject; import java.time.Duration; import static io.trino.tests.product.launcher.docker.ContainerUtil.forSelectedPorts; import static io.trino.tests.product.launcher.env.EnvironmentContainers.HADOOP; import static io.trino.tests.product.launcher.env.common.Hadoop.CONTAINER_HADOOP_INIT_D; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static org.testcontainers.utility.MountableFile.forHostPath; public class Minio implements EnvironmentExtender { private final DockerFiles dockerFiles; public static final String MINIO_CONTAINER_NAME = "minio"; private static final String MINIO_ACCESS_KEY = "minio-access-key"; private static final String MINIO_SECRET_KEY = "minio-secret-key"; private static final String MINIO_RELEASE = "RELEASE.2021-07-15T22-27-34Z"; private static final int MINIO_PORT = 9080; // minio uses 9000 by default, which conflicts with hadoop private final PortBinder portBinder; @Inject public Minio(DockerFiles dockerFiles, PortBinder portBinder) { this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null"); this.portBinder = requireNonNull(portBinder, "portBinder is null"); } @Override public void extendEnvironment(Environment.Builder builder) { builder.addContainer(createMinioContainer()); builder.configureContainers(container -> { if (container.getLogicalName().equals(HADOOP)) { container.withCopyFileToContainer( forHostPath(dockerFiles.getDockerFilesHostPath("common/minio/apply-minio-config.sh")), CONTAINER_HADOOP_INIT_D + "apply-minio-config.sh"); } }); } private DockerContainer createMinioContainer() { DockerContainer container = new DockerContainer("minio/minio:" + MINIO_RELEASE, MINIO_CONTAINER_NAME) .withEnv(ImmutableMap.<String, String>builder() .put("MINIO_ACCESS_KEY", MINIO_ACCESS_KEY) .put("MINIO_SECRET_KEY", MINIO_SECRET_KEY) .buildOrThrow()) .withCommand("server", "--address", format("0.0.0.0:%d", MINIO_PORT), "/data") .withStartupCheckStrategy(new IsRunningStartupCheckStrategy()) .waitingFor(forSelectedPorts(MINIO_PORT)) .withStartupTimeout(Duration.ofMinutes(1)); portBinder.exposePort(container, MINIO_PORT); return container; } }
Expose MinIO web console
testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/common/Minio.java
Expose MinIO web console
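Illustrative note (not part of the record above): the "Expose MinIO web console" change adds a MINIO_CONSOLE_PORT constant (9001), passes "--console-address" to the minio server command, and exposes the extra port through the launcher's PortBinder. As a rough sketch only, the code below expresses the same container setup with plain Testcontainers' GenericContainer instead of the launcher's DockerContainer wrapper; the image tag, ports, and credentials are copied from the record, everything else (class name, wait strategy, printing the mapped ports) is assumed.

import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.utility.DockerImageName;

public class MinioConsoleSketch {

    public static void main(String[] args) {
        int apiPort = 9080;      // S3 API port (9000 is avoided because it clashes with Hadoop)
        int consolePort = 9001;  // web console port, the one newly exposed by the commit

        // Rough Testcontainers equivalent of the launcher's createMinioContainer().
        try (GenericContainer<?> minio =
                     new GenericContainer<>(DockerImageName.parse("minio/minio:RELEASE.2021-07-15T22-27-34Z"))
                             .withEnv("MINIO_ACCESS_KEY", "minio-access-key")
                             .withEnv("MINIO_SECRET_KEY", "minio-secret-key")
                             .withCommand("server",
                                     "--address", "0.0.0.0:" + apiPort,
                                     "--console-address", "0.0.0.0:" + consolePort,
                                     "/data")
                             .withExposedPorts(apiPort, consolePort)
                             .waitingFor(Wait.forListeningPort())) {
            minio.start();
            System.out.println("S3 API:      http://localhost:" + minio.getMappedPort(apiPort));
            System.out.println("Web console: http://localhost:" + minio.getMappedPort(consolePort));
        }
    }
}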
Java
apache-2.0
6c4c15e4412651a97d2c1813ded776f8a6fd4ede
0
sanjeewa-malalgoda/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,harsha89/carbon-apimgt,pubudu538/carbon-apimgt,prasa7/carbon-apimgt,praminda/carbon-apimgt,pubudu538/carbon-apimgt,bhathiya/carbon-apimgt,tharindu1st/carbon-apimgt,tharikaGitHub/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,wso2/carbon-apimgt,harsha89/carbon-apimgt,chamindias/carbon-apimgt,chamilaadhi/carbon-apimgt,fazlan-nazeem/carbon-apimgt,tharindu1st/carbon-apimgt,prasa7/carbon-apimgt,uvindra/carbon-apimgt,isharac/carbon-apimgt,Rajith90/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,praminda/carbon-apimgt,uvindra/carbon-apimgt,bhathiya/carbon-apimgt,ruks/carbon-apimgt,malinthaprasan/carbon-apimgt,chamilaadhi/carbon-apimgt,ruks/carbon-apimgt,fazlan-nazeem/carbon-apimgt,fazlan-nazeem/carbon-apimgt,Rajith90/carbon-apimgt,jaadds/carbon-apimgt,praminda/carbon-apimgt,fazlan-nazeem/carbon-apimgt,chamindias/carbon-apimgt,tharindu1st/carbon-apimgt,harsha89/carbon-apimgt,malinthaprasan/carbon-apimgt,nuwand/carbon-apimgt,isharac/carbon-apimgt,isharac/carbon-apimgt,wso2/carbon-apimgt,tharikaGitHub/carbon-apimgt,uvindra/carbon-apimgt,jaadds/carbon-apimgt,tharikaGitHub/carbon-apimgt,pubudu538/carbon-apimgt,jaadds/carbon-apimgt,bhathiya/carbon-apimgt,nuwand/carbon-apimgt,ruks/carbon-apimgt,tharikaGitHub/carbon-apimgt,pubudu538/carbon-apimgt,chamilaadhi/carbon-apimgt,uvindra/carbon-apimgt,bhathiya/carbon-apimgt,Rajith90/carbon-apimgt,malinthaprasan/carbon-apimgt,prasa7/carbon-apimgt,nuwand/carbon-apimgt,nuwand/carbon-apimgt,malinthaprasan/carbon-apimgt,isharac/carbon-apimgt,wso2/carbon-apimgt,ruks/carbon-apimgt,jaadds/carbon-apimgt,Rajith90/carbon-apimgt,chamindias/carbon-apimgt,chamilaadhi/carbon-apimgt,tharindu1st/carbon-apimgt,prasa7/carbon-apimgt,chamindias/carbon-apimgt,harsha89/carbon-apimgt,wso2/carbon-apimgt
/* * Copyright WSO2 Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.apimgt.impl; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.AxisFault; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.json.JSONException; import org.json.JSONObject; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.dto.ClientCertificateDTO; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIProduct; import org.wso2.carbon.apimgt.api.model.APIProductIdentifier; import org.wso2.carbon.apimgt.api.model.APIProductResource; import org.wso2.carbon.apimgt.gateway.dto.stub.APIData; import org.wso2.carbon.apimgt.gateway.dto.stub.ResourceData; import org.wso2.carbon.apimgt.impl.certificatemgt.CertificateManagerImpl; import org.wso2.carbon.apimgt.impl.certificatemgt.exceptions.CertificateManagementException; import org.wso2.carbon.apimgt.impl.dao.CertificateMgtDAO; import org.wso2.carbon.apimgt.impl.dto.Environment; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.template.APITemplateBuilder; import org.wso2.carbon.apimgt.impl.utils.APIGatewayAdminClient; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.governance.api.exception.GovernanceException; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.namespace.QName; import javax.xml.stream.XMLStreamException; public class APIGatewayManager { private static final Log log = LogFactory.getLog(APIGatewayManager.class); private static APIGatewayManager instance; private Map<String, Environment> environments; private boolean debugEnabled = log.isDebugEnabled(); private final String ENDPOINT_PRODUCTION = "_PRODUCTION_"; private final String ENDPOINT_SANDBOX = "_SANDBOX_"; private static final String PRODUCT_PREFIX = "prod"; private static final String PRODUCT_VERSION = "1.0.0"; private APIGatewayManager() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService() .getAPIManagerConfiguration(); environments = config.getApiGatewayEnvironments(); } public synchronized static APIGatewayManager getInstance() { if (instance == null) { instance = new APIGatewayManager(); } return instance; } /** * Publishes an API to all configured Gateways. 
* * @param api * - The API to be published * @param builder * - The template builder * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> publishToGateway(API api, APITemplateBuilder builder, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() == null) { return failedEnvironmentsMap; } long startTime; long endTime; if (debugEnabled) { log.debug("API to be published: " + api.getId()); log.debug("Number of environments to be published to: " + api.getEnvironments().size()); } for (String environmentName : api.getEnvironments()) { long startTimePublishToGateway = System.currentTimeMillis(); Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without publishing if (environment == null) { continue; } APIGatewayAdminClient client; try { client = new APIGatewayAdminClient(environment); String operation; long apiGetStartTime = System.currentTimeMillis(); APIData apiData = client.getApi(tenantDomain, api.getId()); endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Time taken to fetch API Data: " + (endTime - apiGetStartTime) / 1000 + " seconds"); } // If the API exists in the Gateway if (apiData != null) { startTime = System.currentTimeMillis(); // If the Gateway type is 'production' and the production url // has been removed // Or if the Gateway type is 'sandbox' and the sandbox url has // been removed. if ((APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType()) && !APIUtil.isProductionEndpointsExists(api.getEndpointConfig())) || (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType()) && !APIUtil.isSandboxEndpointsExists(api.getEndpointConfig()))) { if (debugEnabled) { log.debug("Removing API " + api.getId().getApiName() + " from Environment " + environment.getName() + " since its relevant URL has been removed."); } client.deleteApi(tenantDomain, api.getId()); if (api.isPublishedDefaultVersion()) { if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.deleteDefaultApi(tenantDomain, api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); undeployCustomSequences(client, api, tenantDomain, environment); unDeployClientCertificates(client, api, tenantDomain); } else { if (debugEnabled) { log.debug("API exists, updating existing API " + api.getId().getApiName() + " in environment " + environment.getName()); } //Deploy the fault sequence first since it has to be available by the time the API is deployed. deployAPIFaultSequence(client, api, tenantDomain, environment); operation = "update"; //Update the API if (api.getImplementation().equalsIgnoreCase(APIConstants.IMPLEMENTATION_TYPE_INLINE)) { client.updateApiForInlineScript(builder, tenantDomain, api.getId()); } else if (api.getImplementation().equalsIgnoreCase(APIConstants.IMPLEMENTATION_TYPE_ENDPOINT)) { client.updateApi(builder, tenantDomain, api.getId()); client.saveEndpoint(api, builder, tenantDomain); } if (api.isDefaultVersion() || api.isPublishedDefaultVersion()) {//api.isPublishedDefaultVersion() check is used to detect and update when context etc. 
is changed in the api which is not the default version but has a published default api if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.updateDefaultApi(builder, tenantDomain, api.getId().getVersion(), api.getId()); } else { client.addDefaultAPI(builder, tenantDomain, api.getId().getVersion(), api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); long customSeqStartTime = System.currentTimeMillis(); //Update the custom sequences of the API updateCustomSequences(client, api, tenantDomain, environment); endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Time taken to deploy custom Sequences: " + (endTime - customSeqStartTime) / 1000 + " seconds"); } updateClientCertificates(client, api, tenantDomain); } endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing API (if the API exists in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } else { // If the Gateway type is 'production' and a production url has // not been specified // Or if the Gateway type is 'sandbox' and a sandbox url has not // been specified startTime = System.currentTimeMillis(); if ((APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType()) && !APIUtil.isProductionEndpointsExists(api.getEndpointConfig())) || (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType()) && !APIUtil.isSandboxEndpointsExists(api.getEndpointConfig()))) { if (debugEnabled) { log.debug("Not adding API to environment " + environment.getName() + " since its endpoint URL " + "cannot be found"); } } else { if (debugEnabled) { log.debug("API does not exist, adding new API " + api.getId().getApiName() + " in environment " + environment.getName()); } //Deploy the fault sequence first since it has to be available by the time the API is deployed. deployAPIFaultSequence(client, api, tenantDomain, environment); deployClientCertificates(client, api, tenantDomain); if (!APIConstants.APIType.WS.toString().equals(api.getType())) { //Add the API if (APIConstants.IMPLEMENTATION_TYPE_INLINE.equalsIgnoreCase(api.getImplementation())) { client.addPrototypeApiScriptImpl(builder, tenantDomain, api.getId()); } else if (APIConstants.IMPLEMENTATION_TYPE_ENDPOINT .equalsIgnoreCase(api.getImplementation())) { client.addApi(builder, tenantDomain, api.getId()); client.addEndpoint(api, builder, tenantDomain); } if (api.isDefaultVersion()) { if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.updateDefaultApi(builder, tenantDomain, api.getId().getVersion(), api.getId()); } else { client.addDefaultAPI(builder, tenantDomain, api.getId().getVersion(), api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); //Deploy the custom sequences of the API. 
deployCustomSequences(client, api, tenantDomain, environment); } else { deployWebsocketAPI(api, client); } } endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing API (if the API does not exist in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); log.error("Error occurred when publish to gateway " + environmentName, axisFault); } catch (APIManagementException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (JSONException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (EndpointAdminException ex) { log.error("Error occurred when endpoint add/update operation" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (CertificateManagementException ex) { log.error("Error occurred while adding/updating client certificate in " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } long endTimePublishToGateway = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing to gateway : " + environmentName + " total time taken : " + (endTimePublishToGateway - startTimePublishToGateway) / 1000 + " seconds"); } } updateRemovedClientCertificates(api, tenantDomain); return failedEnvironmentsMap; } /** * Publishes an API Product to all configured Gateways. 
* * @param apiProduct * - The API Product to be published * @param builder * - The template builder * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> publishToGateway(APIProduct apiProduct, APITemplateBuilder builder, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (apiProduct.getEnvironments() == null) { return failedEnvironmentsMap; } long startTime = 0; long startTimePublishToGateway = 0; long apiGetStartTime = 0; APIProductIdentifier apiProductId = apiProduct.getId(); APIIdentifier id = new APIIdentifier(PRODUCT_PREFIX, apiProductId.getName(), PRODUCT_VERSION); if (debugEnabled) { log.debug("API to be published: " + id); log.debug("Number of environments to be published to: " + apiProduct.getEnvironments().size()); } for (String environmentName : apiProduct.getEnvironments()) { if (debugEnabled) { startTimePublishToGateway = System.currentTimeMillis(); } Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without publishing if (environment == null) { continue; } APIGatewayAdminClient client; try { client = new APIGatewayAdminClient(environment); if (debugEnabled) { apiGetStartTime = System.currentTimeMillis(); } APIData apiData = client.getApi(tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Time taken to fetch API Data: " + (endTime - apiGetStartTime) / 1000 + " seconds"); } // If the API exists in the Gateway if (apiData != null) { if (debugEnabled) { startTime = System.currentTimeMillis(); } if (debugEnabled) { log.debug("API exists, updating existing API " + id.getApiName() + " in environment " + environment.getName()); } //Update the API client.updateApi(builder, tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Publishing API (if the API exists in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } else { // If the Gateway type is 'production' and a production url has // not been specified // Or if the Gateway type is 'sandbox' and a sandbox url has not // been specified if (debugEnabled) { startTime = System.currentTimeMillis(); } if (debugEnabled) { log.debug("API does not exist, adding new API " + id.getApiName() + " in environment " + environment.getName()); } //Add the API client.addApi(builder, tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Publishing API (if the API does not exist in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); log.error("Error occurred when publish to gateway " + environmentName, axisFault); } if (debugEnabled) { long endTimePublishToGateway = System.currentTimeMillis(); log.debug("Publishing to gateway : " + environmentName + " total time taken : " + (endTimePublishToGateway - startTimePublishToGateway) / 1000 + " seconds"); } } return failedEnvironmentsMap; } /** * Removed an API from the configured Gateways * * @param api * - The API to be removed * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> removeFromGateway(API api, String 
tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() != null) { for (String environmentName : api.getEnvironments()) { try { Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without removing if (environment == null) { continue; } APIGatewayAdminClient client = new APIGatewayAdminClient(environment); unDeployClientCertificates(client, api, tenantDomain); if(!APIConstants.APIType.WS.toString().equals(api.getType())) { APIIdentifier id = api.getId(); if (client.getApi(tenantDomain, id) != null) { if (debugEnabled) { log.debug("Removing API " + api.getId().getApiName() + " From environment " + environment.getName()); } if ("INLINE".equals(api.getImplementation()) || "MARKDOWN".equals(api.getImplementation())) { client.deleteApi(tenantDomain, api.getId()); undeployCustomSequences(client, api, tenantDomain, environment); } else { client.deleteEndpoint(api, tenantDomain); client.deleteApi(tenantDomain, api.getId()); undeployCustomSequences(client, api, tenantDomain, environment); } } } else { String fileName = api.getContext().replace('/', '-'); String[] fileNames = new String[2]; fileNames[0] = ENDPOINT_PRODUCTION + fileName; fileNames[1] = ENDPOINT_SANDBOX + fileName; if (client.isExistingSequence(fileNames[0], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileNames[0], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } if (client.isExistingSequence(fileNames[1], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileNames[1], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } if (api.isPublishedDefaultVersion()) { APIIdentifier id = api.getId(); if (client.getDefaultApi(tenantDomain, id) != null) { client.deleteDefaultApi(tenantDomain, api.getId()); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error occurred when removing from gateway " + environmentName, axisFault); failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); } catch (EndpointAdminException ex) { log.error("Error occurred when deleting endpoint from gateway" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (CertificateManagementException ex) { log.error("Error occurred when deleting certificate from gateway" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } } updateRemovedClientCertificates(api, tenantDomain); } return failedEnvironmentsMap; } /** * add websoocket api to the gateway * * @param api * @param client * @throws APIManagementException */ public void deployWebsocketAPI(API api, APIGatewayAdminClient client) throws APIManagementException, JSONException { try { String production_endpoint = null; String sandbox_endpoint = null; JSONObject obj = new JSONObject(api.getEndpointConfig()); if (obj.has(APIConstants.API_DATA_PRODUCTION_ENDPOINTS)) { production_endpoint = obj.getJSONObject(APIConstants.API_DATA_PRODUCTION_ENDPOINTS).getString("url"); } if (obj.has(APIConstants.API_DATA_SANDBOX_ENDPOINTS)) { sandbox_endpoint = obj.getJSONObject(APIConstants.API_DATA_SANDBOX_ENDPOINTS).getString("url"); } OMElement element; try { if (production_endpoint != null) { String content = createSeqString(api, 
production_endpoint, ENDPOINT_PRODUCTION); element = AXIOMUtil.stringToOM(content); String fileName = element.getAttributeValue(new QName("name")); if (client.isExistingSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } client.addSequence(element, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } if (sandbox_endpoint != null) { String content = createSeqString(api, sandbox_endpoint, ENDPOINT_SANDBOX); element = AXIOMUtil.stringToOM(content); String fileName = element.getAttributeValue(new QName("name")); if (client.isExistingSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } client.addSequence(element, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } catch (AxisFault e) { String msg = "Error while parsing the policy to get the eligibility query: "; log.error(msg, e); throw new APIManagementException(msg); } } catch (XMLStreamException e) { String msg = "Error while parsing the policy to get the eligibility query: "; log.error(msg, e); throw new APIManagementException(msg); } } /** * add new api version at the API Gateway * * @param artifact * @param api */ public void createNewWebsocketApiVersion(GenericArtifact artifact, API api) { try { APIGatewayManager gatewayManager = APIGatewayManager.getInstance(); APIGatewayAdminClient client; Set<String> environments = APIUtil.extractEnvironmentsForAPI( artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS)); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); for (String environmentName : environments) { Environment environment = this.environments.get(environmentName); client = new APIGatewayAdminClient(environment); try { gatewayManager.deployWebsocketAPI(api, client); } catch (JSONException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); } } } catch (APIManagementException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } catch (AxisFault ex) { log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } catch (GovernanceException ex) { log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } } /** * create body of sequence * * @param api * @param url * @return */ public String createSeqString(API api, String url, String urltype) { String context = api.getContext(); context = urltype + context; String seq = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<sequence xmlns=\"http://ws.apache.org/ns/synapse\" name=\"" + context.replace('/', '-') + "\">\n" + " <property name=\"OUT_ONLY\" value=\"true\"/>\n" + " <script language=\"js\">var sub_path = mc.getProperty(\"websocket.subscriber.path\");\t \n" + " \tvar queryParamString = sub_path.split(\"\\\\?\")[1];\n" + " if(queryParamString != undefined) {\t \n" + "\t\tmc.setProperty('queryparams', \"?\" + 
queryParamString);\n" + "\t\t}\t\t\n" + " </script>\n" + " <property xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns=\"http://org.apache.synapse/xsd\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"queryparams\"\n" + " expression=\"$ctx:queryparams\"/>\n" + " <property name=\"urlVal\" value=\""+ url + "\"/>\n" + " <property xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"fullUrl\"\n" + " expression=\"fn:concat(get-property('urlVal'), get-property('queryparams'))\"\n" + " type=\"STRING\"/>\n" + " <header xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"To\"\n" + " expression=\"$ctx:fullUrl\"/>\n" + " <send>\n" + " <endpoint>\n" + " <default/>\n" + " </endpoint>\n" + " </send>\n" + "</sequence>"; return seq; } public Map<String, String> removeDefaultAPIFromGateway(API api, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() != null) { for (String environmentName : api.getEnvironments()) { try { Environment environment = environments.get(environmentName); APIGatewayAdminClient client = new APIGatewayAdminClient(environment); APIIdentifier id = api.getId(); if (client.getDefaultApi(tenantDomain, id) != null) { if (debugEnabled) { log.debug("Removing Default API " + api.getId().getApiName() + " From environment " + environment.getName()); } client.deleteDefaultApi(tenantDomain, api.getId()); } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error occurred when removing default api from gateway " + environmentName, axisFault); failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); } } } return failedEnvironmentsMap; } /** * Checks whether the API has been published. * * @param api * - The API to be cheked. * @param tenantDomain * - Tenant Domain of the publisher * @return True if the API is available in at least one Gateway. False if * available in none. */ public boolean isAPIPublished(API api, String tenantDomain)throws APIManagementException { for (Environment environment : environments.values()) { try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); // If the API exists in at least one environment, consider as // published and return true. APIIdentifier id = api.getId(); if (client.getApi(tenantDomain, id) != null) { return true; } } catch (AxisFault axisFault) { /* didn't throw this exception to check api available in all the environments therefore we didn't throw exception to avoid if gateway unreachable affect */ if (!APIConstants.CREATED.equals(api.getStatus())) { log.error("Error occurred when check api is published on gateway" + environment.getName(), axisFault); } } } return false; } /** * Get the endpoint Security type of the published API * * @param api - The API to be checked. 
* @param tenantDomain - Tenant Domain of the publisher * @return Endpoint security type; Basic or Digest */ public String getAPIEndpointSecurityType(API api, String tenantDomain) throws APIManagementException { for (Environment environment : environments.values()) { try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); APIIdentifier id = api.getId(); APIData apiData = client.getApi(tenantDomain, id); if (apiData != null) { ResourceData[] resourceData = apiData.getResources(); for (ResourceData resource : resourceData) { if (resource != null && resource.getInSeqXml() != null && resource.getInSeqXml().contains("DigestAuthMediator")) { return APIConstants.APIEndpointSecurityConstants.DIGEST_AUTH; } } } } catch (AxisFault axisFault) { // didn't throw this exception to check api available in all the environments // therefore we didn't throw exception to avoid if gateway unreachable affect if (!APIConstants.CREATED.equals(api.getStatus())) { log.error("Error occurred when check api endpoint security type on gateway" + environment.getName(), axisFault); } } } return APIConstants.APIEndpointSecurityConstants.BASIC_AUTH; } public void setProductResourceSequences(APIProviderImpl apiProvider, APIProduct apiProduct, String tenantDomain) throws APIManagementException { for (APIProductResource resource : apiProduct.getProductResources()) { APIIdentifier apiIdentifier = resource.getApiIdentifier(); API api = apiProvider.getAPI(apiIdentifier); for (String environmentName : api.getEnvironments()) { Environment environment = environments.get(environmentName); try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); String inSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; if (client.isExistingSequence(inSequenceKey, tenantDomain)) { resource.setInSequenceName(inSequenceKey); } String outSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; if (client.isExistingSequence(outSequenceKey, tenantDomain)) { resource.setOutSequenceName(outSequenceKey); } String faultSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; if (client.isExistingSequence(faultSequenceKey, tenantDomain)) { resource.setFaultSequenceName(faultSequenceKey); } } catch (AxisFault axisFault) { throw new APIManagementException("Error occurred while checking if product resources " + "have custom sequences", axisFault); } } } } /** * To deploy client certificate in given API environment. * * @param client API GatewayAdminClient . * @param api Relevant API. * @param tenantDomain Tenant domain. * @throws CertificateManagementException Certificate Management Exception. * @throws AxisFault AxisFault. */ private void deployClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.addClientCertificate(clientCertificateDTO.getCertificate(), clientCertificateDTO.getAlias() + "_" + tenantId); } } } /** * To update client certificate in relevant API gateway environment. 
* * @param client API Gateway admin client. * @param api Relevant API. * @param tenantDomain Tenant domain. */ private void updateClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<String> aliasList = CertificateMgtDAO.getInstance() .getDeletedClientCertificateAlias(api.getId(), tenantId); for (String alias : aliasList) { client.deleteClientCertificate(alias + "_" + tenantId); } List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.addClientCertificate(clientCertificateDTO.getCertificate(), clientCertificateDTO.getAlias() + "_" + tenantId); } } } /** * To update the database instance with the successfully removed client certificates from the gateway. * * @param api Relevant API related with the removed certificate. * @param tenantDomain Tenant domain of the API. */ private void updateRemovedClientCertificates(API api, String tenantDomain) { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } try { CertificateMgtDAO.getInstance().updateRemovedCertificatesFromGateways(api.getId(), APIUtil.getTenantIdFromTenantDomain(tenantDomain)); /* The flow does not need to be blocked, as this failure is not related to updating client certificates in the gateway, only to updating the database. There is no harm in the database having outdated certificate information. */ } catch (CertificateManagementException e) { log.error("Certificate Management Exception while trying to update the removed certificates from gateways " + "for the api " + api.getId() + " for the tenant domain " + tenantDomain, e); } } /** * To undeploy the client certificates from the gateway environment. * * @param client APIGatewayAdmin Client. * @param api Relevant API the particular certificate is related with. * @param tenantDomain Tenant domain of the API. * @throws CertificateManagementException Certificate Management Exception. * @throws AxisFault AxisFault.
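* Note: this removes both the certificates currently stored for the API and any aliases already marked as deleted, so nothing stale is left behind on the gateway.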
*/ private void unDeployClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.deleteClientCertificate(clientCertificateDTO.getAlias() + "_" + tenantId); } } List<String> aliasList = CertificateMgtDAO.getInstance() .getDeletedClientCertificateAlias(api.getId(), tenantId); for (String alias : aliasList) { client.deleteClientCertificate(alias + "_" + tenantId); } } /** * Get the specified in/out sequences from api object * * @param api -API object * @param tenantDomain * @param environment * @throws APIManagementException * @throws AxisFault */ private void deployCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (APIUtil.isSequenceDefined(api.getInSequence())) { deployInSequence(client, api, tenantId, tenantDomain, environment); } if (APIUtil.isSequenceDefined(api.getOutSequence())) { deployOutSequence(client, api, tenantId, tenantDomain, environment); } } catch (Exception e) { String msg = "Error in deploying the sequence to gateway"; log.error(msg, e); throw new APIManagementException(msg); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } private void deployInSequence(APIGatewayAdminClient sequenceAdminServiceClient, API api, int tenantId, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { String inSequenceName = api.getInSequence(); OMElement inSequence = APIUtil.getCustomSequence(inSequenceName, tenantId, "in", api.getId()); if (inSequence != null) { String inSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; if (inSequence.getAttribute(new QName("name")) != null) { inSequence.getAttribute(new QName("name")).setAttributeValue(inSeqExt); } sequenceAdminServiceClient.addSequence(inSequence, tenantDomain); } } private void deployOutSequence(APIGatewayAdminClient client, API api, int tenantId, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { String outSequenceName = api.getOutSequence(); OMElement outSequence = APIUtil.getCustomSequence(outSequenceName, tenantId, "out", api.getId()); if (outSequence != null) { String outSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; if (outSequence.getAttribute(new QName("name")) != null) { outSequence.getAttribute(new QName("name")).setAttributeValue(outSeqExt); } client.addSequence(outSequence, tenantDomain); } } /** * Undeploy the sequences deployed in synapse * * @param api * @param 
tenantDomain * @param environment * @throws APIManagementException */ private void undeployCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else{ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } if (APIUtil.isSequenceDefined(api.getInSequence())) { String inSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; client.deleteSequence(inSequence, tenantDomain); } if (APIUtil.isSequenceDefined(api.getOutSequence())) { String outSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; client.deleteSequence(outSequence, tenantDomain); } if (APIUtil.isSequenceDefined(api.getFaultSequence())) { String faultSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; if(client.isExistingSequence(faultSequence, tenantDomain)) { client.deleteSequence(faultSequence, tenantDomain); } } } catch (Exception e) { String msg = "Error in deleting the sequence from gateway"; log.error(msg, e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Update the custom sequences in gateway * @param api * @param tenantDomain * @param environment * @throws APIManagementException */ private void updateCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException { //If sequences have been added, updated or removed. if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getOldInSequence()) || APIUtil.isSequenceDefined(api.getOldOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else{ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); //If an inSequence has been added, updated or removed. if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOldInSequence())) { String inSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; //If sequence already exists if (client.isExistingSequence(inSequenceKey, tenantDomain)) { //Delete existing sequence client.deleteSequence(inSequenceKey, tenantDomain); } //If an inSequence has been added or updated. if(APIUtil.isSequenceDefined(api.getInSequence())){ //Deploy the inSequence deployInSequence(client, api, tenantId, tenantDomain, environment); } } //If an outSequence has been added, updated or removed. if (APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getOldOutSequence())) { String outSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; //If the outSequence exists. if (client.isExistingSequence(outSequence, tenantDomain)) { //Delete existing outSequence client.deleteSequence(outSequence, tenantDomain); } //If an outSequence has been added or updated. 
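// Sequence "update" here is implemented as delete-then-redeploy: any existing sequence with this key is
// removed first, and it is deployed again only if the API still defines it, so a sequence that was
// removed from the API simply stays undeployed.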
if (APIUtil.isSequenceDefined(api.getOutSequence())){ //Deploy outSequence deployOutSequence(client, api, tenantId, tenantDomain, environment); } } } catch (Exception e) { String msg = "Error in updating the sequence at the Gateway"; log.error(msg, e); throw new APIManagementException(msg, e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } private void deployAPIFaultSequence(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException { String faultSequenceName = api.getFaultSequence(); String faultSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; boolean isTenantFlowStarted = false; try { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; if (!StringUtils.isEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } //If a fault sequence has be defined. if (APIUtil.isSequenceDefined(faultSequenceName)) { int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); //If the sequence already exists if (client.isExistingSequence(faultSeqExt, tenantDomain)) { //Delete the sequence. We need to redeploy afterwards since the sequence may have been updated. client.deleteSequence(faultSeqExt, tenantDomain); } //Get the fault sequence xml OMElement faultSequence = APIUtil.getCustomSequence(faultSequenceName, tenantId, APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT, api.getId()); if (faultSequence != null) { if (APIUtil.isPerAPISequence(faultSequenceName, tenantId, api.getId(), APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT)) { if (faultSequence.getAttribute(new QName("name")) != null) { faultSequence.getAttribute(new QName("name")).setAttributeValue(faultSeqExt); } } else { //If the previous sequence was a per API fault sequence delete it if (client.isExistingSequence(faultSequenceName, tenantDomain)) { client.deleteSequence(faultSequenceName, tenantDomain); } } //Deploy the fault sequence client.addSequence(faultSequence, tenantDomain); } } else { if (client.isExistingSequence(faultSeqExt, tenantDomain)) { client.deleteSequence(faultSeqExt, tenantDomain); } } } catch (AxisFault e) { String msg = "Error while updating the fault sequence at the Gateway"; log.error(msg, e); throw new APIManagementException(msg, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Store the secured endpoint username password to registry * @param api * @param tenantDomain * @param environment * @throws APIManagementException */ private void setSecureVaultProperty(APIGatewayAdminClient securityAdminClient, API api, String tenantDomain, Environment environment) throws APIManagementException { boolean isSecureVaultEnabled = Boolean.parseBoolean(ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService(). getAPIManagerConfiguration().getFirstProperty(APIConstants.API_SECUREVAULT_ENABLE)); if (api.isEndpointSecured() && isSecureVaultEnabled) { try { securityAdminClient.setSecureVaultProperty(api, tenantDomain); } catch (Exception e) { String msg = "Error in setting secured password."; log.error(msg + ' ' + e.getLocalizedMessage(), e); throw new APIManagementException(msg); } } } }
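// --- Editorial sketch, not part of the original class ---
// A minimal, hypothetical illustration of how a publisher-side caller typically drives the manager
// above. Only the public methods of APIGatewayManager are used; the API, APITemplateBuilder and
// tenantDomain values are assumed to be supplied by the caller, and this helper class itself is an
// assumption added purely for illustration.
class APIGatewayManagerUsageSketch {
    void publishAndReportFailures(API api, APITemplateBuilder builder, String tenantDomain) {
        // publishToGateway(...) does not throw for an unreachable gateway; it returns a map of
        // environment name -> failure message so partial failures can be surfaced in the UI.
        Map<String, String> failedEnvironments =
                APIGatewayManager.getInstance().publishToGateway(api, builder, tenantDomain);
        for (Map.Entry<String, String> entry : failedEnvironments.entrySet()) {
            System.err.println("Publishing to gateway " + entry.getKey() + " failed: " + entry.getValue());
        }
    }
}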
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIGatewayManager.java
/* * Copyright WSO2 Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.apimgt.impl; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.AxisFault; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.json.JSONException; import org.json.JSONObject; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.dto.ClientCertificateDTO; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.gateway.dto.stub.APIData; import org.wso2.carbon.apimgt.gateway.dto.stub.ResourceData; import org.wso2.carbon.apimgt.impl.certificatemgt.CertificateManagerImpl; import org.wso2.carbon.apimgt.impl.certificatemgt.exceptions.CertificateManagementException; import org.wso2.carbon.apimgt.impl.dao.CertificateMgtDAO; import org.wso2.carbon.apimgt.impl.dto.Environment; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.template.APITemplateBuilder; import org.wso2.carbon.apimgt.impl.utils.APIGatewayAdminClient; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.governance.api.exception.GovernanceException; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import java.util.*; import javax.xml.namespace.QName; import javax.xml.stream.XMLStreamException; public class APIGatewayManager { private static final Log log = LogFactory.getLog(APIGatewayManager.class); private static APIGatewayManager instance; private Map<String, Environment> environments; private boolean debugEnabled = log.isDebugEnabled(); private final String ENDPOINT_PRODUCTION = "_PRODUCTION_"; private final String ENDPOINT_SANDBOX = "_SANDBOX_"; private static final String PRODUCT_PREFIX = "prod"; private static final String PRODUCT_VERSION = "1.0.0"; private APIGatewayManager() { APIManagerConfiguration config = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService() .getAPIManagerConfiguration(); environments = config.getApiGatewayEnvironments(); } public synchronized static APIGatewayManager getInstance() { if (instance == null) { instance = new APIGatewayManager(); } return instance; } /** * Publishes an API to all configured Gateways. 
* * @param api * - The API to be published * @param builder * - The template builder * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> publishToGateway(API api, APITemplateBuilder builder, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() == null) { return failedEnvironmentsMap; } long startTime; long endTime; if (debugEnabled) { log.debug("API to be published: " + api.getId()); log.debug("Number of environments to be published to: " + api.getEnvironments().size()); } for (String environmentName : api.getEnvironments()) { long startTimePublishToGateway = System.currentTimeMillis(); Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without publishing if (environment == null) { continue; } APIGatewayAdminClient client; try { client = new APIGatewayAdminClient(environment); String operation; long apiGetStartTime = System.currentTimeMillis(); APIData apiData = client.getApi(tenantDomain, api.getId()); endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Time taken to fetch API Data: " + (endTime - apiGetStartTime) / 1000 + " seconds"); } // If the API exists in the Gateway if (apiData != null) { startTime = System.currentTimeMillis(); // If the Gateway type is 'production' and the production url // has been removed // Or if the Gateway type is 'sandbox' and the sandbox url has // been removed. if ((APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType()) && !APIUtil.isProductionEndpointsExists(api.getEndpointConfig())) || (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType()) && !APIUtil.isSandboxEndpointsExists(api.getEndpointConfig()))) { if (debugEnabled) { log.debug("Removing API " + api.getId().getApiName() + " from Environment " + environment.getName() + " since its relevant URL has been removed."); } client.deleteApi(tenantDomain, api.getId()); if (api.isPublishedDefaultVersion()) { if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.deleteDefaultApi(tenantDomain, api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); undeployCustomSequences(client, api, tenantDomain, environment); unDeployClientCertificates(client, api, tenantDomain); } else { if (debugEnabled) { log.debug("API exists, updating existing API " + api.getId().getApiName() + " in environment " + environment.getName()); } //Deploy the fault sequence first since it has to be available by the time the API is deployed. deployAPIFaultSequence(client, api, tenantDomain, environment); operation = "update"; //Update the API if (api.getImplementation().equalsIgnoreCase(APIConstants.IMPLEMENTATION_TYPE_INLINE)) { client.updateApiForInlineScript(builder, tenantDomain, api.getId()); } else if (api.getImplementation().equalsIgnoreCase(APIConstants.IMPLEMENTATION_TYPE_ENDPOINT)) { client.updateApi(builder, tenantDomain, api.getId()); client.saveEndpoint(api, builder, tenantDomain); } if (api.isDefaultVersion() || api.isPublishedDefaultVersion()) {//api.isPublishedDefaultVersion() check is used to detect and update when context etc. 
is changed in the api which is not the default version but has a published default api if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.updateDefaultApi(builder, tenantDomain, api.getId().getVersion(), api.getId()); } else { client.addDefaultAPI(builder, tenantDomain, api.getId().getVersion(), api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); long customSeqStartTime = System.currentTimeMillis(); //Update the custom sequences of the API updateCustomSequences(client, api, tenantDomain, environment); endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Time taken to deploy custom Sequences: " + (endTime - customSeqStartTime) / 1000 + " seconds"); } updateClientCertificates(client, api, tenantDomain); } endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing API (if the API exists in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } else { // If the Gateway type is 'production' and a production url has // not been specified // Or if the Gateway type is 'sandbox' and a sandbox url has not // been specified startTime = System.currentTimeMillis(); if ((APIConstants.GATEWAY_ENV_TYPE_PRODUCTION.equals(environment.getType()) && !APIUtil.isProductionEndpointsExists(api.getEndpointConfig())) || (APIConstants.GATEWAY_ENV_TYPE_SANDBOX.equals(environment.getType()) && !APIUtil.isSandboxEndpointsExists(api.getEndpointConfig()))) { if (debugEnabled) { log.debug("Not adding API to environment " + environment.getName() + " since its endpoint URL " + "cannot be found"); } } else { if (debugEnabled) { log.debug("API does not exist, adding new API " + api.getId().getApiName() + " in environment " + environment.getName()); } //Deploy the fault sequence first since it has to be available by the time the API is deployed. deployAPIFaultSequence(client, api, tenantDomain, environment); deployClientCertificates(client, api, tenantDomain); if (!APIConstants.APIType.WS.toString().equals(api.getType())) { //Add the API if (APIConstants.IMPLEMENTATION_TYPE_INLINE.equalsIgnoreCase(api.getImplementation())) { client.addPrototypeApiScriptImpl(builder, tenantDomain, api.getId()); } else if (APIConstants.IMPLEMENTATION_TYPE_ENDPOINT .equalsIgnoreCase(api.getImplementation())) { client.addApi(builder, tenantDomain, api.getId()); client.addEndpoint(api, builder, tenantDomain); } if (api.isDefaultVersion()) { if (client.getDefaultApi(tenantDomain, api.getId()) != null) { client.updateDefaultApi(builder, tenantDomain, api.getId().getVersion(), api.getId()); } else { client.addDefaultAPI(builder, tenantDomain, api.getId().getVersion(), api.getId()); } } setSecureVaultProperty(client, api, tenantDomain, environment); //Deploy the custom sequences of the API. 
deployCustomSequences(client, api, tenantDomain, environment); } else { deployWebsocketAPI(api, client); } } endTime = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing API (if the API does not exist in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); log.error("Error occurred when publish to gateway " + environmentName, axisFault); } catch (APIManagementException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (JSONException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (EndpointAdminException ex) { log.error("Error occurred when endpoint add/update operation" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (CertificateManagementException ex) { log.error("Error occurred while adding/updating client certificate in " + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } long endTimePublishToGateway = System.currentTimeMillis(); if (debugEnabled) { log.debug("Publishing to gateway : " + environmentName + " total time taken : " + (endTimePublishToGateway - startTimePublishToGateway) / 1000 + " seconds"); } } updateRemovedClientCertificates(api, tenantDomain); return failedEnvironmentsMap; } /** * Publishes an API Product to all configured Gateways. 
* * @param apiProduct * - The API Product to be published * @param builder * - The template builder * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> publishToGateway(APIProduct apiProduct, APITemplateBuilder builder, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (apiProduct.getEnvironments() == null) { return failedEnvironmentsMap; } long startTime = 0; long startTimePublishToGateway = 0; long apiGetStartTime = 0; APIProductIdentifier apiProductId = apiProduct.getId(); APIIdentifier id = new APIIdentifier(PRODUCT_PREFIX, apiProductId.getName(), PRODUCT_VERSION); if (debugEnabled) { log.debug("API to be published: " + id); log.debug("Number of environments to be published to: " + apiProduct.getEnvironments().size()); } for (String environmentName : apiProduct.getEnvironments()) { if (debugEnabled) { startTimePublishToGateway = System.currentTimeMillis(); } Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without publishing if (environment == null) { continue; } APIGatewayAdminClient client; try { client = new APIGatewayAdminClient(environment); if (debugEnabled) { apiGetStartTime = System.currentTimeMillis(); } APIData apiData = client.getApi(tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Time taken to fetch API Data: " + (endTime - apiGetStartTime) / 1000 + " seconds"); } // If the API exists in the Gateway if (apiData != null) { if (debugEnabled) { startTime = System.currentTimeMillis(); } if (debugEnabled) { log.debug("API exists, updating existing API " + id.getApiName() + " in environment " + environment.getName()); } //Update the API client.updateApi(builder, tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Publishing API (if the API exists in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } else { // If the Gateway type is 'production' and a production url has // not been specified // Or if the Gateway type is 'sandbox' and a sandbox url has not // been specified if (debugEnabled) { startTime = System.currentTimeMillis(); } if (debugEnabled) { log.debug("API does not exist, adding new API " + id.getApiName() + " in environment " + environment.getName()); } //Add the API client.addApi(builder, tenantDomain, id); if (debugEnabled) { long endTime = System.currentTimeMillis(); log.debug("Publishing API (if the API does not exist in the Gateway) took " + (endTime - startTime) / 1000 + " seconds"); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); log.error("Error occurred when publish to gateway " + environmentName, axisFault); } if (debugEnabled) { long endTimePublishToGateway = System.currentTimeMillis(); log.debug("Publishing to gateway : " + environmentName + " total time taken : " + (endTimePublishToGateway - startTimePublishToGateway) / 1000 + " seconds"); } } return failedEnvironmentsMap; } /** * Removed an API from the configured Gateways * * @param api * - The API to be removed * @param tenantDomain * - Tenant Domain of the publisher */ public Map<String, String> removeFromGateway(API api, String 
tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() != null) { for (String environmentName : api.getEnvironments()) { try { Environment environment = environments.get(environmentName); //If the environment is removed from the configuration, continue without removing if (environment == null) { continue; } APIGatewayAdminClient client = new APIGatewayAdminClient(environment); unDeployClientCertificates(client, api, tenantDomain); if(!APIConstants.APIType.WS.toString().equals(api.getType())) { APIIdentifier id = api.getId(); if (client.getApi(tenantDomain, id) != null) { if (debugEnabled) { log.debug("Removing API " + api.getId().getApiName() + " From environment " + environment.getName()); } if ("INLINE".equals(api.getImplementation()) || "MARKDOWN".equals(api.getImplementation())) { client.deleteApi(tenantDomain, api.getId()); undeployCustomSequences(client, api, tenantDomain, environment); } else { client.deleteEndpoint(api, tenantDomain); client.deleteApi(tenantDomain, api.getId()); undeployCustomSequences(client, api, tenantDomain, environment); } } } else { String fileName = api.getContext().replace('/', '-'); String[] fileNames = new String[2]; fileNames[0] = ENDPOINT_PRODUCTION + fileName; fileNames[1] = ENDPOINT_SANDBOX + fileName; if (client.isExistingSequence(fileNames[0], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileNames[0], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } if (client.isExistingSequence(fileNames[1], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileNames[1], MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } if (api.isPublishedDefaultVersion()) { APIIdentifier id = api.getId(); if (client.getDefaultApi(tenantDomain, id) != null) { client.deleteDefaultApi(tenantDomain, api.getId()); } } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error occurred when removing from gateway " + environmentName, axisFault); failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); } catch (EndpointAdminException ex) { log.error("Error occurred when deleting endpoint from gateway" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } catch (CertificateManagementException ex) { log.error("Error occurred when deleting certificate from gateway" + environmentName, ex); failedEnvironmentsMap.put(environmentName, ex.getMessage()); } } updateRemovedClientCertificates(api, tenantDomain); } return failedEnvironmentsMap; } /** * add websoocket api to the gateway * * @param api * @param client * @throws APIManagementException */ public void deployWebsocketAPI(API api, APIGatewayAdminClient client) throws APIManagementException, JSONException { try { String production_endpoint = null; String sandbox_endpoint = null; JSONObject obj = new JSONObject(api.getEndpointConfig()); if (obj.has(APIConstants.API_DATA_PRODUCTION_ENDPOINTS)) { production_endpoint = obj.getJSONObject(APIConstants.API_DATA_PRODUCTION_ENDPOINTS).getString("url"); } if (obj.has(APIConstants.API_DATA_SANDBOX_ENDPOINTS)) { sandbox_endpoint = obj.getJSONObject(APIConstants.API_DATA_SANDBOX_ENDPOINTS).getString("url"); } OMElement element; try { if (production_endpoint != null) { String content = createSeqString(api, 
production_endpoint, ENDPOINT_PRODUCTION); element = AXIOMUtil.stringToOM(content); String fileName = element.getAttributeValue(new QName("name")); if (client.isExistingSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } client.addSequence(element, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } if (sandbox_endpoint != null) { String content = createSeqString(api, sandbox_endpoint, ENDPOINT_SANDBOX); element = AXIOMUtil.stringToOM(content); String fileName = element.getAttributeValue(new QName("name")); if (client.isExistingSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME)) { client.deleteSequence(fileName, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } client.addSequence(element, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); } } catch (AxisFault e) { String msg = "Error while parsing the policy to get the eligibility query: "; log.error(msg, e); throw new APIManagementException(msg); } } catch (XMLStreamException e) { String msg = "Error while parsing the policy to get the eligibility query: "; log.error(msg, e); throw new APIManagementException(msg); } } /** * add new api version at the API Gateway * * @param artifact * @param api */ public void createNewWebsocketApiVersion(GenericArtifact artifact, API api) { try { APIGatewayManager gatewayManager = APIGatewayManager.getInstance(); APIGatewayAdminClient client; Set<String> environments = APIUtil.extractEnvironmentsForAPI( artifact.getAttribute(APIConstants.API_OVERVIEW_ENVIRONMENTS)); api.setEndpointConfig(artifact.getAttribute(APIConstants.API_OVERVIEW_ENDPOINT_CONFIG)); api.setContext(artifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT)); for (String environmentName : environments) { Environment environment = this.environments.get(environmentName); client = new APIGatewayAdminClient(environment); try { gatewayManager.deployWebsocketAPI(api, client); } catch (JSONException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway publishing if one gateway unreachable */ log.error("Error occurred deploying sequences on " + environmentName, ex); } } } catch (APIManagementException ex) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } catch (AxisFault ex) { log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } catch (GovernanceException ex) { log.error("Error in deploying to gateway :" + ex.getMessage(), ex); } } /** * create body of sequence * * @param api * @param url * @return */ public String createSeqString(API api, String url, String urltype) { String context = api.getContext(); context = urltype + context; String seq = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<sequence xmlns=\"http://ws.apache.org/ns/synapse\" name=\"" + context.replace('/', '-') + "\">\n" + " <property name=\"OUT_ONLY\" value=\"true\"/>\n" + " <script language=\"js\">var sub_path = mc.getProperty(\"websocket.subscriber.path\");\t \n" + " \tvar queryParamString = sub_path.split(\"\\\\?\")[1];\n" + " if(queryParamString != undefined) {\t \n" + "\t\tmc.setProperty('queryparams', \"?\" + 
queryParamString);\n" + "\t\t}\t\t\n" + " </script>\n" + " <property xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns=\"http://org.apache.synapse/xsd\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"queryparams\"\n" + " expression=\"$ctx:queryparams\"/>\n" + " <property name=\"urlVal\" value=\""+ url + "\"/>\n" + " <property xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"fullUrl\"\n" + " expression=\"fn:concat(get-property('urlVal'), get-property('queryparams'))\"\n" + " type=\"STRING\"/>\n" + " <header xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"\n" + " xmlns:ns3=\"http://org.apache.synapse/xsd\"\n" + " name=\"To\"\n" + " expression=\"$ctx:fullUrl\"/>\n" + " <send>\n" + " <endpoint>\n" + " <default/>\n" + " </endpoint>\n" + " </send>\n" + "</sequence>"; return seq; } public Map<String, String> removeDefaultAPIFromGateway(API api, String tenantDomain) { Map<String, String> failedEnvironmentsMap = new HashMap<String, String>(0); if (api.getEnvironments() != null) { for (String environmentName : api.getEnvironments()) { try { Environment environment = environments.get(environmentName); APIGatewayAdminClient client = new APIGatewayAdminClient(environment); APIIdentifier id = api.getId(); if (client.getDefaultApi(tenantDomain, id) != null) { if (debugEnabled) { log.debug("Removing Default API " + api.getId().getApiName() + " From environment " + environment.getName()); } client.deleteDefaultApi(tenantDomain, api.getId()); } } catch (AxisFault axisFault) { /* didn't throw this exception to handle multiple gateway publishing if gateway is unreachable we collect that environments into map with issue and show on popup in ui therefore this didn't break the gateway unpublisihing if one gateway unreachable */ log.error("Error occurred when removing default api from gateway " + environmentName, axisFault); failedEnvironmentsMap.put(environmentName, axisFault.getMessage()); } } } return failedEnvironmentsMap; } /** * Checks whether the API has been published. * * @param api * - The API to be cheked. * @param tenantDomain * - Tenant Domain of the publisher * @return True if the API is available in at least one Gateway. False if * available in none. */ public boolean isAPIPublished(API api, String tenantDomain)throws APIManagementException { for (Environment environment : environments.values()) { try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); // If the API exists in at least one environment, consider as // published and return true. APIIdentifier id = api.getId(); if (client.getApi(tenantDomain, id) != null) { return true; } } catch (AxisFault axisFault) { /* didn't throw this exception to check api available in all the environments therefore we didn't throw exception to avoid if gateway unreachable affect */ if (!APIConstants.CREATED.equals(api.getStatus())) { log.error("Error occurred when check api is published on gateway" + environment.getName(), axisFault); } } } return false; } /** * Get the endpoint Security type of the published API * * @param api - The API to be checked. 
* @param tenantDomain - Tenant Domain of the publisher * @return Endpoint security type; Basic or Digest */ public String getAPIEndpointSecurityType(API api, String tenantDomain) throws APIManagementException { for (Environment environment : environments.values()) { try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); APIIdentifier id = api.getId(); APIData apiData = client.getApi(tenantDomain, id); if (apiData != null) { ResourceData[] resourceData = apiData.getResources(); for (ResourceData resource : resourceData) { if (resource != null && resource.getInSeqXml() != null && resource.getInSeqXml().contains("DigestAuthMediator")) { return APIConstants.APIEndpointSecurityConstants.DIGEST_AUTH; } } } } catch (AxisFault axisFault) { // didn't throw this exception to check api available in all the environments // therefore we didn't throw exception to avoid if gateway unreachable affect if (!APIConstants.CREATED.equals(api.getStatus())) { log.error("Error occurred when check api endpoint security type on gateway" + environment.getName(), axisFault); } } } return APIConstants.APIEndpointSecurityConstants.BASIC_AUTH; } public void setProductResourceSequences(APIProviderImpl apiProvider, APIProduct apiProduct, String tenantDomain) throws APIManagementException { for (APIProductResource resource : apiProduct.getProductResources()) { APIIdentifier apiIdentifier = resource.getApiIdentifier(); API api = apiProvider.getAPI(apiIdentifier); for (String environmentName : api.getEnvironments()) { Environment environment = environments.get(environmentName); try { APIGatewayAdminClient client = new APIGatewayAdminClient(environment); String inSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; if (client.isExistingSequence(inSequenceKey, tenantDomain)) { resource.setInSequenceName(inSequenceKey); } String outSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; if (client.isExistingSequence(outSequenceKey, tenantDomain)) { resource.setOutSequenceName(outSequenceKey); } String faultSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; if (client.isExistingSequence(faultSequenceKey, tenantDomain)) { resource.setFaultSequenceName(faultSequenceKey); } } catch (AxisFault axisFault) { throw new APIManagementException("Error occurred while checking if product resources " + "have custom sequences", axisFault); } } } } /** * To deploy client certificate in given API environment. * * @param client API GatewayAdminClient . * @param api Relevant API. * @param tenantDomain Tenant domain. * @throws CertificateManagementException Certificate Management Exception. * @throws AxisFault AxisFault. */ private void deployClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.addClientCertificate(clientCertificateDTO.getCertificate(), clientCertificateDTO.getAlias() + "_" + tenantId); } } } /** * To update client certificate in relevant API gateway environment. 
* * @param client API Gateway admi client. * @param api Relevant API. * @param tenantDomain Tenant domain. */ private void updateClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<String> aliasList = CertificateMgtDAO.getInstance() .getDeletedClientCertificateAlias(api.getId(), tenantId); for (String alias : aliasList) { client.deleteClientCertificate(alias + "_" + tenantId); } List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.addClientCertificate(clientCertificateDTO.getCertificate(), clientCertificateDTO.getAlias() + "_" + tenantId); } } } /** * To update the database instance with the successfully removed client certificates from teh gateway. * * @param api Relevant API related with teh removed certificate. * @param tenantDomain Tenant domain of the API. */ private void updateRemovedClientCertificates(API api, String tenantDomain) { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } try { CertificateMgtDAO.getInstance().updateRemovedCertificatesFromGateways(api.getId(), APIUtil.getTenantIdFromTenantDomain(tenantDomain)); /* The flow does not need to be blocked, as this failure do not related with updating client certificates in gateway, rather updating in database. There is no harm in database having outdated certificate information.*/ } catch (CertificateManagementException e) { log.error("Certificate Management Exception while trying to update the remove certificate from gateways " + "for the api " + api.getId() + " for the tenant domain " + tenantDomain, e); } } /** * To undeploy the client certificates from the gateway environment. * * @param client APIGatewayAdmin Client. * @param api Relevant API particular certificate is related with. * @param tenantDomain Tenant domain of the API. * @throws CertificateManagementException Certificate Management Exception. * @throws AxisFault AxisFault. 
*/ private void unDeployClientCertificates(APIGatewayAdminClient client, API api, String tenantDomain) throws CertificateManagementException, AxisFault { if (!CertificateManagerImpl.getInstance().isClientCertificateBasedAuthenticationConfigured()) { return; } int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); List<ClientCertificateDTO> clientCertificateDTOList = CertificateMgtDAO.getInstance() .getClientCertificates(tenantId, null, api.getId()); if (clientCertificateDTOList != null) { for (ClientCertificateDTO clientCertificateDTO : clientCertificateDTOList) { client.deleteClientCertificate(clientCertificateDTO.getAlias() + "_" + tenantId); } } List<String> aliasList = CertificateMgtDAO.getInstance() .getDeletedClientCertificateAlias(api.getId(), tenantId); for (String alias : aliasList) { client.deleteClientCertificate(alias + "_" + tenantId); } } /** * Get the specified in/out sequences from api object * * @param api -API object * @param tenantDomain * @param environment * @throws APIManagementException * @throws AxisFault */ private void deployCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (APIUtil.isSequenceDefined(api.getInSequence())) { deployInSequence(client, api, tenantId, tenantDomain, environment); } if (APIUtil.isSequenceDefined(api.getOutSequence())) { deployOutSequence(client, api, tenantId, tenantDomain, environment); } } catch (Exception e) { String msg = "Error in deploying the sequence to gateway"; log.error(msg, e); throw new APIManagementException(msg); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } private void deployInSequence(APIGatewayAdminClient sequenceAdminServiceClient, API api, int tenantId, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { String inSequenceName = api.getInSequence(); OMElement inSequence = APIUtil.getCustomSequence(inSequenceName, tenantId, "in", api.getId()); if (inSequence != null) { String inSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; if (inSequence.getAttribute(new QName("name")) != null) { inSequence.getAttribute(new QName("name")).setAttributeValue(inSeqExt); } sequenceAdminServiceClient.addSequence(inSequence, tenantDomain); } } private void deployOutSequence(APIGatewayAdminClient client, API api, int tenantId, String tenantDomain, Environment environment) throws APIManagementException, AxisFault { String outSequenceName = api.getOutSequence(); OMElement outSequence = APIUtil.getCustomSequence(outSequenceName, tenantId, "out", api.getId()); if (outSequence != null) { String outSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; if (outSequence.getAttribute(new QName("name")) != null) { outSequence.getAttribute(new QName("name")).setAttributeValue(outSeqExt); } client.addSequence(outSequence, tenantDomain); } } /** * Undeploy the sequences deployed in synapse * * @param api * @param 
tenantDomain * @param environment * @throws APIManagementException */ private void undeployCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else{ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } if (APIUtil.isSequenceDefined(api.getInSequence())) { String inSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; client.deleteSequence(inSequence, tenantDomain); } if (APIUtil.isSequenceDefined(api.getOutSequence())) { String outSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; client.deleteSequence(outSequence, tenantDomain); } if (APIUtil.isSequenceDefined(api.getFaultSequence())) { String faultSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; if(client.isExistingSequence(faultSequence, tenantDomain)) { client.deleteSequence(faultSequence, tenantDomain); } } } catch (Exception e) { String msg = "Error in deleting the sequence from gateway"; log.error(msg, e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Update the custom sequences in gateway * @param api * @param tenantDomain * @param environment * @throws APIManagementException */ private void updateCustomSequences(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException { //If sequences have been added, updated or removed. if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getOldInSequence()) || APIUtil.isSequenceDefined(api.getOldOutSequence())) { try { PrivilegedCarbonContext.startTenantFlow(); if(tenantDomain != null && !"".equals(tenantDomain)){ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else{ PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); //If an inSequence has been added, updated or removed. if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOldInSequence())) { String inSequenceKey = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT; //If sequence already exists if (client.isExistingSequence(inSequenceKey, tenantDomain)) { //Delete existing sequence client.deleteSequence(inSequenceKey, tenantDomain); } //If an inSequence has been added or updated. if(APIUtil.isSequenceDefined(api.getInSequence())){ //Deploy the inSequence deployInSequence(client, api, tenantId, tenantDomain, environment); } } //If an outSequence has been added, updated or removed. if (APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getOldOutSequence())) { String outSequence = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_OUT_EXT; //If the outSequence exists. if (client.isExistingSequence(outSequence, tenantDomain)) { //Delete existing outSequence client.deleteSequence(outSequence, tenantDomain); } //If an outSequence has been added or updated. 
if (APIUtil.isSequenceDefined(api.getOutSequence())){ //Deploy outSequence deployOutSequence(client, api, tenantId, tenantDomain, environment); } } } catch (Exception e) { String msg = "Error in updating the sequence at the Gateway"; log.error(msg, e); throw new APIManagementException(msg, e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } } private void deployAPIFaultSequence(APIGatewayAdminClient client, API api, String tenantDomain, Environment environment) throws APIManagementException { String faultSequenceName = api.getFaultSequence(); String faultSeqExt = APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_FAULT_EXT; boolean isTenantFlowStarted = false; try { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; if (!StringUtils.isEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } //If a fault sequence has be defined. if (APIUtil.isSequenceDefined(faultSequenceName)) { int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); //If the sequence already exists if (client.isExistingSequence(faultSeqExt, tenantDomain)) { //Delete the sequence. We need to redeploy afterwards since the sequence may have been updated. client.deleteSequence(faultSeqExt, tenantDomain); } //Get the fault sequence xml OMElement faultSequence = APIUtil.getCustomSequence(faultSequenceName, tenantId, APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT, api.getId()); if (faultSequence != null) { if (APIUtil.isPerAPISequence(faultSequenceName, tenantId, api.getId(), APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT)) { if (faultSequence.getAttribute(new QName("name")) != null) { faultSequence.getAttribute(new QName("name")).setAttributeValue(faultSeqExt); } } else { //If the previous sequence was a per API fault sequence delete it if (client.isExistingSequence(faultSequenceName, tenantDomain)) { client.deleteSequence(faultSequenceName, tenantDomain); } } //Deploy the fault sequence client.addSequence(faultSequence, tenantDomain); } } else { if (client.isExistingSequence(faultSeqExt, tenantDomain)) { client.deleteSequence(faultSeqExt, tenantDomain); } } } catch (AxisFault e) { String msg = "Error while updating the fault sequence at the Gateway"; log.error(msg, e); throw new APIManagementException(msg, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Store the secured endpoint username password to registry * @param api * @param tenantDomain * @param environment * @throws APIManagementException */ private void setSecureVaultProperty(APIGatewayAdminClient securityAdminClient, API api, String tenantDomain, Environment environment) throws APIManagementException { boolean isSecureVaultEnabled = Boolean.parseBoolean(ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService(). getAPIManagerConfiguration().getFirstProperty(APIConstants.API_SECUREVAULT_ENABLE)); if (api.isEndpointSecured() && isSecureVaultEnabled) { try { securityAdminClient.setSecureVaultProperty(api, tenantDomain); } catch (Exception e) { String msg = "Error in setting secured password."; log.error(msg + ' ' + e.getLocalizedMessage(), e); throw new APIManagementException(msg); } } } }
Fix imports
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIGatewayManager.java
Fix imports
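The deployInSequence/deployOutSequence methods in the APIGatewayManager record above rename a user-supplied Synapse sequence to a per-API extension name before pushing it to the gateway. Below is a minimal, self-contained sketch of just that renaming step, for illustration only: the sequence XML and the extension-name string are made-up placeholders, and the only library calls assumed are the Axiom ones the record itself already uses (AXIOMUtil.stringToOM, getAttribute(new QName("name")), setAttributeValue).

import javax.xml.namespace.QName;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.util.AXIOMUtil;

public class SequenceRenameSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical per-API extension name (stands in for
        // APIUtil.getSequenceExtensionName(api) + APIConstants.API_CUSTOM_SEQ_IN_EXT).
        String inSeqExt = "admin--PizzaShack_v1.0.0--In";
        // Hypothetical user-defined custom in-sequence.
        OMElement seq = AXIOMUtil.stringToOM(
                "<sequence xmlns=\"http://ws.apache.org/ns/synapse\" name=\"my-custom-in\"><log/></sequence>");
        // Same renaming step as deployInSequence: overwrite the name attribute so the
        // deployed artifact is keyed per API rather than by the original sequence name.
        if (seq.getAttribute(new QName("name")) != null) {
            seq.getAttribute(new QName("name")).setAttributeValue(inSeqExt);
        }
        System.out.println(seq); // serialized sequence now carries the per-API name
    }
}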
Java
apache-2.0
26961042b5e377732fc3598afe52ccd4d499be35
0
release-engineering/pom-manipulation-ext,rnc/pom-manipulation-ext
/* * Copyright (C) 2012 Red Hat, Inc. (jcasey@redhat.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.commonjava.maven.ext.manip.impl; import org.junit.Test; import java.util.HashSet; import java.util.Set; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertFalse; public class VersionTest { @Test public void testAppendQualifierSuffix() { assertThat( Version.appendQualifierSuffix( "1.0", "foo" ), equalTo( "1.0.foo") ); assertThat( Version.appendQualifierSuffix( "1.0.0.Beta1", "foo" ), equalTo( "1.0.0.Beta1-foo") ); assertThat( Version.appendQualifierSuffix( "1.0_Betafoo", "foo" ), equalTo( "1.0_Betafoo") ); assertThat( Version.appendQualifierSuffix( "1.0_fooBeta", "foo" ), equalTo( "1.0_fooBeta-foo") ); assertThat( Version.appendQualifierSuffix( "1.0", "-foo" ), equalTo( "1.0-foo") ); assertThat( Version.appendQualifierSuffix( "1.0.0.Beta1", "_foo" ), equalTo( "1.0.0.Beta1_foo") ); assertThat( Version.appendQualifierSuffix( "1.0_Betafoo", "-foo" ), equalTo( "1.0_Beta-foo") ); assertThat( Version.appendQualifierSuffix( "1.0_Beta.foo", "-foo" ), equalTo( "1.0_Beta-foo") ); assertThat( Version.appendQualifierSuffix( "jboss-1-GA", "jboss" ), equalTo( "jboss-1-GA-jboss") ); assertThat( Version.appendQualifierSuffix( "1.2", "jboss-1-SNAPSHOT" ), equalTo( "1.2.jboss-1-SNAPSHOT") ); assertThat( Version.appendQualifierSuffix( "1.2-SNAPSHOT", "jboss1" ), equalTo( "1.2.jboss1-SNAPSHOT") ); assertThat( Version.appendQualifierSuffix( "1.2-jboss-1", "jboss-2" ), equalTo( "1.2-jboss-2") ); assertThat( Version.appendQualifierSuffix( "1.2-jboss-1", ".jboss-2" ), equalTo( "1.2.jboss-2") ); assertThat( Version.appendQualifierSuffix( "1.1-SNAPSHOT", ".test_jdk7-SNAPSHOT" ), equalTo( "1.1.test_jdk7-SNAPSHOT") ); } @Test public void testAppendQualifierSuffix_WithProperty() { assertThat( Version.appendQualifierSuffix( "${project.version}", "foo" ), equalTo( "${project.version}-foo") ); assertThat( Version.appendQualifierSuffix( "1.0.${micro}", ".foo" ), equalTo( "1.0.${micro}.foo") ); assertThat( Version.setBuildNumber( "${project.version}", "10" ), equalTo( "${project.version}-10") ); assertThat( Version.setBuildNumber( "${project.version}-foo", "10" ), equalTo( "${project.version}-foo-10") ); } @Test public void testAppendQualifierSuffix_MulitpleTimes() { String version = "1.2.0"; version = Version.appendQualifierSuffix( version, "jboss-1" ); version = Version.appendQualifierSuffix( version, "foo" ); assertThat( version, equalTo( "1.2.0.jboss-1-foo" ) ); version = "1.2"; version = Version.appendQualifierSuffix( version, "jboss-1" ); version = Version.appendQualifierSuffix( version, "jboss-2" ); version = Version.getOsgiVersion( version ); assertThat( version, equalTo( "1.2.0.jboss-2" ) ); } @Test public void testFindHighestMatchingBuildNumber() { String version = "1.2.0.Final-foo"; final Set<String> versionSet = new HashSet<>(); versionSet.add("1.2.0.Final-foo-1"); 
versionSet.add("1.2.0.Final-foo-2"); assertThat(Version.findHighestMatchingBuildNumber(version, versionSet), equalTo(2)); version = "1.2.0.Final-foo10"; versionSet.clear(); versionSet.add("1.2.0.Final-foo-1"); versionSet.add("1.2.0.Final-foo-2"); assertThat(Version.findHighestMatchingBuildNumber(version, versionSet), equalTo(2)); version = Version.appendQualifierSuffix( "0.0.4", "redhat-0" ); versionSet.clear(); versionSet.add( "0.0.1" ); versionSet.add( "0.0.2" ); versionSet.add( "0.0.3" ); versionSet.add( "0.0.4" ); versionSet.add( "0.0.4.redhat-2" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 2 ) ); version = "1.2-foo-1"; versionSet.clear(); versionSet.add( "1.2-foo-4" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 4 ) ); } @Test public void testFindHighestMatchingBuildNumber_OSGi() { String version = "1.2.0.Final-foo"; final Set<String> versionSet = new HashSet<>(); versionSet.add( "1.2.0.Final-foo-10" ); versionSet.add( "1.2.0.Final-foo-2" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 10 ) ); versionSet.clear(); } @Test public void testFindHighestMatchingBuildNumber_ZeroFill() { String majorOnlyVersion = "7"; String version = Version.appendQualifierSuffix( majorOnlyVersion, "redhat" ); final Set<String> versionSet = new HashSet<>(); versionSet.add( "7.0.0.redhat-2" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 2 ) ); String majorMinorVersion = "7.1"; version = Version.appendQualifierSuffix( majorMinorVersion, "redhat" ); versionSet.clear(); versionSet.add( "7.1.0.redhat-4" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 4 ) ); } @Test public void testGetBuildNumber() { assertThat( Version.getBuildNumber( "1.0-SNAPSHOT" ), equalTo( "" ) ); assertThat( Version.getBuildNumber( "1.0.0.Beta1" ), equalTo( "1" ) ); assertThat( Version.getBuildNumber( "1.0.beta.1-2" ), equalTo( "2" ) ); assertThat( Version.getBuildNumber( "Beta3" ), equalTo( "3" ) ); assertThat( Version.getBuildNumber( "1.x.2.beta4t" ), equalTo( "" ) ); assertThat( Version.getBuildNumber( "1.0.0.Beta11-SNAPSHOT" ), equalTo( "11" ) ); assertThat( Version.getBuildNumber( "1.0.0.Beta1-SNAPSHOT-10" ), equalTo( "10" ) ); } @Test public void testGetMMM() { assertThat( Version.getMMM( "1.0-SNAPSHOT" ), equalTo( "1.0" ) ); assertThat( Version.getMMM( "1.0.0.Beta1" ), equalTo( "1.0.0" ) ); assertThat( Version.getMMM( "1.0.beta.1" ), equalTo( "1.0" ) ); assertThat( Version.getMMM( "Beta1" ), equalTo( "" ) ); assertThat( Version.getMMM( "1.x.2.beta1" ), equalTo( "1" ) ); } @Test public void testGetOsgiMMM() { assertThat( Version.getOsgiMMM( "1.0", false ), equalTo( "1.0" ) ); assertThat( Version.getOsgiMMM( "1.0", true ), equalTo( "1.0.0" ) ); assertThat( Version.getOsgiMMM( "13_2-43", false ), equalTo( "13.2.43" ) ); assertThat( Version.getOsgiMMM( "1", false ), equalTo( "1" ) ); assertThat( Version.getOsgiMMM( "2", true ), equalTo( "2.0.0" ) ); assertThat( Version.getOsgiMMM( "beta1", false ), equalTo( "" ) ); assertThat( Version.getOsgiMMM( "GA-1-GA-foo", true ), equalTo( "" ) ); } @Test public void testGetOsgiVersion() { assertThat( Version.getOsgiVersion( "1.0" ), equalTo( "1.0" ) ); assertThat( Version.getOsgiVersion( "1_2_3" ), equalTo( "1.2.3" ) ); assertThat( Version.getOsgiVersion( "1-2.3beta4" ), equalTo( "1.2.3.beta4" ) ); assertThat( Version.getOsgiVersion( "1.2.3.4.beta" ), equalTo( "1.2.3.4-beta" ) ); assertThat( 
Version.getOsgiVersion( "12.4-beta" ), equalTo( "12.4.0.beta" ) ); assertThat( Version.getOsgiVersion( "-beta1" ), equalTo( "-beta1" ) ); assertThat( Version.getOsgiVersion( "12.beta1_3-5.hello" ), equalTo( "12.0.0.beta1_3-5-hello" ) ); } @Test public void testGetQualifier() { assertThat( Version.getQualifier( "1.0-SNAPSHOT" ), equalTo( "SNAPSHOT" ) ); assertThat( Version.getQualifierWithDelim( "1.0-SNAPSHOT" ), equalTo( "-SNAPSHOT" ) ); assertThat( Version.getQualifier( "1.0.0.Beta1" ), equalTo( "Beta1" ) ); assertThat( Version.getQualifierWithDelim( "1.0.0.Beta1" ), equalTo( ".Beta1" ) ); assertThat( Version.getQualifier( "1.0.beta.1" ), equalTo( "beta.1" ) ); assertThat( Version.getQualifierWithDelim( "1.0.beta.1" ), equalTo( ".beta.1" ) ); assertThat( Version.getQualifier( "Beta1" ), equalTo( "Beta1" ) ); assertThat( Version.getQualifierWithDelim( "Beta1" ), equalTo( "Beta1" ) ); assertThat( Version.getQualifier( "1.x.2.beta1" ), equalTo( "x.2.beta1" ) ); assertThat( Version.getQualifierWithDelim( "1.x.2.beta1" ), equalTo( ".x.2.beta1" ) ); assertThat( Version.getQualifier( "1.2" ), equalTo( "" ) ); assertThat( Version.getQualifierWithDelim( "1.2" ), equalTo( "" ) ); assertThat( Version.getQualifier( "1.5-3_beta-SNAPSHOT-1" ), equalTo( "beta-SNAPSHOT-1" ) ); assertThat( Version.getQualifierWithDelim( "1.5-3_beta-SNAPSHOT-1" ), equalTo( "_beta-SNAPSHOT-1" ) ); assertThat( Version.getQualifier( "_beta-SNAPSHOT-1" ), equalTo( "beta-SNAPSHOT-1" ) ); assertThat( Version.getQualifierWithDelim( "_beta-SNAPSHOT-1" ), equalTo( "_beta-SNAPSHOT-1" ) ); } @Test public void testGetQualifierBase() { assertThat(Version.getQualifierBase("1.0-SNAPSHOT"), equalTo("")); assertThat(Version.getQualifierBase("1.0.0.Beta1"), equalTo("Beta")); assertThat(Version.getQualifierBase("1.0.0.jboss-test-SNAPSHOT"), equalTo("jboss-test")); assertThat(Version.getQualifierBase("${project.version}-test-1"), equalTo("${project.version}-test")); } @Test public void testGetSnapshot() { assertThat( Version.getSnapshot( "1.0-SNAPSHOT" ), equalTo( "SNAPSHOT" ) ); assertThat( Version.getSnapshotWithDelim( "1.0-SNAPSHOT" ), equalTo( "-SNAPSHOT" ) ); assertThat( Version.getSnapshot( "1.0.0.SNAPSHOT" ), equalTo( "SNAPSHOT" ) ); assertThat( Version.getSnapshotWithDelim( "1.0.0.SNAPSHOT" ), equalTo( ".SNAPSHOT" ) ); assertThat( Version.getSnapshot( "1.0.0.Beta1-snapshot" ), equalTo( "snapshot" ) ); assertThat( Version.getSnapshotWithDelim( "1.0.0.Beta1-snapshot" ), equalTo( "-snapshot" ) ); assertThat( Version.getSnapshot( "1_snaPsHot" ), equalTo( "snaPsHot" ) ); assertThat( Version.getSnapshotWithDelim( "1_snaPsHot" ), equalTo( "_snaPsHot" ) ); assertThat( Version.getSnapshot( "1.0" ), equalTo( "" ) ); assertThat( Version.getSnapshotWithDelim( "1.0" ), equalTo( "" ) ); assertThat( Version.getSnapshot( "1.0-foo" ), equalTo( "" ) ); assertThat( Version.getSnapshotWithDelim( "1.0-foo" ), equalTo( "" ) ); } @Test public void testIsSnapshot() { assertTrue( Version.isSnapshot( "1.0-SNAPSHOT" ) ); assertTrue( Version.isSnapshot( "1.0-snapshot" ) ); assertTrue( Version.isSnapshot( "1.0.SnapsHot" ) ); assertTrue( Version.isSnapshot( "1.0.0snapshot" ) ); assertTrue( Version.isSnapshot( "snapshot" ) ); assertFalse( Version.isSnapshot( "1" ) ); assertFalse( Version.isSnapshot( "1.0-snapsho" ) ); assertFalse( Version.isSnapshot( "1.0.beta1-" ) ); } @Test public void testIsEmpty() throws Exception { assertTrue( Version.isEmpty(null) ); assertTrue( Version.isEmpty("") ); assertTrue( Version.isEmpty(" \n") ); assertFalse( Version.isEmpty( "a") ); 
assertFalse( Version.isEmpty( " a \n") ); } @Test public void testRemoveSnapshot() { assertThat( Version.removeSnapshot( "1.0-SNAPSHOT" ), equalTo( "1.0" ) ); assertThat( Version.removeSnapshot( "1.0.0.Beta1_snapshot" ), equalTo( "1.0.0.Beta1" ) ); assertThat( Version.removeSnapshot( "1.snaPsHot" ), equalTo( "1" ) ); assertThat( Version.removeSnapshot( "SNAPSHOT" ), equalTo( "" ) ); assertThat( Version.removeSnapshot( "1.0.snapshot.beta1" ), equalTo( "1.0.snapshot.beta1" ) ); } @Test public void testSetBuildNumber() { assertThat( Version.setBuildNumber( "1.0.beta1", "2" ), equalTo( "1.0.beta2") ); assertThat( Version.setBuildNumber( "1.0_2-Beta1-SNAPSHOT", "41" ), equalTo( "1.0_2-Beta41-SNAPSHOT") ); assertThat( Version.setBuildNumber( "1.0.2.1", "3" ), equalTo( "1.0.2.3") ); assertThat( Version.setBuildNumber( "1.0.2", "3" ), equalTo( "1.0.2.3") ); assertThat( Version.setBuildNumber( "1.0", "2" ), equalTo( "1.0.2") ); assertThat( Version.setBuildNumber( "1.0-alpha", "001" ), equalTo( "1.0-alpha-001") ); } @Test public void testSetSnapshot() { assertThat( Version.setSnapshot( "1.0-SNAPSHOT", true ), equalTo( "1.0-SNAPSHOT" ) ); assertThat( Version.setSnapshot( "1.0-SNAPSHOT", false ), equalTo( "1.0" ) ); assertThat( Version.setSnapshot( "1.1", true ), equalTo( "1.1-SNAPSHOT" ) ); assertThat( Version.setSnapshot( "1.1", false ), equalTo( "1.1" ) ); assertThat( Version.setSnapshot( "1.2.jboss-1", true ), equalTo( "1.2.jboss-1-SNAPSHOT" ) ); assertThat( Version.setSnapshot( "1.2.jboss-1", false ), equalTo( "1.2.jboss-1" ) ); assertThat( Version.setSnapshot( "1.0.0.Beta1_snapshot", true ), equalTo( "1.0.0.Beta1_snapshot" ) ); assertThat( Version.setSnapshot( "1.0.0.Beta1_snapshot", false ), equalTo( "1.0.0.Beta1" ) ); assertThat( Version.setSnapshot( "1.snaPsHot", true ), equalTo( "1.snaPsHot" ) ); assertThat( Version.setSnapshot( "1.snaPsHot", false ), equalTo( "1" ) ); assertThat( Version.setSnapshot( "SNAPSHOT", true ), equalTo( "SNAPSHOT" ) ); assertThat( Version.setSnapshot( "SNAPSHOT", false ), equalTo( "" ) ); assertThat( Version.setSnapshot( "1.0.snapshot.beta1", true ), equalTo( "1.0.snapshot.beta1-SNAPSHOT" ) ); assertThat( Version.setSnapshot( "1.0.snapshot.beta1", false ), equalTo( "1.0.snapshot.beta1" ) ); } @Test public void testRemoveLeadingDelimiters() { assertThat( Version.removeLeadingDelimiter( ".1.2" ), equalTo( "1.2" ) ); assertThat( Version.removeLeadingDelimiter( "_Beta1" ), equalTo( "Beta1" ) ); assertThat( Version.removeLeadingDelimiter( "1.0-SNAPSHOT" ), equalTo( "1.0-SNAPSHOT" ) ); assertThat( Version.removeLeadingDelimiter( "1.0_foo-" ), equalTo( "1.0_foo-" ) ); } @Test public void testValidOsgi() throws Exception { assertTrue( Version.isValidOSGi("1") ); assertTrue( Version.isValidOSGi("1.2") ); assertTrue( Version.isValidOSGi("1.2.3") ); assertTrue( Version.isValidOSGi("1.2.3.beta1") ); assertTrue( Version.isValidOSGi("1.2.3.beta_1") ); assertTrue( Version.isValidOSGi("1.2.3.beta-1") ); assertTrue( Version.isValidOSGi("0.0.1") ); assertFalse( Version.isValidOSGi("1.2.3.beta|1") ); assertFalse( Version.isValidOSGi("1.2.3.beta^1") ); assertFalse( Version.isValidOSGi("1.2.3.beta.1") ); assertFalse( Version.isValidOSGi("1.2.beta1") ); assertFalse( Version.isValidOSGi("1beta") ); assertFalse( Version.isValidOSGi("beta1") ); } }
core/src/test/java/org/commonjava/maven/ext/manip/impl/VersionTest.java
/* * Copyright (C) 2012 Red Hat, Inc. (jcasey@redhat.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.commonjava.maven.ext.manip.impl; import org.junit.Test; import java.util.HashSet; import java.util.Set; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertFalse; public class VersionTest { @Test public void testAppendQualifierSuffix() { assertThat( Version.appendQualifierSuffix( "1.0", "foo" ), equalTo( "1.0.foo") ); assertThat( Version.appendQualifierSuffix( "1.0.0.Beta1", "foo" ), equalTo( "1.0.0.Beta1-foo") ); assertThat( Version.appendQualifierSuffix( "1.0_Betafoo", "foo" ), equalTo( "1.0_Betafoo") ); assertThat( Version.appendQualifierSuffix( "1.0_fooBeta", "foo" ), equalTo( "1.0_fooBeta-foo") ); assertThat( Version.appendQualifierSuffix( "1.0", "-foo" ), equalTo( "1.0-foo") ); assertThat( Version.appendQualifierSuffix( "1.0.0.Beta1", "_foo" ), equalTo( "1.0.0.Beta1_foo") ); assertThat( Version.appendQualifierSuffix( "1.0_Betafoo", "-foo" ), equalTo( "1.0_Beta-foo") ); assertThat( Version.appendQualifierSuffix( "1.0_Beta.foo", "-foo" ), equalTo( "1.0_Beta-foo") ); assertThat( Version.appendQualifierSuffix( "jboss-1-GA", "jboss" ), equalTo( "jboss-1-GA-jboss") ); assertThat( Version.appendQualifierSuffix( "1.2", "jboss-1-SNAPSHOT" ), equalTo( "1.2.jboss-1-SNAPSHOT") ); assertThat( Version.appendQualifierSuffix( "1.2-SNAPSHOT", "jboss1" ), equalTo( "1.2.jboss1-SNAPSHOT") ); assertThat( Version.appendQualifierSuffix( "1.2-jboss-1", "jboss-2" ), equalTo( "1.2-jboss-2") ); assertThat( Version.appendQualifierSuffix( "1.2-jboss-1", ".jboss-2" ), equalTo( "1.2.jboss-2") ); assertThat( Version.appendQualifierSuffix( "1.1-SNAPSHOT", ".test_jdk7-SNAPSHOT" ), equalTo( "1.1.test_jdk7-SNAPSHOT") ); } @Test public void testAppendQualifierSuffix_MulitpleTimes() { String version = "1.2.0"; version = Version.appendQualifierSuffix( version, "jboss-1" ); version = Version.appendQualifierSuffix( version, "foo" ); assertThat( version, equalTo( "1.2.0.jboss-1-foo" ) ); version = "1.2"; version = Version.appendQualifierSuffix( version, "jboss-1" ); version = Version.appendQualifierSuffix( version, "jboss-2" ); version = Version.getOsgiVersion( version ); assertThat( version, equalTo( "1.2.0.jboss-2" ) ); } @Test public void testFindHighestMatchingBuildNumber() { String version = "1.2.0.Final-foo"; final Set<String> versionSet = new HashSet<>(); versionSet.add("1.2.0.Final-foo-1"); versionSet.add("1.2.0.Final-foo-2"); assertThat(Version.findHighestMatchingBuildNumber(version, versionSet), equalTo(2)); version = "1.2.0.Final-foo10"; versionSet.clear(); versionSet.add("1.2.0.Final-foo-1"); versionSet.add("1.2.0.Final-foo-2"); assertThat(Version.findHighestMatchingBuildNumber(version, versionSet), equalTo(2)); version = Version.appendQualifierSuffix( "0.0.4", "redhat-0" ); versionSet.clear(); versionSet.add( "0.0.1" ); versionSet.add( "0.0.2" ); versionSet.add( "0.0.3" ); versionSet.add( 
"0.0.4" ); versionSet.add( "0.0.4.redhat-2" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 2 ) ); version = "1.2-foo-1"; versionSet.clear(); versionSet.add( "1.2-foo-4" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 4 ) ); } @Test public void testFindHighestMatchingBuildNumber_OSGi() { String version = "1.2.0.Final-foo"; final Set<String> versionSet = new HashSet<>(); versionSet.add( "1.2.0.Final-foo-10" ); versionSet.add( "1.2.0.Final-foo-2" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 10 ) ); versionSet.clear(); } @Test public void testFindHighestMatchingBuildNumber_ZeroFill() { String majorOnlyVersion = "7"; String version = Version.appendQualifierSuffix( majorOnlyVersion, "redhat" ); final Set<String> versionSet = new HashSet<>(); versionSet.add( "7.0.0.redhat-2" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 2 ) ); String majorMinorVersion = "7.1"; version = Version.appendQualifierSuffix( majorMinorVersion, "redhat" ); versionSet.clear(); versionSet.add( "7.1.0.redhat-4" ); assertThat( Version.findHighestMatchingBuildNumber( version, versionSet ), equalTo( 4 ) ); } @Test public void testGetBuildNumber() { assertThat( Version.getBuildNumber( "1.0-SNAPSHOT" ), equalTo( "" ) ); assertThat( Version.getBuildNumber( "1.0.0.Beta1" ), equalTo( "1" ) ); assertThat( Version.getBuildNumber( "1.0.beta.1-2" ), equalTo( "2" ) ); assertThat( Version.getBuildNumber( "Beta3" ), equalTo( "3" ) ); assertThat( Version.getBuildNumber( "1.x.2.beta4t" ), equalTo( "" ) ); assertThat( Version.getBuildNumber( "1.0.0.Beta11-SNAPSHOT" ), equalTo( "11" ) ); assertThat( Version.getBuildNumber( "1.0.0.Beta1-SNAPSHOT-10" ), equalTo( "10" ) ); } @Test public void testGetMMM() { assertThat( Version.getMMM( "1.0-SNAPSHOT" ), equalTo( "1.0" ) ); assertThat( Version.getMMM( "1.0.0.Beta1" ), equalTo( "1.0.0" ) ); assertThat( Version.getMMM( "1.0.beta.1" ), equalTo( "1.0" ) ); assertThat( Version.getMMM( "Beta1" ), equalTo( "" ) ); assertThat( Version.getMMM( "1.x.2.beta1" ), equalTo( "1" ) ); } @Test public void testGetOsgiMMM() { assertThat( Version.getOsgiMMM( "1.0", false ), equalTo( "1.0" ) ); assertThat( Version.getOsgiMMM( "1.0", true ), equalTo( "1.0.0" ) ); assertThat( Version.getOsgiMMM( "13_2-43", false ), equalTo( "13.2.43" ) ); assertThat( Version.getOsgiMMM( "1", false ), equalTo( "1" ) ); assertThat( Version.getOsgiMMM( "2", true ), equalTo( "2.0.0" ) ); assertThat( Version.getOsgiMMM( "beta1", false ), equalTo( "" ) ); assertThat( Version.getOsgiMMM( "GA-1-GA-foo", true ), equalTo( "" ) ); } @Test public void testGetOsgiVersion() { assertThat( Version.getOsgiVersion( "1.0" ), equalTo( "1.0" ) ); assertThat( Version.getOsgiVersion( "1_2_3" ), equalTo( "1.2.3" ) ); assertThat( Version.getOsgiVersion( "1-2.3beta4" ), equalTo( "1.2.3.beta4" ) ); assertThat( Version.getOsgiVersion( "1.2.3.4.beta" ), equalTo( "1.2.3.4-beta" ) ); assertThat( Version.getOsgiVersion( "12.4-beta" ), equalTo( "12.4.0.beta" ) ); assertThat( Version.getOsgiVersion( "-beta1" ), equalTo( "-beta1" ) ); assertThat( Version.getOsgiVersion( "12.beta1_3-5.hello" ), equalTo( "12.0.0.beta1_3-5-hello" ) ); } @Test public void testGetQualifier() { assertThat( Version.getQualifier( "1.0-SNAPSHOT" ), equalTo( "SNAPSHOT" ) ); assertThat( Version.getQualifierWithDelim( "1.0-SNAPSHOT" ), equalTo( "-SNAPSHOT" ) ); assertThat( Version.getQualifier( "1.0.0.Beta1" ), equalTo( "Beta1" ) ); 
assertThat( Version.getQualifierWithDelim( "1.0.0.Beta1" ), equalTo( ".Beta1" ) ); assertThat( Version.getQualifier( "1.0.beta.1" ), equalTo( "beta.1" ) ); assertThat( Version.getQualifierWithDelim( "1.0.beta.1" ), equalTo( ".beta.1" ) ); assertThat( Version.getQualifier( "Beta1" ), equalTo( "Beta1" ) ); assertThat( Version.getQualifierWithDelim( "Beta1" ), equalTo( "Beta1" ) ); assertThat( Version.getQualifier( "1.x.2.beta1" ), equalTo( "x.2.beta1" ) ); assertThat( Version.getQualifierWithDelim( "1.x.2.beta1" ), equalTo( ".x.2.beta1" ) ); assertThat( Version.getQualifier( "1.2" ), equalTo( "" ) ); assertThat( Version.getQualifierWithDelim( "1.2" ), equalTo( "" ) ); assertThat( Version.getQualifier( "1.5-3_beta-SNAPSHOT-1" ), equalTo( "beta-SNAPSHOT-1" ) ); assertThat( Version.getQualifierWithDelim( "1.5-3_beta-SNAPSHOT-1" ), equalTo( "_beta-SNAPSHOT-1" ) ); assertThat( Version.getQualifier( "_beta-SNAPSHOT-1" ), equalTo( "beta-SNAPSHOT-1" ) ); assertThat( Version.getQualifierWithDelim( "_beta-SNAPSHOT-1" ), equalTo( "_beta-SNAPSHOT-1" ) ); } @Test public void testGetSnapshot() { assertThat( Version.getSnapshot( "1.0-SNAPSHOT" ), equalTo( "SNAPSHOT" ) ); assertThat( Version.getSnapshotWithDelim( "1.0-SNAPSHOT" ), equalTo( "-SNAPSHOT" ) ); assertThat( Version.getSnapshot( "1.0.0.SNAPSHOT" ), equalTo( "SNAPSHOT" ) ); assertThat( Version.getSnapshotWithDelim( "1.0.0.SNAPSHOT" ), equalTo( ".SNAPSHOT" ) ); assertThat( Version.getSnapshot( "1.0.0.Beta1-snapshot" ), equalTo( "snapshot" ) ); assertThat( Version.getSnapshotWithDelim( "1.0.0.Beta1-snapshot" ), equalTo( "-snapshot" ) ); assertThat( Version.getSnapshot( "1_snaPsHot" ), equalTo( "snaPsHot" ) ); assertThat( Version.getSnapshotWithDelim( "1_snaPsHot" ), equalTo( "_snaPsHot" ) ); assertThat( Version.getSnapshot( "1.0" ), equalTo( "" ) ); assertThat( Version.getSnapshotWithDelim( "1.0" ), equalTo( "" ) ); assertThat( Version.getSnapshot( "1.0-foo" ), equalTo( "" ) ); assertThat( Version.getSnapshotWithDelim( "1.0-foo" ), equalTo( "" ) ); } @Test public void testIsSnapshot() { assertTrue( Version.isSnapshot( "1.0-SNAPSHOT" ) ); assertTrue( Version.isSnapshot( "1.0-snapshot" ) ); assertTrue( Version.isSnapshot( "1.0.SnapsHot" ) ); assertTrue( Version.isSnapshot( "1.0.0snapshot" ) ); assertTrue( Version.isSnapshot( "snapshot" ) ); assertFalse( Version.isSnapshot( "1" ) ); assertFalse( Version.isSnapshot( "1.0-snapsho" ) ); assertFalse( Version.isSnapshot( "1.0.beta1-" ) ); } @Test public void testIsEmpty() throws Exception { assertTrue( Version.isEmpty(null) ); assertTrue( Version.isEmpty("") ); assertTrue( Version.isEmpty(" \n") ); assertFalse( Version.isEmpty( "a") ); assertFalse( Version.isEmpty( " a \n") ); } @Test public void testRemoveSnapshot() { assertThat( Version.removeSnapshot( "1.0-SNAPSHOT" ), equalTo( "1.0" ) ); assertThat( Version.removeSnapshot( "1.0.0.Beta1_snapshot" ), equalTo( "1.0.0.Beta1" ) ); assertThat( Version.removeSnapshot( "1.snaPsHot" ), equalTo( "1" ) ); assertThat( Version.removeSnapshot( "SNAPSHOT" ), equalTo( "" ) ); assertThat( Version.removeSnapshot( "1.0.snapshot.beta1" ), equalTo( "1.0.snapshot.beta1" ) ); } @Test public void testSetBuildNumber() { assertThat( Version.setBuildNumber( "1.0.beta1", "2" ), equalTo( "1.0.beta2") ); assertThat( Version.setBuildNumber( "1.0_2-Beta1-SNAPSHOT", "41" ), equalTo( "1.0_2-Beta41-SNAPSHOT") ); assertThat( Version.setBuildNumber( "1.0.2.1", "3" ), equalTo( "1.0.2.3") ); assertThat( Version.setBuildNumber( "1.0.2", "3" ), equalTo( "1.0.2.3") ); assertThat( 
Version.setBuildNumber( "1.0", "2" ), equalTo( "1.0.2") ); assertThat( Version.setBuildNumber( "1.0-alpha", "001" ), equalTo( "1.0-alpha-001") ); } @Test public void testSetSnapshot() { assertThat( Version.setSnapshot( "1.0-SNAPSHOT", true ), equalTo( "1.0-SNAPSHOT" ) ); assertThat( Version.setSnapshot( "1.0-SNAPSHOT", false ), equalTo( "1.0" ) ); assertThat( Version.setSnapshot( "1.0.0.Beta1_snapshot", true ), equalTo( "1.0.0.Beta1_snapshot" ) ); assertThat( Version.setSnapshot( "1.0.0.Beta1_snapshot", false ), equalTo( "1.0.0.Beta1" ) ); assertThat( Version.setSnapshot( "1.snaPsHot", true ), equalTo( "1.snaPsHot" ) ); assertThat( Version.setSnapshot( "1.snaPsHot", false ), equalTo( "1" ) ); assertThat( Version.setSnapshot( "SNAPSHOT", true ), equalTo( "SNAPSHOT" ) ); assertThat( Version.setSnapshot( "SNAPSHOT", false ), equalTo( "" ) ); assertThat( Version.setSnapshot( "1.0.snapshot.beta1", true ), equalTo( "1.0.snapshot.beta1-SNAPSHOT" ) ); assertThat( Version.setSnapshot( "1.0.snapshot.beta1", false ), equalTo( "1.0.snapshot.beta1" ) ); } @Test public void testStripLeadingDelimiters() { assertThat( Version.removeLeadingDelimiter( ".1.2" ), equalTo( "1.2" ) ); assertThat( Version.removeLeadingDelimiter( "_Beta1" ), equalTo( "Beta1" ) ); assertThat( Version.removeLeadingDelimiter( "1.0-SNAPSHOT" ), equalTo( "1.0-SNAPSHOT" ) ); assertThat( Version.removeLeadingDelimiter( "1.0_foo-" ), equalTo( "1.0_foo-" ) ); } @Test public void testValidOsgi() throws Exception { assertTrue( Version.isValidOSGi("1") ); assertTrue( Version.isValidOSGi("1.2") ); assertTrue( Version.isValidOSGi("1.2.3") ); assertTrue( Version.isValidOSGi("1.2.3.beta1") ); assertTrue( Version.isValidOSGi("1.2.3.beta_1") ); assertTrue( Version.isValidOSGi("1.2.3.beta-1") ); assertTrue( Version.isValidOSGi("0.0.1") ); assertFalse( Version.isValidOSGi("1.2.3.beta|1") ); assertFalse( Version.isValidOSGi("1.2.3.beta^1") ); assertFalse( Version.isValidOSGi("1.2.3.beta.1") ); assertFalse( Version.isValidOSGi("1.2.beta1") ); assertFalse( Version.isValidOSGi("1beta") ); assertFalse( Version.isValidOSGi("beta1") ); } }
Add some version parsing tests for property handling
core/src/test/java/org/commonjava/maven/ext/manip/impl/VersionTest.java
Add some version parsing tests for property handling
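The VersionTest record above pins down, among other things, how trailing SNAPSHOT qualifiers are detected and stripped (isSnapshot/removeSnapshot). The sketch below is not the project's Version implementation; it is a minimal regex-based reading of those test expectations, with placeholder class and method names.

import java.util.regex.Pattern;

public final class SnapshotSketch {
    // Trailing "snapshot" (any case), optionally preceded by a single '.', '-' or '_' delimiter.
    private static final Pattern TRAILING_SNAPSHOT = Pattern.compile("(?i)[._-]?snapshot$");

    private SnapshotSketch() { }

    static boolean isSnapshot(String version) {
        return TRAILING_SNAPSHOT.matcher(version).find();
    }

    static String removeSnapshot(String version) {
        return TRAILING_SNAPSHOT.matcher(version).replaceFirst("");
    }

    public static void main(String[] args) {
        System.out.println(isSnapshot("1.0-SNAPSHOT"));              // true
        System.out.println(isSnapshot("1.0-snapsho"));               // false
        System.out.println(removeSnapshot("1.0.0.Beta1_snapshot"));  // 1.0.0.Beta1
        System.out.println(removeSnapshot("SNAPSHOT"));              // empty string
        System.out.println(removeSnapshot("1.0.snapshot.beta1"));    // unchanged: qualifier is not trailing
    }
}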
Java
apache-2.0
4887c06f71f684f3238c3cacf5b2a20b97760a98
0
mpouttuclarke/cdap,caskdata/cdap,anthcp/cdap,anthcp/cdap,hsaputra/cdap,anthcp/cdap,chtyim/cdap,caskdata/cdap,chtyim/cdap,mpouttuclarke/cdap,mpouttuclarke/cdap,caskdata/cdap,anthcp/cdap,caskdata/cdap,caskdata/cdap,hsaputra/cdap,mpouttuclarke/cdap,chtyim/cdap,anthcp/cdap,hsaputra/cdap,mpouttuclarke/cdap,hsaputra/cdap,chtyim/cdap,hsaputra/cdap,chtyim/cdap,chtyim/cdap,caskdata/cdap
/* * Copyright © 2014 Cask Data, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package co.cask.cdap; import co.cask.cdap.common.conf.CConfiguration; import co.cask.cdap.common.conf.Configuration; import co.cask.cdap.common.conf.ConfigurationJsonTool; import co.cask.cdap.common.conf.SConfiguration; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import com.google.common.io.Files; import com.google.common.util.concurrent.AbstractExecutionThreadService; import com.google.inject.Inject; import com.google.inject.name.Named; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.File; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Writer; import java.util.concurrent.Executor; /** * WebCloudAppService is a basic Server wrapper that launches node.js and our * webapp main.js file. It then basically sits there waiting, doing nothing. * * All output is sent to our Logging service. */ final class WebCloudAppService extends AbstractExecutionThreadService { private static final String JSON_PATH = "cdap-config.json"; private static final String JSON_SECURITY_PATH = "cdap-security-config.json"; private static final Logger LOG = LoggerFactory.getLogger(WebCloudAppService.class); private static final String NODE_JS_EXECUTABLE = "node"; private static final File WEB_APP_BASE; static final String WEB_APP; static { // Determine what's the path to the main.js, based on what's on the directory // When run from IDE, the base is "cdap-web-app". When run from SDK, it's "web-app" File base = new File("web-app"); if (!base.isDirectory()) { base = new File("cdap-web-app"); } if (!base.isDirectory()) { // It's ok as the path might get pass from StandaloneMain LOG.warn("Unable to determine web-app directory"); } WEB_APP_BASE = base; WEB_APP = new File(new File(new File(base, "server"), "local"), "main.js").getAbsolutePath(); } private final File webAppBase; private final File webAppPath; private final CConfiguration cConf; private final SConfiguration sConf; private Process process; private BufferedReader bufferedReader; @Inject public WebCloudAppService(@Named("web-app-path")String webAppPath, CConfiguration cConf, SConfiguration sConf) { this.webAppPath = new File(webAppPath); // This is ok since this class is only used in standalone, the path is always [base]/server/local/main.js // However, this could change if the layer of web-app changed, which require adjustment to this class anyway this.webAppBase = this.webAppPath.getParentFile().getParentFile().getParentFile(); this.cConf = cConf; this.sConf = sConf; } /** * Start the service. */ @Override protected void startUp() throws Exception { generateConfigFile(new File(webAppBase, JSON_PATH), cConf); generateConfigFile(new File(webAppBase, JSON_SECURITY_PATH), sConf); ProcessBuilder builder = new ProcessBuilder(NODE_JS_EXECUTABLE, webAppPath.getAbsolutePath()); builder.redirectErrorStream(true); LOG.info("Starting Web Cloud App ... 
(" + webAppPath + ")"); process = builder.start(); final InputStream is = process.getInputStream(); final InputStreamReader isr = new InputStreamReader(is); bufferedReader = new BufferedReader(isr); } private void generateConfigFile(File path, Configuration config) throws Exception { Writer configWriter = Files.newWriter(path, Charsets.UTF_8); try { ConfigurationJsonTool.exportToJson(config, configWriter); } finally { configWriter.close(); } } /** * Processes the output of the command. */ @Override protected void run() throws Exception { LOG.info("Web Cloud App running ..."); try { String line; while ((line = bufferedReader.readLine()) != null) { LOG.trace(line); } } catch (Exception e) { LOG.error(e.getMessage()); } } /** * Returns the {@link Executor} that will be used to run this service. */ @Override protected Executor executor() { return new Executor() { @Override public void execute(Runnable command) { Thread thread = new Thread(command, getServiceName()); thread.setDaemon(true); thread.start(); } }; } /** * Invoked to request the service to stop. * <p/> * <p>By default this method does nothing. */ @Override protected void triggerShutdown() { process.destroy(); } /** * Stop the service. */ @Override protected void shutDown() throws Exception { LOG.info("Shutting down Web Cloud App ..."); process.waitFor(); // Cleanup generated files new File(webAppBase, JSON_PATH).delete(); new File(webAppBase, JSON_SECURITY_PATH).delete(); } }
cdap-standalone/src/main/java/co/cask/cdap/WebCloudAppService.java
/* * Copyright © 2014 Cask Data, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package co.cask.cdap; import co.cask.cdap.common.conf.CConfiguration; import co.cask.cdap.common.conf.Configuration; import co.cask.cdap.common.conf.ConfigurationJsonTool; import co.cask.cdap.common.conf.SConfiguration; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import com.google.common.io.Files; import com.google.common.util.concurrent.AbstractExecutionThreadService; import com.google.inject.Inject; import com.google.inject.name.Named; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.File; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Writer; import java.util.concurrent.Executor; /** * WebCloudAppService is a basic Server wrapper that launches node.js and our * webapp main.js file. It then basically sits there waiting, doing nothing. * * All output is sent to our Logging service. */ final class WebCloudAppService extends AbstractExecutionThreadService { private static final String JSON_PATH = "cdap-config.json"; private static final String JSON_SECURITY_PATH = "cdap-security-config.json"; private static final Logger LOG = LoggerFactory.getLogger(WebCloudAppService.class); private static final String NODE_JS_EXECUTABLE = "node"; private static final File WEB_APP_BASE; static final String WEB_APP; static { // Determine what's the path to the main.js, based on what's on the directory // When run from IDE, the base is "cdap-web-app". When run from SDK, it's "web-app" File base = new File("web-app"); if (!base.isDirectory()) { base = new File("cdap-web-app"); } Preconditions.checkState(base.isDirectory(), "Unable to determine web-app directory"); WEB_APP_BASE = base; WEB_APP = new File(new File(new File(base, "server"), "local"), "main.js").getAbsolutePath(); } private final File webAppPath; private final CConfiguration cConf; private final SConfiguration sConf; private Process process; private BufferedReader bufferedReader; @Inject public WebCloudAppService(@Named("web-app-path")String webAppPath, CConfiguration cConf, SConfiguration sConf) { this.webAppPath = new File(webAppPath); this.cConf = cConf; this.sConf = sConf; } /** * Start the service. */ @Override protected void startUp() throws Exception { // This is ok since this class is only used in singlenode, hence the path is always [base]/server/local/main.js generateConfigFile(new File(WEB_APP_BASE, JSON_PATH), cConf); generateConfigFile(new File(WEB_APP_BASE, JSON_SECURITY_PATH), sConf); ProcessBuilder builder = new ProcessBuilder(NODE_JS_EXECUTABLE, webAppPath.getAbsolutePath()); builder.redirectErrorStream(true); LOG.info("Starting Web Cloud App ... 
(" + webAppPath + ")"); process = builder.start(); final InputStream is = process.getInputStream(); final InputStreamReader isr = new InputStreamReader(is); bufferedReader = new BufferedReader(isr); } private void generateConfigFile(File path, Configuration config) throws Exception { Writer configWriter = Files.newWriter(path, Charsets.UTF_8); try { ConfigurationJsonTool.exportToJson(config, configWriter); } finally { configWriter.close(); } } /** * Processes the output of the command. */ @Override protected void run() throws Exception { LOG.info("Web Cloud App running ..."); try { String line; while ((line = bufferedReader.readLine()) != null) { LOG.trace(line); } } catch (Exception e) { LOG.error(e.getMessage()); } } /** * Returns the {@link Executor} that will be used to run this service. */ @Override protected Executor executor() { return new Executor() { @Override public void execute(Runnable command) { Thread thread = new Thread(command, getServiceName()); thread.setDaemon(true); thread.start(); } }; } /** * Invoked to request the service to stop. * <p/> * <p>By default this method does nothing. */ @Override protected void triggerShutdown() { process.destroy(); } /** * Stop the service. */ @Override protected void shutDown() throws Exception { LOG.info("Shutting down Web Cloud App ..."); process.waitFor(); } }
Better logic in WebCloudAppService to deal with case where the path to main.js is not from the constant.
cdap-standalone/src/main/java/co/cask/cdap/WebCloudAppService.java
Better logic in WebCloudAppService to deal with case where the path to main.js is not from the constant.
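The WebCloudAppService change above stops writing the generated config files next to the static WEB_APP_BASE constant and instead derives the web-app base directory from the injected main.js path by walking three parent directories up, relying on the [base]/server/local/main.js layout noted in the record's comments. A tiny sketch of that derivation, with a made-up path, for illustration only:

import java.io.File;

public class WebAppBaseSketch {
    // Assumes the layout described in the record: [base]/server/local/main.js.
    static File deriveBase(File mainJs) {
        return mainJs.getParentFile().getParentFile().getParentFile();
    }

    public static void main(String[] args) {
        File mainJs = new File("/opt/cdap/web-app/server/local/main.js"); // hypothetical path
        System.out.println(deriveBase(mainJs)); // prints /opt/cdap/web-app
    }
}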
Java
apache-2.0
80035ca4fc9fcf78609813990a6bca5cd5caaf9d
0
hhu94/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,hhu94/Synapse-Repository-Services
package org.sagebionetworks.repo.manager.backup; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertNotNull; import java.io.File; import java.io.IOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.junit.Before; import org.junit.Test; import org.sagebionetworks.evaluation.dao.EvaluationDAO; import org.sagebionetworks.evaluation.dao.ParticipantDAO; import org.sagebionetworks.evaluation.model.Evaluation; import org.sagebionetworks.evaluation.model.EvaluationStatus; import org.sagebionetworks.evaluation.model.Participant; import org.sagebionetworks.repo.model.ConflictingUpdateException; import org.sagebionetworks.repo.model.DatastoreException; import org.sagebionetworks.repo.model.InvalidModelException; import org.sagebionetworks.repo.web.NotFoundException; /** * This is a unit test for CompetitionBackupDriverImpl. * @author bkng */ public class EvaluationBackupDriverTest { EvaluationBackupDriver sourceDriver = null; EvaluationBackupDriver destinationDriver = null; Map<String, Participant> srcParts; Map<String, Evaluation> srcComps; Map<String, Participant> dstParts; Map<String, Evaluation> dstComps; private EvaluationDAO createCompetitionDAO(final Map<String, Evaluation> comps) { return (EvaluationDAO)Proxy.newProxyInstance(EvaluationBackupDriverTest.class.getClassLoader(), new Class<?>[]{EvaluationDAO.class}, new InvocationHandler() { private long nextKey = 0; @Override public Object invoke(Object synapseClient, Method method, Object[] args) throws Throwable { if (method.equals(EvaluationDAO.class.getMethod("get", String.class))) { Evaluation comp = comps.get((String)args[0]); if (comp==null) throw new NotFoundException(); return comp; } else if (method.equals(EvaluationDAO.class.getMethod("create", Evaluation.class, Long.class))) { Evaluation comp = (Evaluation)args[0]; if (comp.getId()==null) { if (comps.containsKey(""+nextKey)) throw new IllegalStateException(); comp.setId("" + (nextKey++)); } else { if (comps.containsKey(comp.getId())) throw new RuntimeException("already exists"); nextKey = Long.parseLong(comp.getId())+1; } comps.put(comp.getId(), comp); return comp.getId(); } else if (method.equals(EvaluationDAO.class.getMethod("createFromBackup", Evaluation.class, Long.class))) { Evaluation comp = (Evaluation)args[0]; if (comp.getId()==null) { if (comps.containsKey(""+nextKey)) throw new IllegalStateException(); comp.setId("" + (nextKey++)); } else { if (comps.containsKey(comp.getId())) throw new RuntimeException("already exists"); nextKey = Long.parseLong(comp.getId())+1; } comps.put(comp.getId(), comp); return comp.getId(); } else if (method.equals(EvaluationDAO.class.getMethod("updateFromBackup", Evaluation.class))) { Evaluation comp = (Evaluation)args[0]; if (comp.getId()==null || !comps.containsKey(comp.getId())) throw new RuntimeException("doesn't exist"); comps.put(comp.getId(), comp); return null; } else { throw new IllegalArgumentException(method.getName()); } } }); } private ParticipantDAO createParticipantDAO(final Map<String, Participant> parts) { return (ParticipantDAO)Proxy.newProxyInstance(EvaluationBackupDriverTest.class.getClassLoader(), new Class<?>[]{ParticipantDAO.class}, new InvocationHandler() { @Override public Object invoke(Object 
synapseClient, Method method, Object[] args) throws Throwable { if (method.equals(ParticipantDAO.class.getMethod("create", Participant.class))) { Participant p = (Participant)args[0]; parts.put(p.getUserId(), p); return Long.parseLong(p.getUserId()); } else if (method.equals(ParticipantDAO.class.getMethod("getAllByEvaluation", String.class, long.class, long.class))) { String compId = (String)args[0]; List<Participant> result = new ArrayList<Participant>(); for (Participant p : parts.values()) if (compId.equals(p.getEvaluationId())) result.add(p); return result; } else { throw new IllegalArgumentException(method.getName()); } } }); } private Evaluation createCompetition() throws Exception { Evaluation comp = new Evaluation(); comp.setContentSource("contentSource"); comp.setCreatedOn(new Date()); comp.setDescription("description"); comp.setEtag("eTag"); comp.setId("123"); comp.setName("name"); comp.setOwnerId("456"); comp.setStatus(EvaluationStatus.CLOSED); return comp; } private Participant createParticipant(String compId, String userId) throws Exception { Participant part = new Participant(); part.setEvaluationId(compId); part.setCreatedOn(new Date()); part.setUserId(userId); return part; } @Before public void before() throws Exception { srcParts = new HashMap<String, Participant>(); srcComps = new HashMap<String, Evaluation>(); dstParts = new HashMap<String, Participant>(); dstComps = new HashMap<String, Evaluation>(); EvaluationDAO srcCompetitionDAO = createCompetitionDAO(srcComps); Evaluation comp = createCompetition(); String id = srcCompetitionDAO.create(comp, Long.parseLong(comp.getOwnerId())); assertNotNull(id); ParticipantDAO srcParticipantDAO = createParticipantDAO(srcParts); int numParts = 3; for (int i = 0; i < numParts; i++) { srcParticipantDAO.create(createParticipant(comp.getId(), "" + i)); } assertEquals(1, srcComps.size()); assertEquals(numParts, srcParts.size()); EvaluationDAO dstCompetitionDAO = createCompetitionDAO(dstComps); ParticipantDAO dstParticipantDAO = createParticipantDAO(dstParts); sourceDriver = new EvaluationBackupDriver(srcCompetitionDAO, srcParticipantDAO); destinationDriver = new EvaluationBackupDriver(dstCompetitionDAO, dstParticipantDAO); } @Test public void testRoundTrip() throws IOException, DatastoreException, NotFoundException, InterruptedException, InvalidModelException, ConflictingUpdateException{ // Create a temp file File temp = File.createTempFile("CompetitionBackupDriverTest", ".zip"); try{ // Try to write to the temp file Progress progress = new Progress(); Set<String> ids = new HashSet<String>(); for (String key : srcComps.keySet()) ids.add(key); sourceDriver.writeBackup(temp, progress, ids); System.out.println("Resulting file: "+temp.getAbsolutePath()+" with a size of: "+temp.length()+" bytes"); assertTrue(temp.length() > 10); // They should start off as non equal assertTrue(dstParts.isEmpty()); assertTrue(dstComps.isEmpty()); // Now read push the backup progress = new Progress(); destinationDriver.restoreFromBackup(temp, progress); // At this point all of the data should have migrated from the source to the destination assertEquals(srcParts, dstParts); assertEquals(srcComps, dstComps); }finally{ // Cleanup the file temp.delete(); } } }
services/repository-managers/src/test/java/org/sagebionetworks/repo/manager/backup/EvaluationBackupDriverTest.java
package org.sagebionetworks.repo.manager.backup; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertNotNull; import java.io.File; import java.io.IOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.junit.Before; import org.junit.Test; import org.sagebionetworks.evaluation.dao.EvaluationDAO; import org.sagebionetworks.evaluation.dao.ParticipantDAO; import org.sagebionetworks.evaluation.model.Evaluation; import org.sagebionetworks.evaluation.model.EvaluationStatus; import org.sagebionetworks.evaluation.model.Participant; import org.sagebionetworks.repo.model.ConflictingUpdateException; import org.sagebionetworks.repo.model.DatastoreException; import org.sagebionetworks.repo.model.InvalidModelException; import org.sagebionetworks.repo.web.NotFoundException; /** * This is a unit test for CompetitionBackupDriverImpl. * @author bkng */ public class EvaluationBackupDriverTest { EvaluationBackupDriver sourceDriver = null; EvaluationBackupDriver destinationDriver = null; Map<String, Participant> srcParts; Map<String, Evaluation> srcComps; Map<String, Participant> dstParts; Map<String, Evaluation> dstComps; private EvaluationDAO createCompetitionDAO(final Map<String, Evaluation> comps) { return (EvaluationDAO)Proxy.newProxyInstance(EvaluationBackupDriverTest.class.getClassLoader(), new Class<?>[]{EvaluationDAO.class}, new InvocationHandler() { private long nextKey = 0; @Override public Object invoke(Object synapseClient, Method method, Object[] args) throws Throwable { if (method.equals(EvaluationDAO.class.getMethod("get", String.class))) { Evaluation comp = comps.get((String)args[0]); if (comp==null) throw new NotFoundException(); return comp; } else if (method.equals(EvaluationDAO.class.getMethod("create", Evaluation.class, Long.class))) { Evaluation comp = (Evaluation)args[0]; if (comp.getId()==null) { if (comps.containsKey(""+nextKey)) throw new IllegalStateException(); comp.setId("" + (nextKey++)); } else { if (comps.containsKey(comp.getId())) throw new RuntimeException("already exists"); nextKey = Long.parseLong(comp.getId())+1; } comps.put(comp.getId(), comp); return comp.getId(); } else if (method.equals(EvaluationDAO.class.getMethod("createFromBackup", Evaluation.class, Long.class))) { Evaluation comp = (Evaluation)args[0]; if (comp.getId()==null) { if (comps.containsKey(""+nextKey)) throw new IllegalStateException(); comp.setId("" + (nextKey++)); } else { if (comps.containsKey(comp.getId())) throw new RuntimeException("already exists"); nextKey = Long.parseLong(comp.getId())+1; } comps.put(comp.getId(), comp); return comp.getId(); } else if (method.equals(EvaluationDAO.class.getMethod("updateFromBackup", Evaluation.class))) { Evaluation comp = (Evaluation)args[0]; if (comp.getId()==null || !comps.containsKey(comp.getId())) throw new RuntimeException("doesn't exist"); comps.put(comp.getId(), comp); return null; } else { throw new IllegalArgumentException(method.getName()); } } }); } private ParticipantDAO createParticipantDAO(final Map<String, Participant> parts) { return (ParticipantDAO)Proxy.newProxyInstance(EvaluationBackupDriverTest.class.getClassLoader(), new Class<?>[]{ParticipantDAO.class}, new InvocationHandler() { @Override public Object invoke(Object 
synapseClient, Method method, Object[] args) throws Throwable { if (method.equals(ParticipantDAO.class.getMethod("create", Participant.class))) { Participant p = (Participant)args[0]; parts.put(p.getUserId(), p); return null; } else if (method.equals(ParticipantDAO.class.getMethod("getAllByEvaluation", String.class, long.class, long.class))) { String compId = (String)args[0]; List<Participant> result = new ArrayList<Participant>(); for (Participant p : parts.values()) if (compId.equals(p.getEvaluationId())) result.add(p); return result; } else { throw new IllegalArgumentException(method.getName()); } } }); } private Evaluation createCompetition() throws Exception { Evaluation comp = new Evaluation(); comp.setContentSource("contentSource"); comp.setCreatedOn(new Date()); comp.setDescription("description"); comp.setEtag("eTag"); comp.setId("123"); comp.setName("name"); comp.setOwnerId("456"); comp.setStatus(EvaluationStatus.CLOSED); return comp; } private Participant createParticipant(String compId, String userId) throws Exception { Participant part = new Participant(); part.setEvaluationId(compId); part.setCreatedOn(new Date()); part.setUserId(userId); return part; } @Before public void before() throws Exception { srcParts = new HashMap<String, Participant>(); srcComps = new HashMap<String, Evaluation>(); dstParts = new HashMap<String, Participant>(); dstComps = new HashMap<String, Evaluation>(); EvaluationDAO srcCompetitionDAO = createCompetitionDAO(srcComps); Evaluation comp = createCompetition(); String id = srcCompetitionDAO.create(comp, Long.parseLong(comp.getOwnerId())); assertNotNull(id); ParticipantDAO srcParticipantDAO = createParticipantDAO(srcParts); int numParts = 3; for (int i = 0; i < numParts; i++) { srcParticipantDAO.create(createParticipant(comp.getId(), "" + i)); } assertEquals(1, srcComps.size()); assertEquals(numParts, srcParts.size()); EvaluationDAO dstCompetitionDAO = createCompetitionDAO(dstComps); ParticipantDAO dstParticipantDAO = createParticipantDAO(dstParts); sourceDriver = new EvaluationBackupDriver(srcCompetitionDAO, srcParticipantDAO); destinationDriver = new EvaluationBackupDriver(dstCompetitionDAO, dstParticipantDAO); } @Test public void testRoundTrip() throws IOException, DatastoreException, NotFoundException, InterruptedException, InvalidModelException, ConflictingUpdateException{ // Create a temp file File temp = File.createTempFile("CompetitionBackupDriverTest", ".zip"); try{ // Try to write to the temp file Progress progress = new Progress(); Set<String> ids = new HashSet<String>(); for (String key : srcComps.keySet()) ids.add(key); sourceDriver.writeBackup(temp, progress, ids); System.out.println("Resulting file: "+temp.getAbsolutePath()+" with a size of: "+temp.length()+" bytes"); assertTrue(temp.length() > 10); // They should start off as non equal assertTrue(dstParts.isEmpty()); assertTrue(dstComps.isEmpty()); // Now read push the backup progress = new Progress(); destinationDriver.restoreFromBackup(temp, progress); // At this point all of the data should have migrated from the source to the destination assertEquals(srcParts, dstParts); assertEquals(srcComps, dstComps); }finally{ // Cleanup the file temp.delete(); } } }
PLFM-1858
services/repository-managers/src/test/java/org/sagebionetworks/repo/manager/backup/EvaluationBackupDriverTest.java
PLFM-1858
Java
apache-2.0
25a2e5a2df6ada03b36102dc5d46f115c8de2547
0
googleinterns/calcite,datametica/calcite,jcamachor/calcite,vlsi/calcite,julianhyde/calcite,apache/calcite,looker-open-source/calcite,vlsi/incubator-calcite
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.sql2rel; import org.apache.calcite.linq4j.Ord; import org.apache.calcite.linq4j.function.Function2; import org.apache.calcite.plan.Context; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptCostImpl; import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.plan.hep.HepPlanner; import org.apache.calcite.plan.hep.HepProgram; import org.apache.calcite.plan.hep.HepRelVertex; import org.apache.calcite.rel.BiRel; import org.apache.calcite.rel.RelCollation; import org.apache.calcite.rel.RelHomogeneousShuttle; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.Aggregate; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.rel.core.Correlate; import org.apache.calcite.rel.core.CorrelationId; import org.apache.calcite.rel.core.Filter; import org.apache.calcite.rel.core.Join; import org.apache.calcite.rel.core.JoinRelType; import org.apache.calcite.rel.core.Project; import org.apache.calcite.rel.core.RelFactories; import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.core.Values; import org.apache.calcite.rel.logical.LogicalAggregate; import org.apache.calcite.rel.logical.LogicalCorrelate; import org.apache.calcite.rel.logical.LogicalFilter; import org.apache.calcite.rel.logical.LogicalJoin; import org.apache.calcite.rel.logical.LogicalProject; import org.apache.calcite.rel.logical.LogicalSnapshot; import org.apache.calcite.rel.metadata.RelMdUtil; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.rules.FilterCorrelateRule; import org.apache.calcite.rel.rules.FilterJoinRule; import org.apache.calcite.rel.rules.FilterProjectTransposeRule; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexCorrelVariable; import org.apache.calcite.rex.RexFieldAccess; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexShuttle; import org.apache.calcite.rex.RexSubQuery; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.sql.SqlExplainFormat; import org.apache.calcite.sql.SqlExplainLevel; import org.apache.calcite.sql.SqlFunction; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlCountAggFunction; import 
org.apache.calcite.sql.fun.SqlSingleValueAggFunction; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.tools.RelBuilder; import org.apache.calcite.tools.RelBuilderFactory; import org.apache.calcite.util.Holder; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Litmus; import org.apache.calcite.util.Pair; import org.apache.calcite.util.ReflectUtil; import org.apache.calcite.util.ReflectiveVisitor; import org.apache.calcite.util.Util; import org.apache.calcite.util.mapping.Mappings; import org.apache.calcite.util.trace.CalciteTrace; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Multimap; import com.google.common.collect.MultimapBuilder; import com.google.common.collect.Sets; import com.google.common.collect.SortedSetMultimap; import org.slf4j.Logger; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Objects; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.stream.Collectors; import javax.annotation.Nonnull; /** * RelDecorrelator replaces all correlated expressions (corExp) in a relational * expression (RelNode) tree with non-correlated expressions that are produced * from joining the RelNode that produces the corExp with the RelNode that * references it. * * <p>TODO:</p> * <ul> * <li>replace {@code CorelMap} constructor parameter with a RelNode * <li>make {@link #currentRel} immutable (would require a fresh * RelDecorrelator for each node being decorrelated)</li> * <li>make fields of {@code CorelMap} immutable</li> * <li>make sub-class rules static, and have them create their own * de-correlator</li> * </ul> */ public class RelDecorrelator implements ReflectiveVisitor { //~ Static fields/initializers --------------------------------------------- private static final Logger SQL2REL_LOGGER = CalciteTrace.getSqlToRelTracer(); //~ Instance fields -------------------------------------------------------- private final RelBuilder relBuilder; // map built during translation protected CorelMap cm; private final ReflectUtil.MethodDispatcher<Frame> dispatcher = ReflectUtil.createMethodDispatcher(Frame.class, this, "decorrelateRel", RelNode.class); // The rel which is being visited private RelNode currentRel; private final Context context; /** Built during decorrelation, of rel to all the newly created correlated * variables in its output, and to map old input positions to new input * positions. This is from the view point of the parent rel of a new rel. 
*/ private final Map<RelNode, Frame> map = new HashMap<>(); private final HashSet<Correlate> generatedCorRels = new HashSet<>(); //~ Constructors ----------------------------------------------------------- protected RelDecorrelator( CorelMap cm, Context context, RelBuilder relBuilder) { this.cm = cm; this.context = context; this.relBuilder = relBuilder; } //~ Methods ---------------------------------------------------------------- @Deprecated // to be removed before 2.0 public static RelNode decorrelateQuery(RelNode rootRel) { final RelBuilder relBuilder = RelFactories.LOGICAL_BUILDER.create(rootRel.getCluster(), null); return decorrelateQuery(rootRel, relBuilder); } /** Decorrelates a query. * * <p>This is the main entry point to {@code RelDecorrelator}. * * @param rootRel Root node of the query * @param relBuilder Builder for relational expressions * * @return Equivalent query with all * {@link org.apache.calcite.rel.core.Correlate} instances removed */ public static RelNode decorrelateQuery(RelNode rootRel, RelBuilder relBuilder) { final CorelMap corelMap = new CorelMapBuilder().build(rootRel); if (!corelMap.hasCorrelation()) { return rootRel; } final RelOptCluster cluster = rootRel.getCluster(); final RelDecorrelator decorrelator = new RelDecorrelator(corelMap, cluster.getPlanner().getContext(), relBuilder); RelNode newRootRel = decorrelator.removeCorrelationViaRule(rootRel); if (SQL2REL_LOGGER.isDebugEnabled()) { SQL2REL_LOGGER.debug( RelOptUtil.dumpPlan("Plan after removing Correlator", newRootRel, SqlExplainFormat.TEXT, SqlExplainLevel.EXPPLAN_ATTRIBUTES)); } if (!decorrelator.cm.mapCorToCorRel.isEmpty()) { newRootRel = decorrelator.decorrelate(newRootRel); } return newRootRel; } private void setCurrent(RelNode root, Correlate corRel) { currentRel = corRel; if (corRel != null) { cm = new CorelMapBuilder().build(Util.first(root, corRel)); } } protected RelBuilderFactory relBuilderFactory() { return RelBuilder.proto(relBuilder); } protected RelNode decorrelate(RelNode root) { // first adjust count() expression if any final RelBuilderFactory f = relBuilderFactory(); HepProgram program = HepProgram.builder() .addRuleInstance(new AdjustProjectForCountAggregateRule(false, f)) .addRuleInstance(new AdjustProjectForCountAggregateRule(true, f)) .addRuleInstance( new FilterJoinRule.FilterIntoJoinRule(true, f, FilterJoinRule.TRUE_PREDICATE)) .addRuleInstance( new FilterProjectTransposeRule(Filter.class, Project.class, true, true, f)) .addRuleInstance(new FilterCorrelateRule(f)) .build(); HepPlanner planner = createPlanner(program); planner.setRoot(root); root = planner.findBestExp(); // Perform decorrelation. 
map.clear(); final Frame frame = getInvoke(root, null); if (frame != null) { // has been rewritten; apply rules post-decorrelation final HepProgram program2 = HepProgram.builder() .addRuleInstance( new FilterJoinRule.FilterIntoJoinRule( true, f, FilterJoinRule.TRUE_PREDICATE)) .addRuleInstance( new FilterJoinRule.JoinConditionPushRule( f, FilterJoinRule.TRUE_PREDICATE)) .build(); final HepPlanner planner2 = createPlanner(program2); final RelNode newRoot = frame.r; planner2.setRoot(newRoot); return planner2.findBestExp(); } return root; } private Function2<RelNode, RelNode, Void> createCopyHook() { return (oldNode, newNode) -> { if (cm.mapRefRelToCorRef.containsKey(oldNode)) { cm.mapRefRelToCorRef.putAll(newNode, cm.mapRefRelToCorRef.get(oldNode)); } if (oldNode instanceof Correlate && newNode instanceof Correlate) { Correlate oldCor = (Correlate) oldNode; CorrelationId c = oldCor.getCorrelationId(); if (cm.mapCorToCorRel.get(c) == oldNode) { cm.mapCorToCorRel.put(c, newNode); } if (generatedCorRels.contains(oldNode)) { generatedCorRels.add((Correlate) newNode); } } return null; }; } private HepPlanner createPlanner(HepProgram program) { // Create a planner with a hook to update the mapping tables when a // node is copied when it is registered. return new HepPlanner( program, context, true, createCopyHook(), RelOptCostImpl.FACTORY); } public RelNode removeCorrelationViaRule(RelNode root) { final RelBuilderFactory f = relBuilderFactory(); HepProgram program = HepProgram.builder() .addRuleInstance(new RemoveSingleAggregateRule(f)) .addRuleInstance(new RemoveCorrelationForScalarProjectRule(f)) .addRuleInstance(new RemoveCorrelationForScalarAggregateRule(f)) .build(); HepPlanner planner = createPlanner(program); planner.setRoot(root); return planner.findBestExp(); } protected RexNode decorrelateExpr(RelNode currentRel, Map<RelNode, Frame> map, CorelMap cm, RexNode exp) { DecorrelateRexShuttle shuttle = new DecorrelateRexShuttle(currentRel, map, cm); return exp.accept(shuttle); } protected RexNode removeCorrelationExpr( RexNode exp, boolean projectPulledAboveLeftCorrelator) { RemoveCorrelationRexShuttle shuttle = new RemoveCorrelationRexShuttle(relBuilder.getRexBuilder(), projectPulledAboveLeftCorrelator, null, ImmutableSet.of()); return exp.accept(shuttle); } protected RexNode removeCorrelationExpr( RexNode exp, boolean projectPulledAboveLeftCorrelator, RexInputRef nullIndicator) { RemoveCorrelationRexShuttle shuttle = new RemoveCorrelationRexShuttle(relBuilder.getRexBuilder(), projectPulledAboveLeftCorrelator, nullIndicator, ImmutableSet.of()); return exp.accept(shuttle); } protected RexNode removeCorrelationExpr( RexNode exp, boolean projectPulledAboveLeftCorrelator, Set<Integer> isCount) { RemoveCorrelationRexShuttle shuttle = new RemoveCorrelationRexShuttle(relBuilder.getRexBuilder(), projectPulledAboveLeftCorrelator, null, isCount); return exp.accept(shuttle); } /** Fallback if none of the other {@code decorrelateRel} methods match. 
*/ public Frame decorrelateRel(RelNode rel) { RelNode newRel = rel.copy(rel.getTraitSet(), rel.getInputs()); if (rel.getInputs().size() > 0) { List<RelNode> oldInputs = rel.getInputs(); List<RelNode> newInputs = new ArrayList<>(); for (int i = 0; i < oldInputs.size(); ++i) { final Frame frame = getInvoke(oldInputs.get(i), rel); if (frame == null || !frame.corDefOutputs.isEmpty()) { // if input is not rewritten, or if it produces correlated // variables, terminate rewrite return null; } newInputs.add(frame.r); newRel.replaceInput(i, frame.r); } if (!Util.equalShallow(oldInputs, newInputs)) { newRel = rel.copy(rel.getTraitSet(), newInputs); } } // the output position should not change since there are no corVars // coming from below. return register(rel, newRel, identityMap(rel.getRowType().getFieldCount()), ImmutableSortedMap.of()); } public Frame decorrelateRel(Sort rel) { // // Rewrite logic: // // 1. change the collations field to reference the new input. // // Sort itself should not reference corVars. assert !cm.mapRefRelToCorRef.containsKey(rel); // Sort only references field positions in collations field. // The collations field in the newRel now need to refer to the // new output positions in its input. // Its output does not change the input ordering, so there's no // need to call propagateExpr. final RelNode oldInput = rel.getInput(); final Frame frame = getInvoke(oldInput, rel); if (frame == null) { // If input has not been rewritten, do not rewrite this rel. return null; } final RelNode newInput = frame.r; Mappings.TargetMapping mapping = Mappings.target(frame.oldToNewOutputs, oldInput.getRowType().getFieldCount(), newInput.getRowType().getFieldCount()); RelCollation oldCollation = rel.getCollation(); RelCollation newCollation = RexUtil.apply(mapping, oldCollation); final int offset = rel.offset == null ? -1 : RexLiteral.intValue(rel.offset); final int fetch = rel.fetch == null ? -1 : RexLiteral.intValue(rel.fetch); final RelNode newSort = relBuilder .push(newInput) .sortLimit(offset, fetch, relBuilder.fields(newCollation)) .build(); // Sort does not change input ordering return register(rel, newSort, frame.oldToNewOutputs, frame.corDefOutputs); } public Frame decorrelateRel(Values rel) { // There are no inputs, so rel does not need to be changed. return null; } public Frame decorrelateRel(LogicalAggregate rel) { return decorrelateRel((Aggregate) rel); } public Frame decorrelateRel(Aggregate rel) { // // Rewrite logic: // // 1. Permute the group by keys to the front. // 2. If the input of an aggregate produces correlated variables, // add them to the group list. // 3. Change aggCalls to reference the new project. // // Aggregate itself should not reference corVars. assert !cm.mapRefRelToCorRef.containsKey(rel); final RelNode oldInput = rel.getInput(); final Frame frame = getInvoke(oldInput, rel); if (frame == null) { // If input has not been rewritten, do not rewrite this rel. return null; } final RelNode newInput = frame.r; // aggregate outputs mapping: group keys and aggregates final Map<Integer, Integer> outputMap = new HashMap<>(); // map from newInput final Map<Integer, Integer> mapNewInputToProjOutputs = new HashMap<>(); final int oldGroupKeyCount = rel.getGroupSet().cardinality(); // Project projects the original expressions, // plus any correlated variables the input wants to pass along. 
final List<Pair<RexNode, String>> projects = new ArrayList<>(); List<RelDataTypeField> newInputOutput = newInput.getRowType().getFieldList(); int newPos = 0; // oldInput has the original group by keys in the front. final NavigableMap<Integer, RexLiteral> omittedConstants = new TreeMap<>(); for (int i = 0; i < oldGroupKeyCount; i++) { final RexLiteral constant = projectedLiteral(newInput, i); if (constant != null) { // Exclude constants. Aggregate({true}) occurs because Aggregate({}) // would generate 1 row even when applied to an empty table. omittedConstants.put(i, constant); continue; } // add mapping of group keys. outputMap.put(i, newPos); int newInputPos = frame.oldToNewOutputs.get(i); projects.add(RexInputRef.of2(newInputPos, newInputOutput)); mapNewInputToProjOutputs.put(newInputPos, newPos); newPos++; } final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(); if (!frame.corDefOutputs.isEmpty()) { // If input produces correlated variables, move them to the front, // right after any existing GROUP BY fields. // Now add the corVars from the input, starting from // position oldGroupKeyCount. for (Map.Entry<CorDef, Integer> entry : frame.corDefOutputs.entrySet()) { projects.add(RexInputRef.of2(entry.getValue(), newInputOutput)); corDefOutputs.put(entry.getKey(), newPos); mapNewInputToProjOutputs.put(entry.getValue(), newPos); newPos++; } } // add the remaining fields final int newGroupKeyCount = newPos; for (int i = 0; i < newInputOutput.size(); i++) { if (!mapNewInputToProjOutputs.containsKey(i)) { projects.add(RexInputRef.of2(i, newInputOutput)); mapNewInputToProjOutputs.put(i, newPos); newPos++; } } assert newPos == newInputOutput.size(); // This Project will be what the old input maps to, // replacing any previous mapping from old input). RelNode newProject = relBuilder.push(newInput) .projectNamed(Pair.left(projects), Pair.right(projects), true) .build(); // update mappings: // oldInput ----> newInput // // newProject // | // oldInput ----> newInput // // is transformed to // // oldInput ----> newProject // | // newInput Map<Integer, Integer> combinedMap = new HashMap<>(); for (Integer oldInputPos : frame.oldToNewOutputs.keySet()) { combinedMap.put(oldInputPos, mapNewInputToProjOutputs.get( frame.oldToNewOutputs.get(oldInputPos))); } register(oldInput, newProject, combinedMap, corDefOutputs); // now it's time to rewrite the Aggregate final ImmutableBitSet newGroupSet = ImmutableBitSet.range(newGroupKeyCount); List<AggregateCall> newAggCalls = new ArrayList<>(); List<AggregateCall> oldAggCalls = rel.getAggCallList(); ImmutableList<ImmutableBitSet> newGroupSets = null; if (rel.getGroupType() != Aggregate.Group.SIMPLE) { final ImmutableBitSet addedGroupSet = ImmutableBitSet.range(oldGroupKeyCount, newGroupKeyCount); final Iterable<ImmutableBitSet> tmpGroupSets = Iterables.transform(rel.getGroupSets(), bitSet -> bitSet.union(addedGroupSet)); newGroupSets = ImmutableBitSet.ORDERING.immutableSortedCopy(tmpGroupSets); } int oldInputOutputFieldCount = rel.getGroupSet().cardinality(); int newInputOutputFieldCount = newGroupSet.cardinality(); int i = -1; for (AggregateCall oldAggCall : oldAggCalls) { ++i; List<Integer> oldAggArgs = oldAggCall.getArgList(); List<Integer> aggArgs = new ArrayList<>(); // Adjust the Aggregate argument positions. // Note Aggregate does not change input ordering, so the input // output position mapping can be used to derive the new positions // for the argument. 
for (int oldPos : oldAggArgs) { aggArgs.add(combinedMap.get(oldPos)); } final int filterArg = oldAggCall.filterArg < 0 ? oldAggCall.filterArg : combinedMap.get(oldAggCall.filterArg); newAggCalls.add( oldAggCall.adaptTo(newProject, aggArgs, filterArg, oldGroupKeyCount, newGroupKeyCount)); // The old to new output position mapping will be the same as that // of newProject, plus any aggregates that the oldAgg produces. outputMap.put( oldInputOutputFieldCount + i, newInputOutputFieldCount + i); } relBuilder.push(newProject).aggregate( relBuilder.groupKey(newGroupSet, newGroupSets), newAggCalls); if (!omittedConstants.isEmpty()) { final List<RexNode> postProjects = new ArrayList<>(relBuilder.fields()); for (Map.Entry<Integer, RexLiteral> entry : omittedConstants.descendingMap().entrySet()) { int index = entry.getKey() + frame.corDefOutputs.size(); postProjects.add(index, entry.getValue()); // Shift the outputs whose index equals with or bigger than the added index // with 1 offset. shiftMapping(outputMap, index, 1); // Then add the constant key mapping. outputMap.put(entry.getKey(), index); } relBuilder.project(postProjects); } // Aggregate does not change input ordering so corVars will be // located at the same position as the input newProject. return register(rel, relBuilder.build(), outputMap, corDefOutputs); } /** * Shift the mapping to fixed offset from the {@code startIndex}. * @param mapping the original mapping * @param startIndex any output whose index equals with or bigger than the starting index * would be shift * @param offset shift offset */ private static void shiftMapping(Map<Integer, Integer> mapping, int startIndex, int offset) { for (Map.Entry<Integer, Integer> entry : mapping.entrySet()) { if (entry.getValue() >= startIndex) { mapping.put(entry.getKey(), entry.getValue() + offset); } else { mapping.put(entry.getKey(), entry.getValue()); } } } public Frame getInvoke(RelNode r, RelNode parent) { final Frame frame = dispatcher.invoke(r); if (frame != null) { map.put(r, frame); } currentRel = parent; return frame; } /** Returns a literal output field, or null if it is not literal. */ private static RexLiteral projectedLiteral(RelNode rel, int i) { if (rel instanceof Project) { final Project project = (Project) rel; final RexNode node = project.getProjects().get(i); if (node instanceof RexLiteral) { return (RexLiteral) node; } } return null; } public Frame decorrelateRel(LogicalProject rel) { return decorrelateRel((Project) rel); } public Frame decorrelateRel(Project rel) { // // Rewrite logic: // // 1. Pass along any correlated variables coming from the input. // final RelNode oldInput = rel.getInput(); Frame frame = getInvoke(oldInput, rel); if (frame == null) { // If input has not been rewritten, do not rewrite this rel. return null; } final List<RexNode> oldProjects = rel.getProjects(); final List<RelDataTypeField> relOutput = rel.getRowType().getFieldList(); // Project projects the original expressions, // plus any correlated variables the input wants to pass along. final List<Pair<RexNode, String>> projects = new ArrayList<>(); // If this Project has correlated reference, create value generator // and produce the correlated variables in the new output. 
if (cm.mapRefRelToCorRef.containsKey(rel)) { frame = decorrelateInputWithValueGenerator(rel, frame); } // Project projects the original expressions final Map<Integer, Integer> mapOldToNewOutputs = new HashMap<>(); int newPos; for (newPos = 0; newPos < oldProjects.size(); newPos++) { projects.add( newPos, Pair.of( decorrelateExpr(currentRel, map, cm, oldProjects.get(newPos)), relOutput.get(newPos).getName())); mapOldToNewOutputs.put(newPos, newPos); } // Project any correlated variables the input wants to pass along. final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(); for (Map.Entry<CorDef, Integer> entry : frame.corDefOutputs.entrySet()) { projects.add( RexInputRef.of2(entry.getValue(), frame.r.getRowType().getFieldList())); corDefOutputs.put(entry.getKey(), newPos); newPos++; } RelNode newProject = relBuilder.push(frame.r) .projectNamed(Pair.left(projects), Pair.right(projects), true) .build(); return register(rel, newProject, mapOldToNewOutputs, corDefOutputs); } /** * Create RelNode tree that produces a list of correlated variables. * * @param correlations correlated variables to generate * @param valueGenFieldOffset offset in the output that generated columns * will start * @param corDefOutputs output positions for the correlated variables * generated * @return RelNode the root of the resultant RelNode tree */ private RelNode createValueGenerator( Iterable<CorRef> correlations, int valueGenFieldOffset, SortedMap<CorDef, Integer> corDefOutputs) { final Map<RelNode, List<Integer>> mapNewInputToOutputs = new HashMap<>(); final Map<RelNode, Integer> mapNewInputToNewOffset = new HashMap<>(); // Input provides the definition of a correlated variable. // Add to map all the referenced positions (relative to each input rel). for (CorRef corVar : correlations) { final int oldCorVarOffset = corVar.field; final RelNode oldInput = getCorRel(corVar); assert oldInput != null; final Frame frame = getFrame(oldInput, true); assert frame != null; final RelNode newInput = frame.r; final List<Integer> newLocalOutputs; if (!mapNewInputToOutputs.containsKey(newInput)) { newLocalOutputs = new ArrayList<>(); } else { newLocalOutputs = mapNewInputToOutputs.get(newInput); } final int newCorVarOffset = frame.oldToNewOutputs.get(oldCorVarOffset); // Add all unique positions referenced. if (!newLocalOutputs.contains(newCorVarOffset)) { newLocalOutputs.add(newCorVarOffset); } mapNewInputToOutputs.put(newInput, newLocalOutputs); } int offset = 0; // Project only the correlated fields out of each input // and join the project together. // To make sure the plan does not change in terms of join order, // join these rels based on their occurrence in corVar list which // is sorted. 
final Set<RelNode> joinedInputs = new HashSet<>(); RelNode r = null; for (CorRef corVar : correlations) { final RelNode oldInput = getCorRel(corVar); assert oldInput != null; final RelNode newInput = getFrame(oldInput, true).r; assert newInput != null; if (!joinedInputs.contains(newInput)) { final List<Integer> positions = mapNewInputToOutputs.get(newInput); final List<String> fieldNames = newInput.getRowType().getFieldNames(); RelNode distinct = relBuilder.push(newInput) .project(relBuilder.fields(positions)) .distinct() .build(); RelOptCluster cluster = distinct.getCluster(); joinedInputs.add(newInput); mapNewInputToNewOffset.put(newInput, offset); offset += distinct.getRowType().getFieldCount(); if (r == null) { r = distinct; } else { r = relBuilder.push(r).push(distinct) .join(JoinRelType.INNER, cluster.getRexBuilder().makeLiteral(true)).build(); } } } // Translate the positions of correlated variables to be relative to // the join output, leaving room for valueGenFieldOffset because // valueGenerators are joined with the original left input of the rel // referencing correlated variables. for (CorRef corRef : correlations) { // The first input of a Correlate is always the rel defining // the correlated variables. final RelNode oldInput = getCorRel(corRef); assert oldInput != null; final Frame frame = getFrame(oldInput, true); final RelNode newInput = frame.r; assert newInput != null; final List<Integer> newLocalOutputs = mapNewInputToOutputs.get(newInput); final int newLocalOutput = frame.oldToNewOutputs.get(corRef.field); // newOutput is the index of the corVar in the referenced // position list plus the offset of referenced position list of // each newInput. final int newOutput = newLocalOutputs.indexOf(newLocalOutput) + mapNewInputToNewOffset.get(newInput) + valueGenFieldOffset; corDefOutputs.put(corRef.def(), newOutput); } return r; } private Frame getFrame(RelNode r, boolean safe) { final Frame frame = map.get(r); if (frame == null && safe) { return new Frame(r, r, ImmutableSortedMap.of(), identityMap(r.getRowType().getFieldCount())); } return frame; } private RelNode getCorRel(CorRef corVar) { final RelNode r = cm.mapCorToCorRel.get(corVar.corr); return r.getInput(0); } /** Adds a value generator to satisfy the correlating variables used by * a relational expression, if those variables are not already provided by * its input. */ private Frame maybeAddValueGenerator(RelNode rel, Frame frame) { final CorelMap cm1 = new CorelMapBuilder().build(frame.r, rel); if (!cm1.mapRefRelToCorRef.containsKey(rel)) { return frame; } final Collection<CorRef> needs = cm1.mapRefRelToCorRef.get(rel); final ImmutableSortedSet<CorDef> haves = frame.corDefOutputs.keySet(); if (hasAll(needs, haves)) { return frame; } return decorrelateInputWithValueGenerator(rel, frame); } /** Returns whether all of a collection of {@link CorRef}s are satisfied * by at least one of a collection of {@link CorDef}s. */ private boolean hasAll(Collection<CorRef> corRefs, Collection<CorDef> corDefs) { for (CorRef corRef : corRefs) { if (!has(corDefs, corRef)) { return false; } } return true; } /** Returns whether a {@link CorrelationId} is satisfied by at least one of a * collection of {@link CorDef}s. 
*/ private boolean has(Collection<CorDef> corDefs, CorRef corr) { for (CorDef corDef : corDefs) { if (corDef.corr.equals(corr.corr) && corDef.field == corr.field) { return true; } } return false; } private Frame decorrelateInputWithValueGenerator(RelNode rel, Frame frame) { // currently only handles one input assert rel.getInputs().size() == 1; RelNode oldInput = frame.r; final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(frame.corDefOutputs); final Collection<CorRef> corVarList = cm.mapRefRelToCorRef.get(rel); // Try to populate correlation variables using local fields. // This means that we do not need a value generator. if (rel instanceof Filter) { SortedMap<CorDef, Integer> map = new TreeMap<>(); List<RexNode> projects = new ArrayList<>(); for (CorRef correlation : corVarList) { final CorDef def = correlation.def(); if (corDefOutputs.containsKey(def) || map.containsKey(def)) { continue; } try { findCorrelationEquivalent(correlation, ((Filter) rel).getCondition()); } catch (Util.FoundOne e) { if (e.getNode() instanceof RexInputRef) { map.put(def, ((RexInputRef) e.getNode()).getIndex()); } else { map.put(def, frame.r.getRowType().getFieldCount() + projects.size()); projects.add((RexNode) e.getNode()); } } } // If all correlation variables are now satisfied, skip creating a value // generator. if (map.size() == corVarList.size()) { map.putAll(frame.corDefOutputs); final RelNode r; if (!projects.isEmpty()) { relBuilder.push(oldInput) .project(Iterables.concat(relBuilder.fields(), projects)); r = relBuilder.build(); } else { r = oldInput; } return register(rel.getInput(0), r, frame.oldToNewOutputs, map); } } int leftInputOutputCount = frame.r.getRowType().getFieldCount(); // can directly add positions into corDefOutputs since join // does not change the output ordering from the inputs. RelNode valueGen = createValueGenerator(corVarList, leftInputOutputCount, corDefOutputs); RelNode join = relBuilder.push(frame.r).push(valueGen) .join(JoinRelType.INNER, relBuilder.literal(true), ImmutableSet.of()).build(); // Join or Filter does not change the old input ordering. All // input fields from newLeftInput (i.e. the original input to the old // Filter) are in the output and in the same position. return register(rel.getInput(0), join, frame.oldToNewOutputs, corDefOutputs); } /** Finds a {@link RexInputRef} that is equivalent to a {@link CorRef}, * and if found, throws a {@link org.apache.calcite.util.Util.FoundOne}. 
*/ private void findCorrelationEquivalent(CorRef correlation, RexNode e) throws Util.FoundOne { switch (e.getKind()) { case EQUALS: final RexCall call = (RexCall) e; final List<RexNode> operands = call.getOperands(); if (references(operands.get(0), correlation)) { throw new Util.FoundOne(operands.get(1)); } if (references(operands.get(1), correlation)) { throw new Util.FoundOne(operands.get(0)); } break; case AND: for (RexNode operand : ((RexCall) e).getOperands()) { findCorrelationEquivalent(correlation, operand); } } } private boolean references(RexNode e, CorRef correlation) { switch (e.getKind()) { case CAST: final RexNode operand = ((RexCall) e).getOperands().get(0); if (isWidening(e.getType(), operand.getType())) { return references(operand, correlation); } return false; case FIELD_ACCESS: final RexFieldAccess f = (RexFieldAccess) e; if (f.getField().getIndex() == correlation.field && f.getReferenceExpr() instanceof RexCorrelVariable) { if (((RexCorrelVariable) f.getReferenceExpr()).id == correlation.corr) { return true; } } // fall through default: return false; } } /** Returns whether one type is just a widening of another. * * <p>For example:<ul> * <li>{@code VARCHAR(10)} is a widening of {@code VARCHAR(5)}. * <li>{@code VARCHAR(10)} is a widening of {@code VARCHAR(10) NOT NULL}. * </ul> */ private boolean isWidening(RelDataType type, RelDataType type1) { return type.getSqlTypeName() == type1.getSqlTypeName() && type.getPrecision() >= type1.getPrecision(); } public Frame decorrelateRel(LogicalSnapshot rel) { if (RexUtil.containsCorrelation(rel.getPeriod())) { return null; } return decorrelateRel((RelNode) rel); } public Frame decorrelateRel(LogicalFilter rel) { return decorrelateRel((Filter) rel); } public Frame decorrelateRel(Filter rel) { // // Rewrite logic: // // 1. If a Filter references a correlated field in its filter // condition, rewrite the Filter to be // Filter // Join(cross product) // originalFilterInput // ValueGenerator(produces distinct sets of correlated variables) // and rewrite the correlated fieldAccess in the filter condition to // reference the Join output. // // 2. If Filter does not reference correlated variables, simply // rewrite the filter condition using new input. // final RelNode oldInput = rel.getInput(); Frame frame = getInvoke(oldInput, rel); if (frame == null) { // If input has not been rewritten, do not rewrite this rel. return null; } // If this Filter has correlated reference, create value generator // and produce the correlated variables in the new output. if (false) { if (cm.mapRefRelToCorRef.containsKey(rel)) { frame = decorrelateInputWithValueGenerator(rel, frame); } } else { frame = maybeAddValueGenerator(rel, frame); } final CorelMap cm2 = new CorelMapBuilder().build(rel); // Replace the filter expression to reference output of the join // Map filter to the new filter over join relBuilder.push(frame.r) .filter(decorrelateExpr(currentRel, map, cm2, rel.getCondition())); // Filter does not change the input ordering. // Filter rel does not permute the input. // All corVars produced by filter will have the same output positions in the // input rel. return register(rel, relBuilder.build(), frame.oldToNewOutputs, frame.corDefOutputs); } public Frame decorrelateRel(LogicalCorrelate rel) { return decorrelateRel((Correlate) rel); } public Frame decorrelateRel(Correlate rel) { // // Rewrite logic: // // The original left input will be joined with the new right input that // has generated correlated variables propagated up. 
For any generated // corVars that are not used in the join key, pass them along to be // joined later with the Correlates that produce them. // // the right input to Correlate should produce correlated variables final RelNode oldLeft = rel.getInput(0); final RelNode oldRight = rel.getInput(1); final Frame leftFrame = getInvoke(oldLeft, rel); final Frame rightFrame = getInvoke(oldRight, rel); if (leftFrame == null || rightFrame == null) { // If any input has not been rewritten, do not rewrite this rel. return null; } if (rightFrame.corDefOutputs.isEmpty()) { return null; } assert rel.getRequiredColumns().cardinality() <= rightFrame.corDefOutputs.keySet().size(); // Change correlator rel into a join. // Join all the correlated variables produced by this correlator rel // with the values generated and propagated from the right input final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(rightFrame.corDefOutputs); final List<RexNode> conditions = new ArrayList<>(); final List<RelDataTypeField> newLeftOutput = leftFrame.r.getRowType().getFieldList(); int newLeftFieldCount = newLeftOutput.size(); final List<RelDataTypeField> newRightOutput = rightFrame.r.getRowType().getFieldList(); for (Map.Entry<CorDef, Integer> rightOutput : new ArrayList<>(corDefOutputs.entrySet())) { final CorDef corDef = rightOutput.getKey(); if (!corDef.corr.equals(rel.getCorrelationId())) { continue; } final int newLeftPos = leftFrame.oldToNewOutputs.get(corDef.field); final int newRightPos = rightOutput.getValue(); conditions.add( relBuilder.call(SqlStdOperatorTable.EQUALS, RexInputRef.of(newLeftPos, newLeftOutput), new RexInputRef(newLeftFieldCount + newRightPos, newRightOutput.get(newRightPos).getType()))); // remove this corVar from output position mapping corDefOutputs.remove(corDef); } // Update the output position for the corVars: only pass on the cor // vars that are not used in the join key. for (CorDef corDef : corDefOutputs.keySet()) { int newPos = corDefOutputs.get(corDef) + newLeftFieldCount; corDefOutputs.put(corDef, newPos); } // then add any corVar from the left input. Do not need to change // output positions. corDefOutputs.putAll(leftFrame.corDefOutputs); // Create the mapping between the output of the old correlation rel // and the new join rel final Map<Integer, Integer> mapOldToNewOutputs = new HashMap<>(); int oldLeftFieldCount = oldLeft.getRowType().getFieldCount(); int oldRightFieldCount = oldRight.getRowType().getFieldCount(); //noinspection AssertWithSideEffects assert rel.getRowType().getFieldCount() == oldLeftFieldCount + oldRightFieldCount; // Left input positions are not changed. mapOldToNewOutputs.putAll(leftFrame.oldToNewOutputs); // Right input positions are shifted by newLeftFieldCount. for (int i = 0; i < oldRightFieldCount; i++) { mapOldToNewOutputs.put(i + oldLeftFieldCount, rightFrame.oldToNewOutputs.get(i) + newLeftFieldCount); } final RexNode condition = RexUtil.composeConjunction(relBuilder.getRexBuilder(), conditions); RelNode newJoin = relBuilder.push(leftFrame.r).push(rightFrame.r) .join(rel.getJoinType(), condition, ImmutableSet.of()).build(); return register(rel, newJoin, mapOldToNewOutputs, corDefOutputs); } public Frame decorrelateRel(LogicalJoin rel) { return decorrelateRel((Join) rel); } public Frame decorrelateRel(Join rel) { // For SEMI/ANTI join decorrelate it's input directly, // because the correlate variables can only be propagated from // the left side, which is not supported yet. 
if (!rel.getJoinType().projectsRight()) { return decorrelateRel((RelNode) rel); } // // Rewrite logic: // // 1. rewrite join condition. // 2. map output positions and produce corVars if any. // final RelNode oldLeft = rel.getInput(0); final RelNode oldRight = rel.getInput(1); final Frame leftFrame = getInvoke(oldLeft, rel); final Frame rightFrame = getInvoke(oldRight, rel); if (leftFrame == null || rightFrame == null) { // If any input has not been rewritten, do not rewrite this rel. return null; } final RelNode newJoin = relBuilder .push(leftFrame.r) .push(rightFrame.r) .join(rel.getJoinType(), decorrelateExpr(currentRel, map, cm, rel.getCondition()), ImmutableSet.of()) .build(); // Create the mapping between the output of the old correlation rel // and the new join rel Map<Integer, Integer> mapOldToNewOutputs = new HashMap<>(); int oldLeftFieldCount = oldLeft.getRowType().getFieldCount(); int newLeftFieldCount = leftFrame.r.getRowType().getFieldCount(); int oldRightFieldCount = oldRight.getRowType().getFieldCount(); //noinspection AssertWithSideEffects assert rel.getRowType().getFieldCount() == oldLeftFieldCount + oldRightFieldCount; // Left input positions are not changed. mapOldToNewOutputs.putAll(leftFrame.oldToNewOutputs); // Right input positions are shifted by newLeftFieldCount. for (int i = 0; i < oldRightFieldCount; i++) { mapOldToNewOutputs.put(i + oldLeftFieldCount, rightFrame.oldToNewOutputs.get(i) + newLeftFieldCount); } final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(leftFrame.corDefOutputs); // Right input positions are shifted by newLeftFieldCount. for (Map.Entry<CorDef, Integer> entry : rightFrame.corDefOutputs.entrySet()) { corDefOutputs.put(entry.getKey(), entry.getValue() + newLeftFieldCount); } return register(rel, newJoin, mapOldToNewOutputs, corDefOutputs); } private static RexInputRef getNewForOldInputRef(RelNode currentRel, Map<RelNode, Frame> map, RexInputRef oldInputRef) { assert currentRel != null; int oldOrdinal = oldInputRef.getIndex(); int newOrdinal = 0; // determine which input rel oldOrdinal references, and adjust // oldOrdinal to be relative to that input rel RelNode oldInput = null; for (RelNode oldInput0 : currentRel.getInputs()) { RelDataType oldInputType = oldInput0.getRowType(); int n = oldInputType.getFieldCount(); if (oldOrdinal < n) { oldInput = oldInput0; break; } RelNode newInput = map.get(oldInput0).r; newOrdinal += newInput.getRowType().getFieldCount(); oldOrdinal -= n; } assert oldInput != null; final Frame frame = map.get(oldInput); assert frame != null; // now oldOrdinal is relative to oldInput int oldLocalOrdinal = oldOrdinal; // figure out the newLocalOrdinal, relative to the newInput. int newLocalOrdinal = oldLocalOrdinal; if (!frame.oldToNewOutputs.isEmpty()) { newLocalOrdinal = frame.oldToNewOutputs.get(oldLocalOrdinal); } newOrdinal += newLocalOrdinal; return new RexInputRef(newOrdinal, frame.r.getRowType().getFieldList().get(newLocalOrdinal).getType()); } /** * Pulls project above the join from its RHS input. Enforces nullability * for join output. 
* * @param join Join * @param project Original project as the right-hand input of the join * @param nullIndicatorPos Position of null indicator * @return the subtree with the new Project at the root */ private RelNode projectJoinOutputWithNullability( Join join, Project project, int nullIndicatorPos) { final RelDataTypeFactory typeFactory = join.getCluster().getTypeFactory(); final RelNode left = join.getLeft(); final JoinRelType joinType = join.getJoinType(); RexInputRef nullIndicator = new RexInputRef( nullIndicatorPos, typeFactory.createTypeWithNullability( join.getRowType().getFieldList().get(nullIndicatorPos) .getType(), true)); // now create the new project List<Pair<RexNode, String>> newProjExprs = new ArrayList<>(); // project everything from the LHS and then those from the original // projRel List<RelDataTypeField> leftInputFields = left.getRowType().getFieldList(); for (int i = 0; i < leftInputFields.size(); i++) { newProjExprs.add(RexInputRef.of2(i, leftInputFields)); } // Marked where the projected expr is coming from so that the types will // become nullable for the original projections which are now coming out // of the nullable side of the OJ. boolean projectPulledAboveLeftCorrelator = joinType.generatesNullsOnRight(); for (Pair<RexNode, String> pair : project.getNamedProjects()) { RexNode newProjExpr = removeCorrelationExpr( pair.left, projectPulledAboveLeftCorrelator, nullIndicator); newProjExprs.add(Pair.of(newProjExpr, pair.right)); } return relBuilder.push(join) .projectNamed(Pair.left(newProjExprs), Pair.right(newProjExprs), true) .build(); } /** * Pulls a {@link Project} above a {@link Correlate} from its RHS input. * Enforces nullability for join output. * * @param correlate Correlate * @param project the original project as the RHS input of the join * @param isCount Positions which are calls to the <code>COUNT</code> * aggregation function * @return the subtree with the new Project at the root */ private RelNode aggregateCorrelatorOutput( Correlate correlate, Project project, Set<Integer> isCount) { final RelNode left = correlate.getLeft(); final JoinRelType joinType = correlate.getJoinType(); // now create the new project final List<Pair<RexNode, String>> newProjects = new ArrayList<>(); // Project everything from the LHS and then those from the original // project final List<RelDataTypeField> leftInputFields = left.getRowType().getFieldList(); for (int i = 0; i < leftInputFields.size(); i++) { newProjects.add(RexInputRef.of2(i, leftInputFields)); } // Marked where the projected expr is coming from so that the types will // become nullable for the original projections which are now coming out // of the nullable side of the OJ. boolean projectPulledAboveLeftCorrelator = joinType.generatesNullsOnRight(); for (Pair<RexNode, String> pair : project.getNamedProjects()) { RexNode newProjExpr = removeCorrelationExpr( pair.left, projectPulledAboveLeftCorrelator, isCount); newProjects.add(Pair.of(newProjExpr, pair.right)); } return relBuilder.push(correlate) .projectNamed(Pair.left(newProjects), Pair.right(newProjects), true) .build(); } /** * Checks whether the correlations in projRel and filter are related to * the correlated variables provided by corRel. 
* * @param correlate Correlate * @param project The original Project as the RHS input of the join * @param filter Filter * @param correlatedJoinKeys Correlated join keys * @return true if filter and proj only references corVar provided by corRel */ private boolean checkCorVars( Correlate correlate, Project project, Filter filter, List<RexFieldAccess> correlatedJoinKeys) { if (filter != null) { assert correlatedJoinKeys != null; // check that all correlated refs in the filter condition are // used in the join(as field access). Set<CorRef> corVarInFilter = Sets.newHashSet(cm.mapRefRelToCorRef.get(filter)); for (RexFieldAccess correlatedJoinKey : correlatedJoinKeys) { corVarInFilter.remove(cm.mapFieldAccessToCorRef.get(correlatedJoinKey)); } if (!corVarInFilter.isEmpty()) { return false; } // Check that the correlated variables referenced in these // comparisons do come from the Correlate. corVarInFilter.addAll(cm.mapRefRelToCorRef.get(filter)); for (CorRef corVar : corVarInFilter) { if (cm.mapCorToCorRel.get(corVar.corr) != correlate) { return false; } } } // if project has any correlated reference, make sure they are also // provided by the current correlate. They will be projected out of the LHS // of the correlate. if ((project != null) && cm.mapRefRelToCorRef.containsKey(project)) { for (CorRef corVar : cm.mapRefRelToCorRef.get(project)) { if (cm.mapCorToCorRel.get(corVar.corr) != correlate) { return false; } } } return true; } /** * Remove correlated variables from the tree at root corRel * * @param correlate Correlate */ private void removeCorVarFromTree(Correlate correlate) { if (cm.mapCorToCorRel.get(correlate.getCorrelationId()) == correlate) { cm.mapCorToCorRel.remove(correlate.getCorrelationId()); } } /** * Projects all {@code input} output fields plus the additional expressions. * * @param input Input relational expression * @param additionalExprs Additional expressions and names * @return the new Project */ private RelNode createProjectWithAdditionalExprs( RelNode input, List<Pair<RexNode, String>> additionalExprs) { final List<RelDataTypeField> fieldList = input.getRowType().getFieldList(); List<Pair<RexNode, String>> projects = new ArrayList<>(); Ord.forEach(fieldList, (field, i) -> projects.add( Pair.of(relBuilder.getRexBuilder().makeInputRef(field.getType(), i), field.getName()))); projects.addAll(additionalExprs); return relBuilder.push(input) .projectNamed(Pair.left(projects), Pair.right(projects), true) .build(); } /* Returns an immutable map with the identity [0: 0, .., count-1: count-1]. */ static Map<Integer, Integer> identityMap(int count) { ImmutableMap.Builder<Integer, Integer> builder = ImmutableMap.builder(); for (int i = 0; i < count; i++) { builder.put(i, i); } return builder.build(); } /** Registers a relational expression and the relational expression it became * after decorrelation. 
*/ Frame register(RelNode rel, RelNode newRel, Map<Integer, Integer> oldToNewOutputs, SortedMap<CorDef, Integer> corDefOutputs) { final Frame frame = new Frame(rel, newRel, corDefOutputs, oldToNewOutputs); map.put(rel, frame); return frame; } static boolean allLessThan(Collection<Integer> integers, int limit, Litmus ret) { for (int value : integers) { if (value >= limit) { return ret.fail("out of range; value: {}, limit: {}", value, limit); } } return ret.succeed(); } private static RelNode stripHep(RelNode rel) { if (rel instanceof HepRelVertex) { HepRelVertex hepRelVertex = (HepRelVertex) rel; rel = hepRelVertex.getCurrentRel(); } return rel; } //~ Inner Classes ---------------------------------------------------------- /** Shuttle that decorrelates. */ private static class DecorrelateRexShuttle extends RexShuttle { private final RelNode currentRel; private final Map<RelNode, Frame> map; private final CorelMap cm; private DecorrelateRexShuttle(RelNode currentRel, Map<RelNode, Frame> map, CorelMap cm) { this.currentRel = Objects.requireNonNull(currentRel); this.map = Objects.requireNonNull(map); this.cm = Objects.requireNonNull(cm); } @Override public RexNode visitFieldAccess(RexFieldAccess fieldAccess) { int newInputOutputOffset = 0; for (RelNode input : currentRel.getInputs()) { final Frame frame = map.get(input); if (frame != null) { // try to find in this input rel the position of corVar final CorRef corRef = cm.mapFieldAccessToCorRef.get(fieldAccess); if (corRef != null) { Integer newInputPos = frame.corDefOutputs.get(corRef.def()); if (newInputPos != null) { // This input does produce the corVar referenced. return new RexInputRef(newInputPos + newInputOutputOffset, frame.r.getRowType().getFieldList().get(newInputPos) .getType()); } } // this input does not produce the corVar needed newInputOutputOffset += frame.r.getRowType().getFieldCount(); } else { // this input is not rewritten newInputOutputOffset += input.getRowType().getFieldCount(); } } return fieldAccess; } @Override public RexNode visitInputRef(RexInputRef inputRef) { final RexInputRef ref = getNewForOldInputRef(currentRel, map, inputRef); if (ref.getIndex() == inputRef.getIndex() && ref.getType() == inputRef.getType()) { return inputRef; // re-use old object, to prevent needless expr cloning } return ref; } } /** Shuttle that removes correlations. */ private class RemoveCorrelationRexShuttle extends RexShuttle { final RexBuilder rexBuilder; final RelDataTypeFactory typeFactory; final boolean projectPulledAboveLeftCorrelator; final RexInputRef nullIndicator; final ImmutableSet<Integer> isCount; RemoveCorrelationRexShuttle( RexBuilder rexBuilder, boolean projectPulledAboveLeftCorrelator, RexInputRef nullIndicator, Set<Integer> isCount) { this.projectPulledAboveLeftCorrelator = projectPulledAboveLeftCorrelator; this.nullIndicator = nullIndicator; // may be null this.isCount = ImmutableSet.copyOf(isCount); this.rexBuilder = rexBuilder; this.typeFactory = rexBuilder.getTypeFactory(); } private RexNode createCaseExpression( RexInputRef nullInputRef, RexLiteral lit, RexNode rexNode) { RexNode[] caseOperands = new RexNode[3]; // Construct a CASE expression to handle the null indicator. // // This also covers the case where a left correlated sub-query // projects fields from outer relation. Since LOJ cannot produce // nulls on the LHS, the projection now need to make a nullable LHS // reference using a nullability indicator. If this this indicator // is null, it means the sub-query does not produce any value. 
As a // result, any RHS ref by this sub-query needs to produce null value. // WHEN indicator IS NULL caseOperands[0] = rexBuilder.makeCall( SqlStdOperatorTable.IS_NULL, new RexInputRef( nullInputRef.getIndex(), typeFactory.createTypeWithNullability( nullInputRef.getType(), true))); // THEN CAST(NULL AS newInputTypeNullable) caseOperands[1] = lit == null ? rexBuilder.makeNullLiteral(rexNode.getType()) : rexBuilder.makeCast(rexNode.getType(), lit); // ELSE cast (newInput AS newInputTypeNullable) END caseOperands[2] = rexBuilder.makeCast( typeFactory.createTypeWithNullability( rexNode.getType(), true), rexNode); return rexBuilder.makeCall( SqlStdOperatorTable.CASE, caseOperands); } @Override public RexNode visitFieldAccess(RexFieldAccess fieldAccess) { if (cm.mapFieldAccessToCorRef.containsKey(fieldAccess)) { // if it is a corVar, change it to be input ref. CorRef corVar = cm.mapFieldAccessToCorRef.get(fieldAccess); // corVar offset should point to the leftInput of currentRel, // which is the Correlate. RexNode newRexNode = new RexInputRef(corVar.field, fieldAccess.getType()); if (projectPulledAboveLeftCorrelator && (nullIndicator != null)) { // need to enforce nullability by applying an additional // cast operator over the transformed expression. newRexNode = createCaseExpression(nullIndicator, null, newRexNode); } return newRexNode; } return fieldAccess; } @Override public RexNode visitInputRef(RexInputRef inputRef) { if (currentRel instanceof Correlate) { // if this rel references corVar // and now it needs to be rewritten // it must have been pulled above the Correlate // replace the input ref to account for the LHS of the // Correlate final int leftInputFieldCount = ((Correlate) currentRel).getLeft().getRowType() .getFieldCount(); RelDataType newType = inputRef.getType(); if (projectPulledAboveLeftCorrelator) { newType = typeFactory.createTypeWithNullability(newType, true); } int pos = inputRef.getIndex(); RexInputRef newInputRef = new RexInputRef(leftInputFieldCount + pos, newType); if ((isCount != null) && isCount.contains(pos)) { return createCaseExpression( newInputRef, rexBuilder.makeExactLiteral(BigDecimal.ZERO), newInputRef); } else { return newInputRef; } } return inputRef; } @Override public RexNode visitLiteral(RexLiteral literal) { // Use nullIndicator to decide whether to project null. // Do nothing if the literal is null. if (!RexUtil.isNull(literal) && projectPulledAboveLeftCorrelator && (nullIndicator != null)) { return createCaseExpression(nullIndicator, null, literal); } return literal; } @Override public RexNode visitCall(final RexCall call) { RexNode newCall; boolean[] update = {false}; List<RexNode> clonedOperands = visitList(call.operands, update); if (update[0]) { SqlOperator operator = call.getOperator(); boolean isSpecialCast = false; if (operator instanceof SqlFunction) { SqlFunction function = (SqlFunction) operator; if (function.getKind() == SqlKind.CAST) { if (call.operands.size() < 2) { isSpecialCast = true; } } } final RelDataType newType; if (!isSpecialCast) { // TODO: ideally this only needs to be called if the result // type will also change. However, since that requires // support from type inference rules to tell whether a rule // decides return type based on input types, for now all // operators will be recreated with new type if any operand // changed, unless the operator has "built-in" type. 
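        // For example, an operand wrapped in the null-indicator CASE expression
        // above may have become nullable, which in turn can make the call's result
        // type nullable; hence the re-derivation from the cloned operands below.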
newType = rexBuilder.deriveReturnType(operator, clonedOperands); } else { // Use the current return type when creating a new call, for // operators with return type built into the operator // definition, and with no type inference rules, such as // cast function with less than 2 operands. // TODO: Comments in RexShuttle.visitCall() mention other // types in this category. Need to resolve those together // and preferably in the base class RexShuttle. newType = call.getType(); } newCall = rexBuilder.makeCall( newType, operator, clonedOperands); } else { newCall = call; } if (projectPulledAboveLeftCorrelator && (nullIndicator != null)) { return createCaseExpression(nullIndicator, null, newCall); } return newCall; } } /** * Rule to remove single_value rel. For cases like * * <blockquote>AggRel single_value proj/filter/agg/ join on unique LHS key * AggRel single group</blockquote> */ private final class RemoveSingleAggregateRule extends RelOptRule { RemoveSingleAggregateRule(RelBuilderFactory relBuilderFactory) { super( operand( Aggregate.class, operand( Project.class, operand(Aggregate.class, any()))), relBuilderFactory, null); } public void onMatch(RelOptRuleCall call) { Aggregate singleAggregate = call.rel(0); Project project = call.rel(1); Aggregate aggregate = call.rel(2); // check singleAggRel is single_value agg if ((!singleAggregate.getGroupSet().isEmpty()) || (singleAggregate.getAggCallList().size() != 1) || !(singleAggregate.getAggCallList().get(0).getAggregation() instanceof SqlSingleValueAggFunction)) { return; } // check projRel only projects one expression // check this project only projects one expression, i.e. scalar // sub-queries. List<RexNode> projExprs = project.getProjects(); if (projExprs.size() != 1) { return; } // check the input to project is an aggregate on the entire input if (!aggregate.getGroupSet().isEmpty()) { return; } // singleAggRel produces a nullable type, so create the new // projection that casts proj expr to a nullable type. final RelBuilder relBuilder = call.builder(); final RelDataType type = relBuilder.getTypeFactory() .createTypeWithNullability(projExprs.get(0).getType(), true); final RexNode cast = relBuilder.getRexBuilder().makeCast(type, projExprs.get(0)); relBuilder.push(aggregate) .project(cast); call.transformTo(relBuilder.build()); } } /** Planner rule that removes correlations for scalar projects. */ private final class RemoveCorrelationForScalarProjectRule extends RelOptRule { RemoveCorrelationForScalarProjectRule(RelBuilderFactory relBuilderFactory) { super( operand(Correlate.class, operand(RelNode.class, any()), operand(Aggregate.class, operand(Project.class, operand(RelNode.class, any())))), relBuilderFactory, null); } public void onMatch(RelOptRuleCall call) { final Correlate correlate = call.rel(0); final RelNode left = call.rel(1); final Aggregate aggregate = call.rel(2); final Project project = call.rel(3); RelNode right = call.rel(4); final RelOptCluster cluster = correlate.getCluster(); setCurrent(call.getPlanner().getRoot(), correlate); // Check for this pattern. // The pattern matching could be simplified if rules can be applied // during decorrelation. // // Correlate(left correlation, condition = true) // leftInput // Aggregate (groupby (0) single_value()) // Project-A (may reference corVar) // rightInput final JoinRelType joinType = correlate.getJoinType(); // corRel.getCondition was here, however Correlate was updated so it // never includes a join condition. The code was not modified for brevity. 
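      // Note: joinCond below is always the boolean TRUE literal (RexBuilder reuses
      // the interned TRUE literal), so the second half of the check can never fire;
      // effectively it only rejects non-LEFT correlations.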
RexNode joinCond = relBuilder.literal(true); if ((joinType != JoinRelType.LEFT) || (joinCond != relBuilder.literal(true))) { return; } // check that the agg is of the following type: // doing a single_value() on the entire input if ((!aggregate.getGroupSet().isEmpty()) || (aggregate.getAggCallList().size() != 1) || !(aggregate.getAggCallList().get(0).getAggregation() instanceof SqlSingleValueAggFunction)) { return; } // check this project only projects one expression, i.e. scalar // sub-queries. if (project.getProjects().size() != 1) { return; } int nullIndicatorPos; if ((right instanceof Filter) && cm.mapRefRelToCorRef.containsKey(right)) { // rightInput has this shape: // // Filter (references corVar) // filterInput // If rightInput is a filter and contains correlated // reference, make sure the correlated keys in the filter // condition forms a unique key of the RHS. Filter filter = (Filter) right; right = filter.getInput(); assert right instanceof HepRelVertex; right = ((HepRelVertex) right).getCurrentRel(); // check filter input contains no correlation if (RelOptUtil.getVariablesUsed(right).size() > 0) { return; } // extract the correlation out of the filter // First breaking up the filter conditions into equality // comparisons between rightJoinKeys (from the original // filterInput) and correlatedJoinKeys. correlatedJoinKeys // can be expressions, while rightJoinKeys need to be input // refs. These comparisons are AND'ed together. List<RexNode> tmpRightJoinKeys = new ArrayList<>(); List<RexNode> correlatedJoinKeys = new ArrayList<>(); RelOptUtil.splitCorrelatedFilterCondition( filter, tmpRightJoinKeys, correlatedJoinKeys, false); // check that the columns referenced in these comparisons form // an unique key of the filterInput final List<RexInputRef> rightJoinKeys = new ArrayList<>(); for (RexNode key : tmpRightJoinKeys) { assert key instanceof RexInputRef; rightJoinKeys.add((RexInputRef) key); } // check that the columns referenced in rightJoinKeys form an // unique key of the filterInput if (rightJoinKeys.isEmpty()) { return; } // The join filters out the nulls. So, it's ok if there are // nulls in the join keys. final RelMetadataQuery mq = call.getMetadataQuery(); if (!RelMdUtil.areColumnsDefinitelyUniqueWhenNullsFiltered(mq, right, rightJoinKeys)) { SQL2REL_LOGGER.debug("{} are not unique keys for {}", rightJoinKeys, right); return; } RexUtil.FieldAccessFinder visitor = new RexUtil.FieldAccessFinder(); RexUtil.apply(visitor, correlatedJoinKeys, null); List<RexFieldAccess> correlatedKeyList = visitor.getFieldAccessList(); if (!checkCorVars(correlate, project, filter, correlatedKeyList)) { return; } // Change the plan to this structure. // Note that the Aggregate is removed. // // Project-A' (replace corVar to input ref from the Join) // Join (replace corVar to input ref from leftInput) // leftInput // rightInput (previously filterInput) // Change the filter condition into a join condition joinCond = removeCorrelationExpr(filter.getCondition(), false); nullIndicatorPos = left.getRowType().getFieldCount() + rightJoinKeys.get(0).getIndex(); } else if (cm.mapRefRelToCorRef.containsKey(project)) { // check filter input contains no correlation if (RelOptUtil.getVariablesUsed(right).size() > 0) { return; } if (!checkCorVars(correlate, project, null, null)) { return; } // Change the plan to this structure. // // Project-A' (replace corVar to input ref from Join) // Join (left, condition = true) // leftInput // Aggregate(groupby(0), single_value(0), s_v(1)....) 
// Project-B (everything from input plus literal true) // projectInput // make the new Project to provide a null indicator right = createProjectWithAdditionalExprs(right, ImmutableList.of( Pair.of(relBuilder.literal(true), "nullIndicator"))); // make the new aggRel right = RelOptUtil.createSingleValueAggRel(cluster, right); // The last field: // single_value(true) // is the nullIndicator nullIndicatorPos = left.getRowType().getFieldCount() + right.getRowType().getFieldCount() - 1; } else { return; } // make the new join rel Join join = (Join) relBuilder.push(left).push(right) .join(joinType, joinCond).build(); RelNode newProject = projectJoinOutputWithNullability(join, project, nullIndicatorPos); call.transformTo(newProject); removeCorVarFromTree(correlate); } } /** Planner rule that removes correlations for scalar aggregates. */ private final class RemoveCorrelationForScalarAggregateRule extends RelOptRule { RemoveCorrelationForScalarAggregateRule(RelBuilderFactory relBuilderFactory) { super( operand(Correlate.class, operand(RelNode.class, any()), operand(Project.class, operandJ(Aggregate.class, null, Aggregate::isSimple, operand(Project.class, operand(RelNode.class, any()))))), relBuilderFactory, null); } public void onMatch(RelOptRuleCall call) { final Correlate correlate = call.rel(0); final RelNode left = call.rel(1); final Project aggOutputProject = call.rel(2); final Aggregate aggregate = call.rel(3); final Project aggInputProject = call.rel(4); RelNode right = call.rel(5); final RelBuilder builder = call.builder(); final RexBuilder rexBuilder = builder.getRexBuilder(); final RelOptCluster cluster = correlate.getCluster(); setCurrent(call.getPlanner().getRoot(), correlate); // check for this pattern // The pattern matching could be simplified if rules can be applied // during decorrelation, // // CorrelateRel(left correlation, condition = true) // leftInput // Project-A (a RexNode) // Aggregate (groupby (0), agg0(), agg1()...) // Project-B (references coVar) // rightInput // check aggOutputProject projects only one expression final List<RexNode> aggOutputProjects = aggOutputProject.getProjects(); if (aggOutputProjects.size() != 1) { return; } final JoinRelType joinType = correlate.getJoinType(); // corRel.getCondition was here, however Correlate was updated so it // never includes a join condition. The code was not modified for brevity. RexNode joinCond = rexBuilder.makeLiteral(true); if ((joinType != JoinRelType.LEFT) || (joinCond != rexBuilder.makeLiteral(true))) { return; } // check that the agg is on the entire input if (!aggregate.getGroupSet().isEmpty()) { return; } final List<RexNode> aggInputProjects = aggInputProject.getProjects(); final List<AggregateCall> aggCalls = aggregate.getAggCallList(); final Set<Integer> isCountStar = new HashSet<>(); // mark if agg produces count(*) which needs to reference the // nullIndicator after the transformation. 
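      // Rationale (consistent with the rewrite comments below): once the correlate
      // becomes an outer join plus GROUP BY, a plain count(*) would also count the
      // null-extended padding row, returning 1 instead of 0 for unmatched left
      // rows; count(nullIndicator) ignores that row because the appended "true"
      // column is NULL on the padded side.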
int k = -1; for (AggregateCall aggCall : aggCalls) { ++k; if ((aggCall.getAggregation() instanceof SqlCountAggFunction) && (aggCall.getArgList().size() == 0)) { isCountStar.add(k); } } if ((right instanceof Filter) && cm.mapRefRelToCorRef.containsKey(right)) { // rightInput has this shape: // // Filter (references corVar) // filterInput Filter filter = (Filter) right; right = filter.getInput(); assert right instanceof HepRelVertex; right = ((HepRelVertex) right).getCurrentRel(); // check filter input contains no correlation if (RelOptUtil.getVariablesUsed(right).size() > 0) { return; } // check filter condition type First extract the correlation out // of the filter // First breaking up the filter conditions into equality // comparisons between rightJoinKeys(from the original // filterInput) and correlatedJoinKeys. correlatedJoinKeys // can only be RexFieldAccess, while rightJoinKeys can be // expressions. These comparisons are AND'ed together. List<RexNode> rightJoinKeys = new ArrayList<>(); List<RexNode> tmpCorrelatedJoinKeys = new ArrayList<>(); RelOptUtil.splitCorrelatedFilterCondition( filter, rightJoinKeys, tmpCorrelatedJoinKeys, true); // make sure the correlated reference forms a unique key check // that the columns referenced in these comparisons form an // unique key of the leftInput List<RexFieldAccess> correlatedJoinKeys = new ArrayList<>(); List<RexInputRef> correlatedInputRefJoinKeys = new ArrayList<>(); for (RexNode joinKey : tmpCorrelatedJoinKeys) { assert joinKey instanceof RexFieldAccess; correlatedJoinKeys.add((RexFieldAccess) joinKey); RexNode correlatedInputRef = removeCorrelationExpr(joinKey, false); assert correlatedInputRef instanceof RexInputRef; correlatedInputRefJoinKeys.add( (RexInputRef) correlatedInputRef); } // check that the columns referenced in rightJoinKeys form an // unique key of the filterInput if (correlatedInputRefJoinKeys.isEmpty()) { return; } // The join filters out the nulls. So, it's ok if there are // nulls in the join keys. final RelMetadataQuery mq = call.getMetadataQuery(); if (!RelMdUtil.areColumnsDefinitelyUniqueWhenNullsFiltered(mq, left, correlatedInputRefJoinKeys)) { SQL2REL_LOGGER.debug("{} are not unique keys for {}", correlatedJoinKeys, left); return; } // check corVar references are valid if (!checkCorVars(correlate, aggInputProject, filter, correlatedJoinKeys)) { return; } // Rewrite the above plan: // // Correlate(left correlation, condition = true) // leftInput // Project-A (a RexNode) // Aggregate (groupby(0), agg0(),agg1()...) // Project-B (may reference corVar) // Filter (references corVar) // rightInput (no correlated reference) // // to this plan: // // Project-A' (all gby keys + rewritten nullable ProjExpr) // Aggregate (groupby(all left input refs) // agg0(rewritten expression), // agg1()...) // Project-B' (rewritten original projected exprs) // Join(replace corVar w/ input ref from leftInput) // leftInput // rightInput // // In the case where agg is count(*) or count($corVar), it is // changed to count(nullIndicator). // Note: any non-nullable field from the RHS can be used as // the indicator however a "true" field is added to the // projection list from the RHS for simplicity to avoid // searching for non-null fields. // // Project-A' (all gby keys + rewritten nullable ProjExpr) // Aggregate (groupby(all left input refs), // count(nullIndicator), other aggs...) 
// Project-B' (all left input refs plus // the rewritten original projected exprs) // Join(replace corVar to input ref from leftInput) // leftInput // Project (everything from rightInput plus // the nullIndicator "true") // rightInput // // first change the filter condition into a join condition joinCond = removeCorrelationExpr(filter.getCondition(), false); } else if (cm.mapRefRelToCorRef.containsKey(aggInputProject)) { // check rightInput contains no correlation if (RelOptUtil.getVariablesUsed(right).size() > 0) { return; } // check corVar references are valid if (!checkCorVars(correlate, aggInputProject, null, null)) { return; } int nFields = left.getRowType().getFieldCount(); ImmutableBitSet allCols = ImmutableBitSet.range(nFields); // leftInput contains unique keys // i.e. each row is distinct and can group by on all the left // fields final RelMetadataQuery mq = call.getMetadataQuery(); if (!RelMdUtil.areColumnsDefinitelyUnique(mq, left, allCols)) { SQL2REL_LOGGER.debug("There are no unique keys for {}", left); return; } // // Rewrite the above plan: // // CorrelateRel(left correlation, condition = true) // leftInput // Project-A (a RexNode) // Aggregate (groupby(0), agg0(), agg1()...) // Project-B (references coVar) // rightInput (no correlated reference) // // to this plan: // // Project-A' (all gby keys + rewritten nullable ProjExpr) // Aggregate (groupby(all left input refs) // agg0(rewritten expression), // agg1()...) // Project-B' (rewritten original projected exprs) // Join (LOJ cond = true) // leftInput // rightInput // // In the case where agg is count($corVar), it is changed to // count(nullIndicator). // Note: any non-nullable field from the RHS can be used as // the indicator however a "true" field is added to the // projection list from the RHS for simplicity to avoid // searching for non-null fields. // // Project-A' (all gby keys + rewritten nullable ProjExpr) // Aggregate (groupby(all left input refs), // count(nullIndicator), other aggs...) 
// Project-B' (all left input refs plus // the rewritten original projected exprs) // Join (replace corVar to input ref from leftInput) // leftInput // Project (everything from rightInput plus // the nullIndicator "true") // rightInput } else { return; } RelDataType leftInputFieldType = left.getRowType(); int leftInputFieldCount = leftInputFieldType.getFieldCount(); int joinOutputProjExprCount = leftInputFieldCount + aggInputProjects.size() + 1; right = createProjectWithAdditionalExprs(right, ImmutableList.of( Pair.of(rexBuilder.makeLiteral(true), "nullIndicator"))); Join join = (Join) relBuilder.push(left).push(right) .join(joinType, joinCond, ImmutableSet.of()).build(); // To the consumer of joinOutputProjRel, nullIndicator is located // at the end int nullIndicatorPos = join.getRowType().getFieldCount() - 1; RexInputRef nullIndicator = new RexInputRef( nullIndicatorPos, cluster.getTypeFactory().createTypeWithNullability( join.getRowType().getFieldList() .get(nullIndicatorPos).getType(), true)); // first project all group-by keys plus the transformed agg input List<RexNode> joinOutputProjects = new ArrayList<>(); // LOJ Join preserves LHS types for (int i = 0; i < leftInputFieldCount; i++) { joinOutputProjects.add( rexBuilder.makeInputRef( leftInputFieldType.getFieldList().get(i).getType(), i)); } for (RexNode aggInputProjExpr : aggInputProjects) { joinOutputProjects.add( removeCorrelationExpr(aggInputProjExpr, joinType.generatesNullsOnRight(), nullIndicator)); } joinOutputProjects.add( rexBuilder.makeInputRef(join, nullIndicatorPos)); final RelNode joinOutputProject = builder.push(join) .project(joinOutputProjects) .build(); // nullIndicator is now at a different location in the output of // the join nullIndicatorPos = joinOutputProjExprCount - 1; final int groupCount = leftInputFieldCount; List<AggregateCall> newAggCalls = new ArrayList<>(); k = -1; for (AggregateCall aggCall : aggCalls) { ++k; final List<Integer> argList; if (isCountStar.contains(k)) { // this is a count(*), transform it to count(nullIndicator) // the null indicator is located at the end argList = Collections.singletonList(nullIndicatorPos); } else { argList = new ArrayList<>(); for (int aggArg : aggCall.getArgList()) { argList.add(aggArg + groupCount); } } int filterArg = aggCall.filterArg < 0 ? aggCall.filterArg : aggCall.filterArg + groupCount; newAggCalls.add( aggCall.adaptTo(joinOutputProject, argList, filterArg, aggregate.getGroupCount(), groupCount)); } ImmutableBitSet groupSet = ImmutableBitSet.range(groupCount); builder.push(joinOutputProject).aggregate(builder.groupKey(groupSet, null), newAggCalls); List<RexNode> newAggOutputProjectList = new ArrayList<>(); for (int i : groupSet) { newAggOutputProjectList.add( rexBuilder.makeInputRef(builder.peek(), i)); } RexNode newAggOutputProjects = removeCorrelationExpr(aggOutputProjects.get(0), false); newAggOutputProjectList.add( rexBuilder.makeCast( cluster.getTypeFactory().createTypeWithNullability( newAggOutputProjects.getType(), true), newAggOutputProjects)); builder.project(newAggOutputProjectList); call.transformTo(builder.build()); removeCorVarFromTree(correlate); } } // REVIEW jhyde 29-Oct-2007: This rule is non-static, depends on the state // of members in RelDecorrelator, and has side-effects in the decorrelator. // This breaks the contract of a planner rule, and the rule will not be // reusable in other planners. // REVIEW jvs 29-Oct-2007: Shouldn't it also be incorporating // the flavor attribute into the description? 
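  // Illustrative example of the rewrite performed by
  // RemoveCorrelationForScalarAggregateRule above (added for exposition; the query
  // and the EMP/DEPT tables are hypothetical):
  //
  //   SELECT d.name,
  //          (SELECT SUM(e.sal) FROM emp e WHERE e.deptno = d.deptno)
  //   FROM dept d
  //
  // roughly becomes: left outer join DEPT to a projection of EMP that appends a
  // constant TRUE nullIndicator, group by all DEPT columns, evaluate the rewritten
  // SUM(sal) (with any COUNT redirected to the nullIndicator), and finally project
  // the original expression cast to a nullable type.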
/** Planner rule that adjusts projects when counts are added. */ private final class AdjustProjectForCountAggregateRule extends RelOptRule { final boolean flavor; AdjustProjectForCountAggregateRule(boolean flavor, RelBuilderFactory relBuilderFactory) { super( flavor ? operand(Correlate.class, operand(RelNode.class, any()), operand(Project.class, operand(Aggregate.class, any()))) : operand(Correlate.class, operand(RelNode.class, any()), operand(Aggregate.class, any())), relBuilderFactory, null); this.flavor = flavor; } public void onMatch(RelOptRuleCall call) { final Correlate correlate = call.rel(0); final RelNode left = call.rel(1); final Project aggOutputProject; final Aggregate aggregate; if (flavor) { aggOutputProject = call.rel(2); aggregate = call.rel(3); } else { aggregate = call.rel(2); // Create identity projection final List<Pair<RexNode, String>> projects = new ArrayList<>(); final List<RelDataTypeField> fields = aggregate.getRowType().getFieldList(); for (int i = 0; i < fields.size(); i++) { projects.add(RexInputRef.of2(projects.size(), fields)); } final RelBuilder relBuilder = call.builder(); relBuilder.push(aggregate) .projectNamed(Pair.left(projects), Pair.right(projects), true); aggOutputProject = (Project) relBuilder.build(); } onMatch2(call, correlate, left, aggOutputProject, aggregate); } private void onMatch2( RelOptRuleCall call, Correlate correlate, RelNode leftInput, Project aggOutputProject, Aggregate aggregate) { if (generatedCorRels.contains(correlate)) { // This Correlate was generated by a previous invocation of // this rule. No further work to do. return; } setCurrent(call.getPlanner().getRoot(), correlate); // check for this pattern // The pattern matching could be simplified if rules can be applied // during decorrelation, // // CorrelateRel(left correlation, condition = true) // leftInput // Project-A (a RexNode) // Aggregate (groupby (0), agg0(), agg1()...) // check aggOutputProj projects only one expression List<RexNode> aggOutputProjExprs = aggOutputProject.getProjects(); if (aggOutputProjExprs.size() != 1) { return; } JoinRelType joinType = correlate.getJoinType(); // corRel.getCondition was here, however Correlate was updated so it // never includes a join condition. The code was not modified for brevity. RexNode joinCond = relBuilder.literal(true); if ((joinType != JoinRelType.LEFT) || (joinCond != relBuilder.literal(true))) { return; } // check that the agg is on the entire input if (!aggregate.getGroupSet().isEmpty()) { return; } List<AggregateCall> aggCalls = aggregate.getAggCallList(); Set<Integer> isCount = new HashSet<>(); // remember the count() positions int i = -1; for (AggregateCall aggCall : aggCalls) { ++i; if (aggCall.getAggregation() instanceof SqlCountAggFunction) { isCount.add(i); } } // now rewrite the plan to // // Project-A' (all LHS plus transformed original projections, // replacing references to count() with case statement) // Correlate(left correlation, condition = true) // leftInput // Aggregate(groupby (0), agg0(), agg1()...) 
// List<RexNode> requiredNodes = correlate.getRequiredColumns().asList().stream() .map(ord -> relBuilder.getRexBuilder().makeInputRef(correlate, ord)) .collect(Collectors.toList()); Correlate newCorrelate = (Correlate) relBuilder.push(leftInput) .push(aggregate).correlate(correlate.getJoinType(), correlate.getCorrelationId(), requiredNodes).build(); // remember this rel so we don't fire rule on it again // REVIEW jhyde 29-Oct-2007: rules should not save state; rule // should recognize patterns where it does or does not need to do // work generatedCorRels.add(newCorrelate); // need to update the mapCorToCorRel Update the output position // for the corVars: only pass on the corVars that are not used in // the join key. if (cm.mapCorToCorRel.get(correlate.getCorrelationId()) == correlate) { cm.mapCorToCorRel.put(correlate.getCorrelationId(), newCorrelate); } RelNode newOutput = aggregateCorrelatorOutput(newCorrelate, aggOutputProject, isCount); call.transformTo(newOutput); } } /** * A unique reference to a correlation field. * * <p>For instance, if a RelNode references emp.name multiple times, it would * result in multiple {@code CorRef} objects that differ just in * {@link CorRef#uniqueKey}. */ static class CorRef implements Comparable<CorRef> { public final int uniqueKey; public final CorrelationId corr; public final int field; CorRef(CorrelationId corr, int field, int uniqueKey) { this.corr = corr; this.field = field; this.uniqueKey = uniqueKey; } @Override public String toString() { return corr.getName() + '.' + field; } @Override public int hashCode() { return Objects.hash(uniqueKey, corr, field); } @Override public boolean equals(Object o) { return this == o || o instanceof CorRef && uniqueKey == ((CorRef) o).uniqueKey && corr == ((CorRef) o).corr && field == ((CorRef) o).field; } public int compareTo(@Nonnull CorRef o) { int c = corr.compareTo(o.corr); if (c != 0) { return c; } c = Integer.compare(field, o.field); if (c != 0) { return c; } return Integer.compare(uniqueKey, o.uniqueKey); } public CorDef def() { return new CorDef(corr, field); } } /** A correlation and a field. */ static class CorDef implements Comparable<CorDef> { public final CorrelationId corr; public final int field; CorDef(CorrelationId corr, int field) { this.corr = corr; this.field = field; } @Override public String toString() { return corr.getName() + '.' + field; } @Override public int hashCode() { return Objects.hash(corr, field); } @Override public boolean equals(Object o) { return this == o || o instanceof CorDef && corr == ((CorDef) o).corr && field == ((CorDef) o).field; } public int compareTo(@Nonnull CorDef o) { int c = corr.compareTo(o.corr); if (c != 0) { return c; } return Integer.compare(field, o.field); } } /** A map of the locations of * {@link org.apache.calcite.rel.core.Correlate} * in a tree of {@link RelNode}s. * * <p>It is used to drive the decorrelation process. * Treat it as immutable; rebuild if you modify the tree. * * <p>There are three maps:<ol> * * <li>{@link #mapRefRelToCorRef} maps a {@link RelNode} to the correlated * variables it references; * * <li>{@link #mapCorToCorRel} maps a correlated variable to the * {@link Correlate} providing it; * * <li>{@link #mapFieldAccessToCorRef} maps a rex field access to * the corVar it represents. Because typeFlattener does not clone or * modify a correlated field access this map does not need to be * updated. 
* * </ol> */ protected static class CorelMap { private final Multimap<RelNode, CorRef> mapRefRelToCorRef; private final SortedMap<CorrelationId, RelNode> mapCorToCorRel; private final Map<RexFieldAccess, CorRef> mapFieldAccessToCorRef; // TODO: create immutable copies of all maps private CorelMap(Multimap<RelNode, CorRef> mapRefRelToCorRef, SortedMap<CorrelationId, RelNode> mapCorToCorRel, Map<RexFieldAccess, CorRef> mapFieldAccessToCorRef) { this.mapRefRelToCorRef = mapRefRelToCorRef; this.mapCorToCorRel = mapCorToCorRel; this.mapFieldAccessToCorRef = ImmutableMap.copyOf(mapFieldAccessToCorRef); } @Override public String toString() { return "mapRefRelToCorRef=" + mapRefRelToCorRef + "\nmapCorToCorRel=" + mapCorToCorRel + "\nmapFieldAccessToCorRef=" + mapFieldAccessToCorRef + "\n"; } @Override public boolean equals(Object obj) { return obj == this || obj instanceof CorelMap && mapRefRelToCorRef.equals(((CorelMap) obj).mapRefRelToCorRef) && mapCorToCorRel.equals(((CorelMap) obj).mapCorToCorRel) && mapFieldAccessToCorRef.equals( ((CorelMap) obj).mapFieldAccessToCorRef); } @Override public int hashCode() { return Objects.hash(mapRefRelToCorRef, mapCorToCorRel, mapFieldAccessToCorRef); } /** Creates a CorelMap with given contents. */ public static CorelMap of( SortedSetMultimap<RelNode, CorRef> mapRefRelToCorVar, SortedMap<CorrelationId, RelNode> mapCorToCorRel, Map<RexFieldAccess, CorRef> mapFieldAccessToCorVar) { return new CorelMap(mapRefRelToCorVar, mapCorToCorRel, mapFieldAccessToCorVar); } public SortedMap<CorrelationId, RelNode> getMapCorToCorRel() { return mapCorToCorRel; } /** * Returns whether there are any correlating variables in this statement. * * @return whether there are any correlating variables */ public boolean hasCorrelation() { return !mapCorToCorRel.isEmpty(); } } /** Builds a {@link org.apache.calcite.sql2rel.RelDecorrelator.CorelMap}. */ public static class CorelMapBuilder extends RelHomogeneousShuttle { final SortedMap<CorrelationId, RelNode> mapCorToCorRel = new TreeMap<>(); final SortedSetMultimap<RelNode, CorRef> mapRefRelToCorRef = MultimapBuilder.SortedSetMultimapBuilder.hashKeys() .treeSetValues() .build(); final Map<RexFieldAccess, CorRef> mapFieldAccessToCorVar = new HashMap<>(); final Holder<Integer> offset = Holder.of(0); int corrIdGenerator = 0; /** Creates a CorelMap by iterating over a {@link RelNode} tree. */ public CorelMap build(RelNode... 
rels) { for (RelNode rel : rels) { stripHep(rel).accept(this); } return new CorelMap(mapRefRelToCorRef, mapCorToCorRel, mapFieldAccessToCorVar); } @Override public RelNode visit(RelNode other) { if (other instanceof Join) { Join join = (Join) other; try { stack.push(join); join.getCondition().accept(rexVisitor(join)); } finally { stack.pop(); } return visitJoin(join); } else if (other instanceof Correlate) { Correlate correlate = (Correlate) other; mapCorToCorRel.put(correlate.getCorrelationId(), correlate); return visitJoin(correlate); } else if (other instanceof Filter) { Filter filter = (Filter) other; try { stack.push(filter); filter.getCondition().accept(rexVisitor(filter)); } finally { stack.pop(); } } else if (other instanceof Project) { Project project = (Project) other; try { stack.push(project); for (RexNode node : project.getProjects()) { node.accept(rexVisitor(project)); } } finally { stack.pop(); } } return super.visit(other); } @Override protected RelNode visitChild(RelNode parent, int i, RelNode input) { return super.visitChild(parent, i, stripHep(input)); } private RelNode visitJoin(BiRel join) { final int x = offset.get(); visitChild(join, 0, join.getLeft()); offset.set(x + join.getLeft().getRowType().getFieldCount()); visitChild(join, 1, join.getRight()); offset.set(x); return join; } private RexVisitorImpl<Void> rexVisitor(final RelNode rel) { return new RexVisitorImpl<Void>(true) { @Override public Void visitFieldAccess(RexFieldAccess fieldAccess) { final RexNode ref = fieldAccess.getReferenceExpr(); if (ref instanceof RexCorrelVariable) { final RexCorrelVariable var = (RexCorrelVariable) ref; if (mapFieldAccessToCorVar.containsKey(fieldAccess)) { // for cases where different Rel nodes are referring to // same correlation var (e.g. in case of NOT IN) // avoid generating another correlation var // and record the 'rel' is using the same correlation mapRefRelToCorRef.put(rel, mapFieldAccessToCorVar.get(fieldAccess)); } else { final CorRef correlation = new CorRef(var.id, fieldAccess.getField().getIndex(), corrIdGenerator++); mapFieldAccessToCorVar.put(fieldAccess, correlation); mapRefRelToCorRef.put(rel, correlation); } } return super.visitFieldAccess(fieldAccess); } @Override public Void visitSubQuery(RexSubQuery subQuery) { subQuery.rel.accept(CorelMapBuilder.this); return super.visitSubQuery(subQuery); } }; } } /** Frame describing the relational expression after decorrelation * and where to find the output fields and correlation variables * among its output fields. */ static class Frame { final RelNode r; final ImmutableSortedMap<CorDef, Integer> corDefOutputs; final ImmutableSortedMap<Integer, Integer> oldToNewOutputs; Frame(RelNode oldRel, RelNode r, SortedMap<CorDef, Integer> corDefOutputs, Map<Integer, Integer> oldToNewOutputs) { this.r = Objects.requireNonNull(r); this.corDefOutputs = ImmutableSortedMap.copyOf(corDefOutputs); this.oldToNewOutputs = ImmutableSortedMap.copyOf(oldToNewOutputs); assert allLessThan(this.corDefOutputs.values(), r.getRowType().getFieldCount(), Litmus.THROW); assert allLessThan(this.oldToNewOutputs.keySet(), oldRel.getRowType().getFieldCount(), Litmus.THROW); assert allLessThan(this.oldToNewOutputs.values(), r.getRowType().getFieldCount(), Litmus.THROW); } } } // End RelDecorrelator.java
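// Hedged usage sketch (not part of RelDecorrelator.java; the variable names are
// illustrative). A caller that already has a relational tree typically invokes the
// static entry point with a RelBuilder created for the same cluster:
//
//   RelBuilder relBuilder =
//       RelFactories.LOGICAL_BUILDER.create(rootRel.getCluster(), null);
//   RelNode decorrelated = RelDecorrelator.decorrelateQuery(rootRel, relBuilder);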
core/src/main/java/org/apache/calcite/sql2rel/RelDecorrelator.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.sql2rel; import org.apache.calcite.linq4j.Ord; import org.apache.calcite.linq4j.function.Function2; import org.apache.calcite.plan.Context; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptCostImpl; import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.plan.hep.HepPlanner; import org.apache.calcite.plan.hep.HepProgram; import org.apache.calcite.plan.hep.HepRelVertex; import org.apache.calcite.rel.BiRel; import org.apache.calcite.rel.RelCollation; import org.apache.calcite.rel.RelHomogeneousShuttle; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.Aggregate; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.rel.core.Correlate; import org.apache.calcite.rel.core.CorrelationId; import org.apache.calcite.rel.core.Filter; import org.apache.calcite.rel.core.Join; import org.apache.calcite.rel.core.JoinRelType; import org.apache.calcite.rel.core.Project; import org.apache.calcite.rel.core.RelFactories; import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.core.Values; import org.apache.calcite.rel.logical.LogicalAggregate; import org.apache.calcite.rel.logical.LogicalCorrelate; import org.apache.calcite.rel.logical.LogicalFilter; import org.apache.calcite.rel.logical.LogicalJoin; import org.apache.calcite.rel.logical.LogicalProject; import org.apache.calcite.rel.logical.LogicalSnapshot; import org.apache.calcite.rel.metadata.RelMdUtil; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.rules.FilterCorrelateRule; import org.apache.calcite.rel.rules.FilterJoinRule; import org.apache.calcite.rel.rules.FilterProjectTransposeRule; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexCorrelVariable; import org.apache.calcite.rex.RexFieldAccess; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexShuttle; import org.apache.calcite.rex.RexSubQuery; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.sql.SqlExplainFormat; import org.apache.calcite.sql.SqlExplainLevel; import org.apache.calcite.sql.SqlFunction; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlCountAggFunction; import 
org.apache.calcite.sql.fun.SqlSingleValueAggFunction; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.tools.RelBuilder; import org.apache.calcite.tools.RelBuilderFactory; import org.apache.calcite.util.Holder; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Litmus; import org.apache.calcite.util.Pair; import org.apache.calcite.util.ReflectUtil; import org.apache.calcite.util.ReflectiveVisitor; import org.apache.calcite.util.Util; import org.apache.calcite.util.mapping.Mappings; import org.apache.calcite.util.trace.CalciteTrace; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Multimap; import com.google.common.collect.MultimapBuilder; import com.google.common.collect.Sets; import com.google.common.collect.SortedSetMultimap; import org.slf4j.Logger; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Objects; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.stream.Collectors; import javax.annotation.Nonnull; /** * RelDecorrelator replaces all correlated expressions (corExp) in a relational * expression (RelNode) tree with non-correlated expressions that are produced * from joining the RelNode that produces the corExp with the RelNode that * references it. * * <p>TODO:</p> * <ul> * <li>replace {@code CorelMap} constructor parameter with a RelNode * <li>make {@link #currentRel} immutable (would require a fresh * RelDecorrelator for each node being decorrelated)</li> * <li>make fields of {@code CorelMap} immutable</li> * <li>make sub-class rules static, and have them create their own * de-correlator</li> * </ul> */ public class RelDecorrelator implements ReflectiveVisitor { //~ Static fields/initializers --------------------------------------------- private static final Logger SQL2REL_LOGGER = CalciteTrace.getSqlToRelTracer(); //~ Instance fields -------------------------------------------------------- private final RelBuilder relBuilder; // map built during translation protected CorelMap cm; private final ReflectUtil.MethodDispatcher<Frame> dispatcher = ReflectUtil.createMethodDispatcher(Frame.class, this, "decorrelateRel", RelNode.class); // The rel which is being visited private RelNode currentRel; private final Context context; /** Built during decorrelation, of rel to all the newly created correlated * variables in its output, and to map old input positions to new input * positions. This is from the view point of the parent rel of a new rel. 
*/ private final Map<RelNode, Frame> map = new HashMap<>(); private final HashSet<Correlate> generatedCorRels = new HashSet<>(); //~ Constructors ----------------------------------------------------------- protected RelDecorrelator( CorelMap cm, Context context, RelBuilder relBuilder) { this.cm = cm; this.context = context; this.relBuilder = relBuilder; } //~ Methods ---------------------------------------------------------------- @Deprecated // to be removed before 2.0 public static RelNode decorrelateQuery(RelNode rootRel) { final RelBuilder relBuilder = RelFactories.LOGICAL_BUILDER.create(rootRel.getCluster(), null); return decorrelateQuery(rootRel, relBuilder); } /** Decorrelates a query. * * <p>This is the main entry point to {@code RelDecorrelator}. * * @param rootRel Root node of the query * @param relBuilder Builder for relational expressions * * @return Equivalent query with all * {@link org.apache.calcite.rel.core.Correlate} instances removed */ public static RelNode decorrelateQuery(RelNode rootRel, RelBuilder relBuilder) { final CorelMap corelMap = new CorelMapBuilder().build(rootRel); if (!corelMap.hasCorrelation()) { return rootRel; } final RelOptCluster cluster = rootRel.getCluster(); final RelDecorrelator decorrelator = new RelDecorrelator(corelMap, cluster.getPlanner().getContext(), relBuilder); RelNode newRootRel = decorrelator.removeCorrelationViaRule(rootRel); if (SQL2REL_LOGGER.isDebugEnabled()) { SQL2REL_LOGGER.debug( RelOptUtil.dumpPlan("Plan after removing Correlator", newRootRel, SqlExplainFormat.TEXT, SqlExplainLevel.EXPPLAN_ATTRIBUTES)); } if (!decorrelator.cm.mapCorToCorRel.isEmpty()) { newRootRel = decorrelator.decorrelate(newRootRel); } return newRootRel; } private void setCurrent(RelNode root, Correlate corRel) { currentRel = corRel; if (corRel != null) { cm = new CorelMapBuilder().build(Util.first(root, corRel)); } } protected RelBuilderFactory relBuilderFactory() { return RelBuilder.proto(relBuilder); } protected RelNode decorrelate(RelNode root) { // first adjust count() expression if any final RelBuilderFactory f = relBuilderFactory(); HepProgram program = HepProgram.builder() .addRuleInstance(new AdjustProjectForCountAggregateRule(false, f)) .addRuleInstance(new AdjustProjectForCountAggregateRule(true, f)) .addRuleInstance( new FilterJoinRule.FilterIntoJoinRule(true, f, FilterJoinRule.TRUE_PREDICATE)) .addRuleInstance( new FilterProjectTransposeRule(Filter.class, Project.class, true, true, f)) .addRuleInstance(new FilterCorrelateRule(f)) .build(); HepPlanner planner = createPlanner(program); planner.setRoot(root); root = planner.findBestExp(); // Perform decorrelation. 
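    // getInvoke reflectively dispatches to the decorrelateRel(...) overload matching
    // each node's type; a non-null Frame means the subtree was rewritten and records
    // both the old-to-new output mapping and where each correlated variable is now
    // produced.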
map.clear(); final Frame frame = getInvoke(root, null); if (frame != null) { // has been rewritten; apply rules post-decorrelation final HepProgram program2 = HepProgram.builder() .addRuleInstance( new FilterJoinRule.FilterIntoJoinRule( true, f, FilterJoinRule.TRUE_PREDICATE)) .addRuleInstance( new FilterJoinRule.JoinConditionPushRule( f, FilterJoinRule.TRUE_PREDICATE)) .build(); final HepPlanner planner2 = createPlanner(program2); final RelNode newRoot = frame.r; planner2.setRoot(newRoot); return planner2.findBestExp(); } return root; } private Function2<RelNode, RelNode, Void> createCopyHook() { return (oldNode, newNode) -> { if (cm.mapRefRelToCorRef.containsKey(oldNode)) { cm.mapRefRelToCorRef.putAll(newNode, cm.mapRefRelToCorRef.get(oldNode)); } if (oldNode instanceof Correlate && newNode instanceof Correlate) { Correlate oldCor = (Correlate) oldNode; CorrelationId c = oldCor.getCorrelationId(); if (cm.mapCorToCorRel.get(c) == oldNode) { cm.mapCorToCorRel.put(c, newNode); } if (generatedCorRels.contains(oldNode)) { generatedCorRels.add((Correlate) newNode); } } return null; }; } private HepPlanner createPlanner(HepProgram program) { // Create a planner with a hook to update the mapping tables when a // node is copied when it is registered. return new HepPlanner( program, context, true, createCopyHook(), RelOptCostImpl.FACTORY); } public RelNode removeCorrelationViaRule(RelNode root) { final RelBuilderFactory f = relBuilderFactory(); HepProgram program = HepProgram.builder() .addRuleInstance(new RemoveSingleAggregateRule(f)) .addRuleInstance(new RemoveCorrelationForScalarProjectRule(f)) .addRuleInstance(new RemoveCorrelationForScalarAggregateRule(f)) .build(); HepPlanner planner = createPlanner(program); planner.setRoot(root); return planner.findBestExp(); } protected RexNode decorrelateExpr(RelNode currentRel, Map<RelNode, Frame> map, CorelMap cm, RexNode exp) { DecorrelateRexShuttle shuttle = new DecorrelateRexShuttle(currentRel, map, cm); return exp.accept(shuttle); } protected RexNode removeCorrelationExpr( RexNode exp, boolean projectPulledAboveLeftCorrelator) { RemoveCorrelationRexShuttle shuttle = new RemoveCorrelationRexShuttle(relBuilder.getRexBuilder(), projectPulledAboveLeftCorrelator, null, ImmutableSet.of()); return exp.accept(shuttle); } protected RexNode removeCorrelationExpr( RexNode exp, boolean projectPulledAboveLeftCorrelator, RexInputRef nullIndicator) { RemoveCorrelationRexShuttle shuttle = new RemoveCorrelationRexShuttle(relBuilder.getRexBuilder(), projectPulledAboveLeftCorrelator, nullIndicator, ImmutableSet.of()); return exp.accept(shuttle); } protected RexNode removeCorrelationExpr( RexNode exp, boolean projectPulledAboveLeftCorrelator, Set<Integer> isCount) { RemoveCorrelationRexShuttle shuttle = new RemoveCorrelationRexShuttle(relBuilder.getRexBuilder(), projectPulledAboveLeftCorrelator, null, isCount); return exp.accept(shuttle); } /** Fallback if none of the other {@code decorrelateRel} methods match. 
*/ public Frame decorrelateRel(RelNode rel) { RelNode newRel = rel.copy(rel.getTraitSet(), rel.getInputs()); if (rel.getInputs().size() > 0) { List<RelNode> oldInputs = rel.getInputs(); List<RelNode> newInputs = new ArrayList<>(); for (int i = 0; i < oldInputs.size(); ++i) { final Frame frame = getInvoke(oldInputs.get(i), rel); if (frame == null || !frame.corDefOutputs.isEmpty()) { // if input is not rewritten, or if it produces correlated // variables, terminate rewrite return null; } newInputs.add(frame.r); newRel.replaceInput(i, frame.r); } if (!Util.equalShallow(oldInputs, newInputs)) { newRel = rel.copy(rel.getTraitSet(), newInputs); } } // the output position should not change since there are no corVars // coming from below. return register(rel, newRel, identityMap(rel.getRowType().getFieldCount()), ImmutableSortedMap.of()); } public Frame decorrelateRel(Sort rel) { // // Rewrite logic: // // 1. change the collations field to reference the new input. // // Sort itself should not reference corVars. assert !cm.mapRefRelToCorRef.containsKey(rel); // Sort only references field positions in collations field. // The collations field in the newRel now need to refer to the // new output positions in its input. // Its output does not change the input ordering, so there's no // need to call propagateExpr. final RelNode oldInput = rel.getInput(); final Frame frame = getInvoke(oldInput, rel); if (frame == null) { // If input has not been rewritten, do not rewrite this rel. return null; } final RelNode newInput = frame.r; Mappings.TargetMapping mapping = Mappings.target(frame.oldToNewOutputs, oldInput.getRowType().getFieldCount(), newInput.getRowType().getFieldCount()); RelCollation oldCollation = rel.getCollation(); RelCollation newCollation = RexUtil.apply(mapping, oldCollation); final int offset = rel.offset == null ? -1 : RexLiteral.intValue(rel.offset); final int fetch = rel.fetch == null ? -1 : RexLiteral.intValue(rel.fetch); final RelNode newSort = relBuilder .push(newInput) .sortLimit(offset, fetch, relBuilder.fields(newCollation)) .build(); // Sort does not change input ordering return register(rel, newSort, frame.oldToNewOutputs, frame.corDefOutputs); } public Frame decorrelateRel(Values rel) { // There are no inputs, so rel does not need to be changed. return null; } public Frame decorrelateRel(LogicalAggregate rel) { return decorrelateRel((Aggregate) rel); } public Frame decorrelateRel(Aggregate rel) { // // Rewrite logic: // // 1. Permute the group by keys to the front. // 2. If the input of an aggregate produces correlated variables, // add them to the group list. // 3. Change aggCalls to reference the new project. // // Aggregate itself should not reference corVars. assert !cm.mapRefRelToCorRef.containsKey(rel); final RelNode oldInput = rel.getInput(); final Frame frame = getInvoke(oldInput, rel); if (frame == null) { // If input has not been rewritten, do not rewrite this rel. return null; } final RelNode newInput = frame.r; // aggregate outputs mapping: group keys and aggregates final Map<Integer, Integer> outputMap = new HashMap<>(); // map from newInput final Map<Integer, Integer> mapNewInputToProjOutputs = new HashMap<>(); final int oldGroupKeyCount = rel.getGroupSet().cardinality(); // Project projects the original expressions, // plus any correlated variables the input wants to pass along. 
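    // Concretely: non-constant group keys come first, then any corVars supplied by
    // the input, then all remaining input fields; the rewritten Aggregate below
    // groups on the first newGroupKeyCount of these, i.e. the original keys plus
    // the corVars.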
final List<Pair<RexNode, String>> projects = new ArrayList<>(); List<RelDataTypeField> newInputOutput = newInput.getRowType().getFieldList(); int newPos = 0; // oldInput has the original group by keys in the front. final NavigableMap<Integer, RexLiteral> omittedConstants = new TreeMap<>(); for (int i = 0; i < oldGroupKeyCount; i++) { final RexLiteral constant = projectedLiteral(newInput, i); if (constant != null) { // Exclude constants. Aggregate({true}) occurs because Aggregate({}) // would generate 1 row even when applied to an empty table. omittedConstants.put(i, constant); continue; } // add mapping of group keys. outputMap.put(i, newPos); int newInputPos = frame.oldToNewOutputs.get(i); projects.add(RexInputRef.of2(newInputPos, newInputOutput)); mapNewInputToProjOutputs.put(newInputPos, newPos); newPos++; } final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(); if (!frame.corDefOutputs.isEmpty()) { // If input produces correlated variables, move them to the front, // right after any existing GROUP BY fields. // Now add the corVars from the input, starting from // position oldGroupKeyCount. for (Map.Entry<CorDef, Integer> entry : frame.corDefOutputs.entrySet()) { projects.add(RexInputRef.of2(entry.getValue(), newInputOutput)); corDefOutputs.put(entry.getKey(), newPos); mapNewInputToProjOutputs.put(entry.getValue(), newPos); newPos++; } } // add the remaining fields final int newGroupKeyCount = newPos; for (int i = 0; i < newInputOutput.size(); i++) { if (!mapNewInputToProjOutputs.containsKey(i)) { projects.add(RexInputRef.of2(i, newInputOutput)); mapNewInputToProjOutputs.put(i, newPos); newPos++; } } assert newPos == newInputOutput.size(); // This Project will be what the old input maps to, // replacing any previous mapping from old input). RelNode newProject = relBuilder.push(newInput) .projectNamed(Pair.left(projects), Pair.right(projects), true) .build(); // update mappings: // oldInput ----> newInput // // newProject // | // oldInput ----> newInput // // is transformed to // // oldInput ----> newProject // | // newInput Map<Integer, Integer> combinedMap = new HashMap<>(); for (Integer oldInputPos : frame.oldToNewOutputs.keySet()) { combinedMap.put(oldInputPos, mapNewInputToProjOutputs.get( frame.oldToNewOutputs.get(oldInputPos))); } register(oldInput, newProject, combinedMap, corDefOutputs); // now it's time to rewrite the Aggregate final ImmutableBitSet newGroupSet = ImmutableBitSet.range(newGroupKeyCount); List<AggregateCall> newAggCalls = new ArrayList<>(); List<AggregateCall> oldAggCalls = rel.getAggCallList(); ImmutableList<ImmutableBitSet> newGroupSets = null; if (rel.getGroupType() != Aggregate.Group.SIMPLE) { final ImmutableBitSet addedGroupSet = ImmutableBitSet.range(oldGroupKeyCount, newGroupKeyCount); final Iterable<ImmutableBitSet> tmpGroupSets = Iterables.transform(rel.getGroupSets(), bitSet -> bitSet.union(addedGroupSet)); newGroupSets = ImmutableBitSet.ORDERING.immutableSortedCopy(tmpGroupSets); } int oldInputOutputFieldCount = rel.getGroupSet().cardinality(); int newInputOutputFieldCount = newGroupSet.cardinality(); int i = -1; for (AggregateCall oldAggCall : oldAggCalls) { ++i; List<Integer> oldAggArgs = oldAggCall.getArgList(); List<Integer> aggArgs = new ArrayList<>(); // Adjust the Aggregate argument positions. // Note Aggregate does not change input ordering, so the input // output position mapping can be used to derive the new positions // for the argument. 
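      // combinedMap (built above) maps old input positions to positions in
      // newProject, so both the aggregate arguments and any FILTER column are
      // remapped through it.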
for (int oldPos : oldAggArgs) { aggArgs.add(combinedMap.get(oldPos)); } final int filterArg = oldAggCall.filterArg < 0 ? oldAggCall.filterArg : combinedMap.get(oldAggCall.filterArg); newAggCalls.add( oldAggCall.adaptTo(newProject, aggArgs, filterArg, oldGroupKeyCount, newGroupKeyCount)); // The old to new output position mapping will be the same as that // of newProject, plus any aggregates that the oldAgg produces. outputMap.put( oldInputOutputFieldCount + i, newInputOutputFieldCount + i); } relBuilder.push(newProject).aggregate( relBuilder.groupKey(newGroupSet, newGroupSets), newAggCalls); if (!omittedConstants.isEmpty()) { final List<RexNode> postProjects = new ArrayList<>(relBuilder.fields()); for (Map.Entry<Integer, RexLiteral> entry : omittedConstants.descendingMap().entrySet()) { int index = entry.getKey() + frame.corDefOutputs.size(); postProjects.add(index, entry.getValue()); // Shift the outputs whose index equals with or bigger than the added index // with 1 offset. shiftMapping(outputMap, index, 1); // Then add the constant key mapping. outputMap.put(entry.getKey(), index); } relBuilder.project(postProjects); } // Aggregate does not change input ordering so corVars will be // located at the same position as the input newProject. return register(rel, relBuilder.build(), outputMap, corDefOutputs); } /** * Shift the mapping to fixed offset from the {@code startIndex}. * @param mapping the original mapping * @param startIndex any output whose index equals with or bigger than the starting index * would be shift * @param offset shift offset */ private static void shiftMapping(Map<Integer, Integer> mapping, int startIndex, int offset) { for (Map.Entry<Integer, Integer> entry : mapping.entrySet()) { if (entry.getValue() >= startIndex) { mapping.put(entry.getKey(), entry.getValue() + offset); } else { mapping.put(entry.getKey(), entry.getValue()); } } } public Frame getInvoke(RelNode r, RelNode parent) { final Frame frame = dispatcher.invoke(r); if (frame != null) { map.put(r, frame); } currentRel = parent; return frame; } /** Returns a literal output field, or null if it is not literal. */ private static RexLiteral projectedLiteral(RelNode rel, int i) { if (rel instanceof Project) { final Project project = (Project) rel; final RexNode node = project.getProjects().get(i); if (node instanceof RexLiteral) { return (RexLiteral) node; } } return null; } public Frame decorrelateRel(LogicalProject rel) { return decorrelateRel((Project) rel); } public Frame decorrelateRel(Project rel) { // // Rewrite logic: // // 1. Pass along any correlated variables coming from the input. // final RelNode oldInput = rel.getInput(); Frame frame = getInvoke(oldInput, rel); if (frame == null) { // If input has not been rewritten, do not rewrite this rel. return null; } final List<RexNode> oldProjects = rel.getProjects(); final List<RelDataTypeField> relOutput = rel.getRowType().getFieldList(); // Project projects the original expressions, // plus any correlated variables the input wants to pass along. final List<Pair<RexNode, String>> projects = new ArrayList<>(); // If this Project has correlated reference, create value generator // and produce the correlated variables in the new output. 
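    // For a Project this always takes the value-generator path: the correlated
    // columns are obtained by joining a de-duplicated projection of the rel that
    // supplies them (the correlate's left input) onto this input; see
    // createValueGenerator and decorrelateInputWithValueGenerator below.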
if (cm.mapRefRelToCorRef.containsKey(rel)) { frame = decorrelateInputWithValueGenerator(rel, frame); } // Project projects the original expressions final Map<Integer, Integer> mapOldToNewOutputs = new HashMap<>(); int newPos; for (newPos = 0; newPos < oldProjects.size(); newPos++) { projects.add( newPos, Pair.of( decorrelateExpr(currentRel, map, cm, oldProjects.get(newPos)), relOutput.get(newPos).getName())); mapOldToNewOutputs.put(newPos, newPos); } // Project any correlated variables the input wants to pass along. final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(); for (Map.Entry<CorDef, Integer> entry : frame.corDefOutputs.entrySet()) { projects.add( RexInputRef.of2(entry.getValue(), frame.r.getRowType().getFieldList())); corDefOutputs.put(entry.getKey(), newPos); newPos++; } RelNode newProject = relBuilder.push(frame.r) .projectNamed(Pair.left(projects), Pair.right(projects), true) .build(); return register(rel, newProject, mapOldToNewOutputs, corDefOutputs); } /** * Create RelNode tree that produces a list of correlated variables. * * @param correlations correlated variables to generate * @param valueGenFieldOffset offset in the output that generated columns * will start * @param corDefOutputs output positions for the correlated variables * generated * @return RelNode the root of the resultant RelNode tree */ private RelNode createValueGenerator( Iterable<CorRef> correlations, int valueGenFieldOffset, SortedMap<CorDef, Integer> corDefOutputs) { final Map<RelNode, List<Integer>> mapNewInputToOutputs = new HashMap<>(); final Map<RelNode, Integer> mapNewInputToNewOffset = new HashMap<>(); // Input provides the definition of a correlated variable. // Add to map all the referenced positions (relative to each input rel). for (CorRef corVar : correlations) { final int oldCorVarOffset = corVar.field; final RelNode oldInput = getCorRel(corVar); assert oldInput != null; final Frame frame = getFrame(oldInput, true); assert frame != null; final RelNode newInput = frame.r; final List<Integer> newLocalOutputs; if (!mapNewInputToOutputs.containsKey(newInput)) { newLocalOutputs = new ArrayList<>(); } else { newLocalOutputs = mapNewInputToOutputs.get(newInput); } final int newCorVarOffset = frame.oldToNewOutputs.get(oldCorVarOffset); // Add all unique positions referenced. if (!newLocalOutputs.contains(newCorVarOffset)) { newLocalOutputs.add(newCorVarOffset); } mapNewInputToOutputs.put(newInput, newLocalOutputs); } int offset = 0; // Project only the correlated fields out of each input // and join the project together. // To make sure the plan does not change in terms of join order, // join these rels based on their occurrence in corVar list which // is sorted. 
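    // Each distinct supplying rel contributes one DISTINCT projection of just the
    // referenced positions; if the correlations come from more than one such rel,
    // the per-rel generators are combined with condition-TRUE inner joins, i.e. a
    // small cross product of distinct values.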
final Set<RelNode> joinedInputs = new HashSet<>(); RelNode r = null; for (CorRef corVar : correlations) { final RelNode oldInput = getCorRel(corVar); assert oldInput != null; final RelNode newInput = getFrame(oldInput, true).r; assert newInput != null; if (!joinedInputs.contains(newInput)) { final List<Integer> positions = mapNewInputToOutputs.get(newInput); final List<String> fieldNames = newInput.getRowType().getFieldNames(); RelNode distinct = relBuilder.push(newInput) .project(relBuilder.fields(positions)) .distinct() .build(); RelOptCluster cluster = distinct.getCluster(); joinedInputs.add(newInput); mapNewInputToNewOffset.put(newInput, offset); offset += distinct.getRowType().getFieldCount(); if (r == null) { r = distinct; } else { r = relBuilder.push(r).push(distinct) .join(JoinRelType.INNER, cluster.getRexBuilder().makeLiteral(true)).build(); } } } // Translate the positions of correlated variables to be relative to // the join output, leaving room for valueGenFieldOffset because // valueGenerators are joined with the original left input of the rel // referencing correlated variables. for (CorRef corRef : correlations) { // The first input of a Correlate is always the rel defining // the correlated variables. final RelNode oldInput = getCorRel(corRef); assert oldInput != null; final Frame frame = getFrame(oldInput, true); final RelNode newInput = frame.r; assert newInput != null; final List<Integer> newLocalOutputs = mapNewInputToOutputs.get(newInput); final int newLocalOutput = frame.oldToNewOutputs.get(corRef.field); // newOutput is the index of the corVar in the referenced // position list plus the offset of referenced position list of // each newInput. final int newOutput = newLocalOutputs.indexOf(newLocalOutput) + mapNewInputToNewOffset.get(newInput) + valueGenFieldOffset; corDefOutputs.put(corRef.def(), newOutput); } return r; } private Frame getFrame(RelNode r, boolean safe) { final Frame frame = map.get(r); if (frame == null && safe) { return new Frame(r, r, ImmutableSortedMap.of(), identityMap(r.getRowType().getFieldCount())); } return frame; } private RelNode getCorRel(CorRef corVar) { final RelNode r = cm.mapCorToCorRel.get(corVar.corr); return r.getInput(0); } /** Adds a value generator to satisfy the correlating variables used by * a relational expression, if those variables are not already provided by * its input. */ private Frame maybeAddValueGenerator(RelNode rel, Frame frame) { final CorelMap cm1 = new CorelMapBuilder().build(frame.r, rel); if (!cm1.mapRefRelToCorRef.containsKey(rel)) { return frame; } final Collection<CorRef> needs = cm1.mapRefRelToCorRef.get(rel); final ImmutableSortedSet<CorDef> haves = frame.corDefOutputs.keySet(); if (hasAll(needs, haves)) { return frame; } return decorrelateInputWithValueGenerator(rel, frame); } /** Returns whether all of a collection of {@link CorRef}s are satisfied * by at least one of a collection of {@link CorDef}s. */ private boolean hasAll(Collection<CorRef> corRefs, Collection<CorDef> corDefs) { for (CorRef corRef : corRefs) { if (!has(corDefs, corRef)) { return false; } } return true; } /** Returns whether a {@link CorrelationId} is satisfied by at least one of a * collection of {@link CorDef}s. 
*/ private boolean has(Collection<CorDef> corDefs, CorRef corr) { for (CorDef corDef : corDefs) { if (corDef.corr.equals(corr.corr) && corDef.field == corr.field) { return true; } } return false; } private Frame decorrelateInputWithValueGenerator(RelNode rel, Frame frame) { // currently only handles one input assert rel.getInputs().size() == 1; RelNode oldInput = frame.r; final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(frame.corDefOutputs); final Collection<CorRef> corVarList = cm.mapRefRelToCorRef.get(rel); // Try to populate correlation variables using local fields. // This means that we do not need a value generator. if (rel instanceof Filter) { SortedMap<CorDef, Integer> map = new TreeMap<>(); List<RexNode> projects = new ArrayList<>(); for (CorRef correlation : corVarList) { final CorDef def = correlation.def(); if (corDefOutputs.containsKey(def) || map.containsKey(def)) { continue; } try { findCorrelationEquivalent(correlation, ((Filter) rel).getCondition()); } catch (Util.FoundOne e) { if (e.getNode() instanceof RexInputRef) { map.put(def, ((RexInputRef) e.getNode()).getIndex()); } else { map.put(def, frame.r.getRowType().getFieldCount() + projects.size()); projects.add((RexNode) e.getNode()); } } } // If all correlation variables are now satisfied, skip creating a value // generator. if (map.size() == corVarList.size()) { map.putAll(frame.corDefOutputs); final RelNode r; if (!projects.isEmpty()) { relBuilder.push(oldInput) .project(Iterables.concat(relBuilder.fields(), projects)); r = relBuilder.build(); } else { r = oldInput; } return register(rel.getInput(0), r, frame.oldToNewOutputs, map); } } int leftInputOutputCount = frame.r.getRowType().getFieldCount(); // can directly add positions into corDefOutputs since join // does not change the output ordering from the inputs. RelNode valueGen = createValueGenerator(corVarList, leftInputOutputCount, corDefOutputs); RelNode join = relBuilder.push(frame.r).push(valueGen) .join(JoinRelType.INNER, relBuilder.literal(true), ImmutableSet.of()).build(); // Join or Filter does not change the old input ordering. All // input fields from newLeftInput (i.e. the original input to the old // Filter) are in the output and in the same position. return register(rel.getInput(0), join, frame.oldToNewOutputs, corDefOutputs); } /** Finds a {@link RexInputRef} that is equivalent to a {@link CorRef}, * and if found, throws a {@link org.apache.calcite.util.Util.FoundOne}. 
*/ private void findCorrelationEquivalent(CorRef correlation, RexNode e) throws Util.FoundOne { switch (e.getKind()) { case EQUALS: final RexCall call = (RexCall) e; final List<RexNode> operands = call.getOperands(); if (references(operands.get(0), correlation)) { throw new Util.FoundOne(operands.get(1)); } if (references(operands.get(1), correlation)) { throw new Util.FoundOne(operands.get(0)); } break; case AND: for (RexNode operand : ((RexCall) e).getOperands()) { findCorrelationEquivalent(correlation, operand); } } } private boolean references(RexNode e, CorRef correlation) { switch (e.getKind()) { case CAST: final RexNode operand = ((RexCall) e).getOperands().get(0); if (isWidening(e.getType(), operand.getType())) { return references(operand, correlation); } return false; case FIELD_ACCESS: final RexFieldAccess f = (RexFieldAccess) e; if (f.getField().getIndex() == correlation.field && f.getReferenceExpr() instanceof RexCorrelVariable) { if (((RexCorrelVariable) f.getReferenceExpr()).id == correlation.corr) { return true; } } // fall through default: return false; } } /** Returns whether one type is just a widening of another. * * <p>For example:<ul> * <li>{@code VARCHAR(10)} is a widening of {@code VARCHAR(5)}. * <li>{@code VARCHAR(10)} is a widening of {@code VARCHAR(10) NOT NULL}. * </ul> */ private boolean isWidening(RelDataType type, RelDataType type1) { return type.getSqlTypeName() == type1.getSqlTypeName() && type.getPrecision() >= type1.getPrecision(); } public Frame decorrelateRel(LogicalSnapshot rel) { if (RexUtil.containsCorrelation(rel.getPeriod())) { return null; } return decorrelateRel((RelNode) rel); } public Frame decorrelateRel(LogicalFilter rel) { return decorrelateRel((Filter) rel); } public Frame decorrelateRel(Filter rel) { // // Rewrite logic: // // 1. If a Filter references a correlated field in its filter // condition, rewrite the Filter to be // Filter // Join(cross product) // originalFilterInput // ValueGenerator(produces distinct sets of correlated variables) // and rewrite the correlated fieldAccess in the filter condition to // reference the Join output. // // 2. If Filter does not reference correlated variables, simply // rewrite the filter condition using new input. // final RelNode oldInput = rel.getInput(); Frame frame = getInvoke(oldInput, rel); if (frame == null) { // If input has not been rewritten, do not rewrite this rel. return null; } // If this Filter has correlated reference, create value generator // and produce the correlated variables in the new output. if (false) { if (cm.mapRefRelToCorRef.containsKey(rel)) { frame = decorrelateInputWithValueGenerator(rel, frame); } } else { frame = maybeAddValueGenerator(rel, frame); } final CorelMap cm2 = new CorelMapBuilder().build(rel); // Replace the filter expression to reference output of the join // Map filter to the new filter over join relBuilder.push(frame.r) .filter(decorrelateExpr(currentRel, map, cm2, rel.getCondition())); // Filter does not change the input ordering. // Filter rel does not permute the input. // All corVars produced by filter will have the same output positions in the // input rel. return register(rel, relBuilder.build(), frame.oldToNewOutputs, frame.corDefOutputs); } public Frame decorrelateRel(LogicalCorrelate rel) { return decorrelateRel((Correlate) rel); } public Frame decorrelateRel(Correlate rel) { // // Rewrite logic: // // The original left input will be joined with the new right input that // has generated correlated variables propagated up. 
For any generated // corVars that are not used in the join key, pass them along to be // joined later with the Correlates that produce them. // // the right input to Correlate should produce correlated variables final RelNode oldLeft = rel.getInput(0); final RelNode oldRight = rel.getInput(1); final Frame leftFrame = getInvoke(oldLeft, rel); final Frame rightFrame = getInvoke(oldRight, rel); if (leftFrame == null || rightFrame == null) { // If any input has not been rewritten, do not rewrite this rel. return null; } if (rightFrame.corDefOutputs.isEmpty()) { return null; } assert rel.getRequiredColumns().cardinality() <= rightFrame.corDefOutputs.keySet().size(); // Change correlator rel into a join. // Join all the correlated variables produced by this correlator rel // with the values generated and propagated from the right input final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(rightFrame.corDefOutputs); final List<RexNode> conditions = new ArrayList<>(); final List<RelDataTypeField> newLeftOutput = leftFrame.r.getRowType().getFieldList(); int newLeftFieldCount = newLeftOutput.size(); final List<RelDataTypeField> newRightOutput = rightFrame.r.getRowType().getFieldList(); for (Map.Entry<CorDef, Integer> rightOutput : new ArrayList<>(corDefOutputs.entrySet())) { final CorDef corDef = rightOutput.getKey(); if (!corDef.corr.equals(rel.getCorrelationId())) { continue; } final int newLeftPos = leftFrame.oldToNewOutputs.get(corDef.field); final int newRightPos = rightOutput.getValue(); conditions.add( relBuilder.call(SqlStdOperatorTable.EQUALS, RexInputRef.of(newLeftPos, newLeftOutput), new RexInputRef(newLeftFieldCount + newRightPos, newRightOutput.get(newRightPos).getType()))); // remove this corVar from output position mapping corDefOutputs.remove(corDef); } // Update the output position for the corVars: only pass on the cor // vars that are not used in the join key. for (CorDef corDef : corDefOutputs.keySet()) { int newPos = corDefOutputs.get(corDef) + newLeftFieldCount; corDefOutputs.put(corDef, newPos); } // then add any corVar from the left input. Do not need to change // output positions. corDefOutputs.putAll(leftFrame.corDefOutputs); // Create the mapping between the output of the old correlation rel // and the new join rel final Map<Integer, Integer> mapOldToNewOutputs = new HashMap<>(); int oldLeftFieldCount = oldLeft.getRowType().getFieldCount(); int oldRightFieldCount = oldRight.getRowType().getFieldCount(); //noinspection AssertWithSideEffects assert rel.getRowType().getFieldCount() == oldLeftFieldCount + oldRightFieldCount; // Left input positions are not changed. mapOldToNewOutputs.putAll(leftFrame.oldToNewOutputs); // Right input positions are shifted by newLeftFieldCount. for (int i = 0; i < oldRightFieldCount; i++) { mapOldToNewOutputs.put(i + oldLeftFieldCount, rightFrame.oldToNewOutputs.get(i) + newLeftFieldCount); } final RexNode condition = RexUtil.composeConjunction(relBuilder.getRexBuilder(), conditions); RelNode newJoin = relBuilder.push(leftFrame.r).push(rightFrame.r) .join(rel.getJoinType(), condition, ImmutableSet.of()).build(); return register(rel, newJoin, mapOldToNewOutputs, corDefOutputs); } public Frame decorrelateRel(LogicalJoin rel) { return decorrelateRel((Join) rel); } public Frame decorrelateRel(Join rel) { // For SEMI/ANTI join decorrelate it's input directly, // because the correlate variables can only be propagated from // the left side, which is not supported yet. 
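    // (JoinRelType.projectsRight() is false only for SEMI and ANTI joins, whose
    // output carries the fields of the left input alone.)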
if (!rel.getJoinType().projectsRight()) { return decorrelateRel((RelNode) rel); } // // Rewrite logic: // // 1. rewrite join condition. // 2. map output positions and produce corVars if any. // final RelNode oldLeft = rel.getInput(0); final RelNode oldRight = rel.getInput(1); final Frame leftFrame = getInvoke(oldLeft, rel); final Frame rightFrame = getInvoke(oldRight, rel); if (leftFrame == null || rightFrame == null) { // If any input has not been rewritten, do not rewrite this rel. return null; } final RelNode newJoin = relBuilder .push(leftFrame.r) .push(rightFrame.r) .join(rel.getJoinType(), decorrelateExpr(currentRel, map, cm, rel.getCondition()), ImmutableSet.of()) .build(); // Create the mapping between the output of the old correlation rel // and the new join rel Map<Integer, Integer> mapOldToNewOutputs = new HashMap<>(); int oldLeftFieldCount = oldLeft.getRowType().getFieldCount(); int newLeftFieldCount = leftFrame.r.getRowType().getFieldCount(); int oldRightFieldCount = oldRight.getRowType().getFieldCount(); //noinspection AssertWithSideEffects assert rel.getRowType().getFieldCount() == oldLeftFieldCount + oldRightFieldCount; // Left input positions are not changed. mapOldToNewOutputs.putAll(leftFrame.oldToNewOutputs); // Right input positions are shifted by newLeftFieldCount. for (int i = 0; i < oldRightFieldCount; i++) { mapOldToNewOutputs.put(i + oldLeftFieldCount, rightFrame.oldToNewOutputs.get(i) + newLeftFieldCount); } final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>(leftFrame.corDefOutputs); // Right input positions are shifted by newLeftFieldCount. for (Map.Entry<CorDef, Integer> entry : rightFrame.corDefOutputs.entrySet()) { corDefOutputs.put(entry.getKey(), entry.getValue() + newLeftFieldCount); } return register(rel, newJoin, mapOldToNewOutputs, corDefOutputs); } private static RexInputRef getNewForOldInputRef(RelNode currentRel, Map<RelNode, Frame> map, RexInputRef oldInputRef) { assert currentRel != null; int oldOrdinal = oldInputRef.getIndex(); int newOrdinal = 0; // determine which input rel oldOrdinal references, and adjust // oldOrdinal to be relative to that input rel RelNode oldInput = null; for (RelNode oldInput0 : currentRel.getInputs()) { RelDataType oldInputType = oldInput0.getRowType(); int n = oldInputType.getFieldCount(); if (oldOrdinal < n) { oldInput = oldInput0; break; } RelNode newInput = map.get(oldInput0).r; newOrdinal += newInput.getRowType().getFieldCount(); oldOrdinal -= n; } assert oldInput != null; final Frame frame = map.get(oldInput); assert frame != null; // now oldOrdinal is relative to oldInput int oldLocalOrdinal = oldOrdinal; // figure out the newLocalOrdinal, relative to the newInput. int newLocalOrdinal = oldLocalOrdinal; if (!frame.oldToNewOutputs.isEmpty()) { newLocalOrdinal = frame.oldToNewOutputs.get(oldLocalOrdinal); } newOrdinal += newLocalOrdinal; return new RexInputRef(newOrdinal, frame.r.getRowType().getFieldList().get(newLocalOrdinal).getType()); } /** * Pulls project above the join from its RHS input. Enforces nullability * for join output. 
* * @param join Join * @param project Original project as the right-hand input of the join * @param nullIndicatorPos Position of null indicator * @return the subtree with the new Project at the root */ private RelNode projectJoinOutputWithNullability( Join join, Project project, int nullIndicatorPos) { final RelDataTypeFactory typeFactory = join.getCluster().getTypeFactory(); final RelNode left = join.getLeft(); final JoinRelType joinType = join.getJoinType(); RexInputRef nullIndicator = new RexInputRef( nullIndicatorPos, typeFactory.createTypeWithNullability( join.getRowType().getFieldList().get(nullIndicatorPos) .getType(), true)); // now create the new project List<Pair<RexNode, String>> newProjExprs = new ArrayList<>(); // project everything from the LHS and then those from the original // projRel List<RelDataTypeField> leftInputFields = left.getRowType().getFieldList(); for (int i = 0; i < leftInputFields.size(); i++) { newProjExprs.add(RexInputRef.of2(i, leftInputFields)); } // Marked where the projected expr is coming from so that the types will // become nullable for the original projections which are now coming out // of the nullable side of the OJ. boolean projectPulledAboveLeftCorrelator = joinType.generatesNullsOnRight(); for (Pair<RexNode, String> pair : project.getNamedProjects()) { RexNode newProjExpr = removeCorrelationExpr( pair.left, projectPulledAboveLeftCorrelator, nullIndicator); newProjExprs.add(Pair.of(newProjExpr, pair.right)); } return relBuilder.push(join) .projectNamed(Pair.left(newProjExprs), Pair.right(newProjExprs), true) .build(); } /** * Pulls a {@link Project} above a {@link Correlate} from its RHS input. * Enforces nullability for join output. * * @param correlate Correlate * @param project the original project as the RHS input of the join * @param isCount Positions which are calls to the <code>COUNT</code> * aggregation function * @return the subtree with the new Project at the root */ private RelNode aggregateCorrelatorOutput( Correlate correlate, Project project, Set<Integer> isCount) { final RelNode left = correlate.getLeft(); final JoinRelType joinType = correlate.getJoinType(); // now create the new project final List<Pair<RexNode, String>> newProjects = new ArrayList<>(); // Project everything from the LHS and then those from the original // project final List<RelDataTypeField> leftInputFields = left.getRowType().getFieldList(); for (int i = 0; i < leftInputFields.size(); i++) { newProjects.add(RexInputRef.of2(i, leftInputFields)); } // Marked where the projected expr is coming from so that the types will // become nullable for the original projections which are now coming out // of the nullable side of the OJ. boolean projectPulledAboveLeftCorrelator = joinType.generatesNullsOnRight(); for (Pair<RexNode, String> pair : project.getNamedProjects()) { RexNode newProjExpr = removeCorrelationExpr( pair.left, projectPulledAboveLeftCorrelator, isCount); newProjects.add(Pair.of(newProjExpr, pair.right)); } return relBuilder.push(correlate) .projectNamed(Pair.left(newProjects), Pair.right(newProjects), true) .build(); } /** * Checks whether the correlations in projRel and filter are related to * the correlated variables provided by corRel. 
* * @param correlate Correlate * @param project The original Project as the RHS input of the join * @param filter Filter * @param correlatedJoinKeys Correlated join keys * @return true if filter and proj only references corVar provided by corRel */ private boolean checkCorVars( Correlate correlate, Project project, Filter filter, List<RexFieldAccess> correlatedJoinKeys) { if (filter != null) { assert correlatedJoinKeys != null; // check that all correlated refs in the filter condition are // used in the join(as field access). Set<CorRef> corVarInFilter = Sets.newHashSet(cm.mapRefRelToCorRef.get(filter)); for (RexFieldAccess correlatedJoinKey : correlatedJoinKeys) { corVarInFilter.remove(cm.mapFieldAccessToCorRef.get(correlatedJoinKey)); } if (!corVarInFilter.isEmpty()) { return false; } // Check that the correlated variables referenced in these // comparisons do come from the Correlate. corVarInFilter.addAll(cm.mapRefRelToCorRef.get(filter)); for (CorRef corVar : corVarInFilter) { if (cm.mapCorToCorRel.get(corVar.corr) != correlate) { return false; } } } // if project has any correlated reference, make sure they are also // provided by the current correlate. They will be projected out of the LHS // of the correlate. if ((project != null) && cm.mapRefRelToCorRef.containsKey(project)) { for (CorRef corVar : cm.mapRefRelToCorRef.get(project)) { if (cm.mapCorToCorRel.get(corVar.corr) != correlate) { return false; } } } return true; } /** * Remove correlated variables from the tree at root corRel * * @param correlate Correlate */ private void removeCorVarFromTree(Correlate correlate) { if (cm.mapCorToCorRel.get(correlate.getCorrelationId()) == correlate) { cm.mapCorToCorRel.remove(correlate.getCorrelationId()); } } /** * Projects all {@code input} output fields plus the additional expressions. * * @param input Input relational expression * @param additionalExprs Additional expressions and names * @return the new Project */ private RelNode createProjectWithAdditionalExprs( RelNode input, List<Pair<RexNode, String>> additionalExprs) { final List<RelDataTypeField> fieldList = input.getRowType().getFieldList(); List<Pair<RexNode, String>> projects = new ArrayList<>(); Ord.forEach(fieldList, (field, i) -> projects.add( Pair.of(relBuilder.getRexBuilder().makeInputRef(field.getType(), i), field.getName()))); projects.addAll(additionalExprs); return relBuilder.push(input) .projectNamed(Pair.left(projects), Pair.right(projects), true) .build(); } /* Returns an immutable map with the identity [0: 0, .., count-1: count-1]. */ static Map<Integer, Integer> identityMap(int count) { ImmutableMap.Builder<Integer, Integer> builder = ImmutableMap.builder(); for (int i = 0; i < count; i++) { builder.put(i, i); } return builder.build(); } /** Registers a relational expression and the relational expression it became * after decorrelation. 
*/ Frame register(RelNode rel, RelNode newRel, Map<Integer, Integer> oldToNewOutputs, SortedMap<CorDef, Integer> corDefOutputs) { final Frame frame = new Frame(rel, newRel, corDefOutputs, oldToNewOutputs); map.put(rel, frame); return frame; } static boolean allLessThan(Collection<Integer> integers, int limit, Litmus ret) { for (int value : integers) { if (value >= limit) { return ret.fail("out of range; value: {}, limit: {}", value, limit); } } return ret.succeed(); } private static RelNode stripHep(RelNode rel) { if (rel instanceof HepRelVertex) { HepRelVertex hepRelVertex = (HepRelVertex) rel; rel = hepRelVertex.getCurrentRel(); } return rel; } //~ Inner Classes ---------------------------------------------------------- /** Shuttle that decorrelates. */ private static class DecorrelateRexShuttle extends RexShuttle { private final RelNode currentRel; private final Map<RelNode, Frame> map; private final CorelMap cm; private DecorrelateRexShuttle(RelNode currentRel, Map<RelNode, Frame> map, CorelMap cm) { this.currentRel = Objects.requireNonNull(currentRel); this.map = Objects.requireNonNull(map); this.cm = Objects.requireNonNull(cm); } @Override public RexNode visitFieldAccess(RexFieldAccess fieldAccess) { int newInputOutputOffset = 0; for (RelNode input : currentRel.getInputs()) { final Frame frame = map.get(input); if (frame != null) { // try to find in this input rel the position of corVar final CorRef corRef = cm.mapFieldAccessToCorRef.get(fieldAccess); if (corRef != null) { Integer newInputPos = frame.corDefOutputs.get(corRef.def()); if (newInputPos != null) { // This input does produce the corVar referenced. return new RexInputRef(newInputPos + newInputOutputOffset, frame.r.getRowType().getFieldList().get(newInputPos) .getType()); } } // this input does not produce the corVar needed newInputOutputOffset += frame.r.getRowType().getFieldCount(); } else { // this input is not rewritten newInputOutputOffset += input.getRowType().getFieldCount(); } } return fieldAccess; } @Override public RexNode visitInputRef(RexInputRef inputRef) { final RexInputRef ref = getNewForOldInputRef(currentRel, map, inputRef); if (ref.getIndex() == inputRef.getIndex() && ref.getType() == inputRef.getType()) { return inputRef; // re-use old object, to prevent needless expr cloning } return ref; } } /** Shuttle that removes correlations. */ private class RemoveCorrelationRexShuttle extends RexShuttle { final RexBuilder rexBuilder; final RelDataTypeFactory typeFactory; final boolean projectPulledAboveLeftCorrelator; final RexInputRef nullIndicator; final ImmutableSet<Integer> isCount; RemoveCorrelationRexShuttle( RexBuilder rexBuilder, boolean projectPulledAboveLeftCorrelator, RexInputRef nullIndicator, Set<Integer> isCount) { this.projectPulledAboveLeftCorrelator = projectPulledAboveLeftCorrelator; this.nullIndicator = nullIndicator; // may be null this.isCount = ImmutableSet.copyOf(isCount); this.rexBuilder = rexBuilder; this.typeFactory = rexBuilder.getTypeFactory(); } private RexNode createCaseExpression( RexInputRef nullInputRef, RexLiteral lit, RexNode rexNode) { RexNode[] caseOperands = new RexNode[3]; // Construct a CASE expression to handle the null indicator. // // This also covers the case where a left correlated sub-query // projects fields from outer relation. Since LOJ cannot produce // nulls on the LHS, the projection now need to make a nullable LHS // reference using a nullability indicator. If this this indicator // is null, it means the sub-query does not produce any value. 
As a // result, any RHS ref by this sub-query needs to produce null value. // WHEN indicator IS NULL caseOperands[0] = rexBuilder.makeCall( SqlStdOperatorTable.IS_NULL, new RexInputRef( nullInputRef.getIndex(), typeFactory.createTypeWithNullability( nullInputRef.getType(), true))); // THEN CAST(NULL AS newInputTypeNullable) caseOperands[1] = lit == null ? rexBuilder.makeNullLiteral(rexNode.getType()) : rexBuilder.makeCast(rexNode.getType(), lit); // ELSE cast (newInput AS newInputTypeNullable) END caseOperands[2] = rexBuilder.makeCast( typeFactory.createTypeWithNullability( rexNode.getType(), true), rexNode); return rexBuilder.makeCall( SqlStdOperatorTable.CASE, caseOperands); } @Override public RexNode visitFieldAccess(RexFieldAccess fieldAccess) { if (cm.mapFieldAccessToCorRef.containsKey(fieldAccess)) { // if it is a corVar, change it to be input ref. CorRef corVar = cm.mapFieldAccessToCorRef.get(fieldAccess); // corVar offset should point to the leftInput of currentRel, // which is the Correlate. RexNode newRexNode = new RexInputRef(corVar.field, fieldAccess.getType()); if (projectPulledAboveLeftCorrelator && (nullIndicator != null)) { // need to enforce nullability by applying an additional // cast operator over the transformed expression. newRexNode = createCaseExpression(nullIndicator, null, newRexNode); } return newRexNode; } return fieldAccess; } @Override public RexNode visitInputRef(RexInputRef inputRef) { if (currentRel instanceof Correlate) { // if this rel references corVar // and now it needs to be rewritten // it must have been pulled above the Correlate // replace the input ref to account for the LHS of the // Correlate final int leftInputFieldCount = ((Correlate) currentRel).getLeft().getRowType() .getFieldCount(); RelDataType newType = inputRef.getType(); if (projectPulledAboveLeftCorrelator) { newType = typeFactory.createTypeWithNullability(newType, true); } int pos = inputRef.getIndex(); RexInputRef newInputRef = new RexInputRef(leftInputFieldCount + pos, newType); if ((isCount != null) && isCount.contains(pos)) { return createCaseExpression( newInputRef, rexBuilder.makeExactLiteral(BigDecimal.ZERO), newInputRef); } else { return newInputRef; } } return inputRef; } @Override public RexNode visitLiteral(RexLiteral literal) { // Use nullIndicator to decide whether to project null. // Do nothing if the literal is null. if (!RexUtil.isNull(literal) && projectPulledAboveLeftCorrelator && (nullIndicator != null)) { return createCaseExpression(nullIndicator, null, literal); } return literal; } @Override public RexNode visitCall(final RexCall call) { RexNode newCall; boolean[] update = {false}; List<RexNode> clonedOperands = visitList(call.operands, update); if (update[0]) { SqlOperator operator = call.getOperator(); boolean isSpecialCast = false; if (operator instanceof SqlFunction) { SqlFunction function = (SqlFunction) operator; if (function.getKind() == SqlKind.CAST) { if (call.operands.size() < 2) { isSpecialCast = true; } } } final RelDataType newType; if (!isSpecialCast) { // TODO: ideally this only needs to be called if the result // type will also change. However, since that requires // support from type inference rules to tell whether a rule // decides return type based on input types, for now all // operators will be recreated with new type if any operand // changed, unless the operator has "built-in" type. 
newType = rexBuilder.deriveReturnType(operator, clonedOperands); } else { // Use the current return type when creating a new call, for // operators with return type built into the operator // definition, and with no type inference rules, such as // cast function with less than 2 operands. // TODO: Comments in RexShuttle.visitCall() mention other // types in this category. Need to resolve those together // and preferably in the base class RexShuttle. newType = call.getType(); } newCall = rexBuilder.makeCall( newType, operator, clonedOperands); } else { newCall = call; } if (projectPulledAboveLeftCorrelator && (nullIndicator != null)) { return createCaseExpression(nullIndicator, null, newCall); } return newCall; } } /** * Rule to remove single_value rel. For cases like * * <blockquote>AggRel single_value proj/filter/agg/ join on unique LHS key * AggRel single group</blockquote> */ private final class RemoveSingleAggregateRule extends RelOptRule { RemoveSingleAggregateRule(RelBuilderFactory relBuilderFactory) { super( operand( Aggregate.class, operand( Project.class, operand(Aggregate.class, any()))), relBuilderFactory, null); } public void onMatch(RelOptRuleCall call) { Aggregate singleAggregate = call.rel(0); Project project = call.rel(1); Aggregate aggregate = call.rel(2); // check singleAggRel is single_value agg if ((!singleAggregate.getGroupSet().isEmpty()) || (singleAggregate.getAggCallList().size() != 1) || !(singleAggregate.getAggCallList().get(0).getAggregation() instanceof SqlSingleValueAggFunction)) { return; } // check projRel only projects one expression // check this project only projects one expression, i.e. scalar // sub-queries. List<RexNode> projExprs = project.getProjects(); if (projExprs.size() != 1) { return; } // check the input to project is an aggregate on the entire input if (!aggregate.getGroupSet().isEmpty()) { return; } // singleAggRel produces a nullable type, so create the new // projection that casts proj expr to a nullable type. final RelBuilder relBuilder = call.builder(); final RelDataType type = relBuilder.getTypeFactory() .createTypeWithNullability(projExprs.get(0).getType(), true); final RexNode cast = relBuilder.getRexBuilder().makeCast(type, projExprs.get(0)); relBuilder.push(aggregate) .project(cast); call.transformTo(relBuilder.build()); } } /** Planner rule that removes correlations for scalar projects. */ private final class RemoveCorrelationForScalarProjectRule extends RelOptRule { RemoveCorrelationForScalarProjectRule(RelBuilderFactory relBuilderFactory) { super( operand(Correlate.class, operand(RelNode.class, any()), operand(Aggregate.class, operand(Project.class, operand(RelNode.class, any())))), relBuilderFactory, null); } public void onMatch(RelOptRuleCall call) { final Correlate correlate = call.rel(0); final RelNode left = call.rel(1); final Aggregate aggregate = call.rel(2); final Project project = call.rel(3); RelNode right = call.rel(4); final RelOptCluster cluster = correlate.getCluster(); setCurrent(call.getPlanner().getRoot(), correlate); // Check for this pattern. // The pattern matching could be simplified if rules can be applied // during decorrelation. // // Correlate(left correlation, condition = true) // leftInput // Aggregate (groupby (0) single_value()) // Project-A (may reference corVar) // rightInput final JoinRelType joinType = correlate.getJoinType(); // corRel.getCondition was here, however Correlate was updated so it // never includes a join condition. The code was not modified for brevity. 
RexNode joinCond = relBuilder.literal(true); if ((joinType != JoinRelType.LEFT) || (joinCond != relBuilder.literal(true))) { return; } // check that the agg is of the following type: // doing a single_value() on the entire input if ((!aggregate.getGroupSet().isEmpty()) || (aggregate.getAggCallList().size() != 1) || !(aggregate.getAggCallList().get(0).getAggregation() instanceof SqlSingleValueAggFunction)) { return; } // check this project only projects one expression, i.e. scalar // sub-queries. if (project.getProjects().size() != 1) { return; } int nullIndicatorPos; if ((right instanceof Filter) && cm.mapRefRelToCorRef.containsKey(right)) { // rightInput has this shape: // // Filter (references corVar) // filterInput // If rightInput is a filter and contains correlated // reference, make sure the correlated keys in the filter // condition forms a unique key of the RHS. Filter filter = (Filter) right; right = filter.getInput(); assert right instanceof HepRelVertex; right = ((HepRelVertex) right).getCurrentRel(); // check filter input contains no correlation if (RelOptUtil.getVariablesUsed(right).size() > 0) { return; } // extract the correlation out of the filter // First breaking up the filter conditions into equality // comparisons between rightJoinKeys (from the original // filterInput) and correlatedJoinKeys. correlatedJoinKeys // can be expressions, while rightJoinKeys need to be input // refs. These comparisons are AND'ed together. List<RexNode> tmpRightJoinKeys = new ArrayList<>(); List<RexNode> correlatedJoinKeys = new ArrayList<>(); RelOptUtil.splitCorrelatedFilterCondition( filter, tmpRightJoinKeys, correlatedJoinKeys, false); // check that the columns referenced in these comparisons form // an unique key of the filterInput final List<RexInputRef> rightJoinKeys = new ArrayList<>(); for (RexNode key : tmpRightJoinKeys) { assert key instanceof RexInputRef; rightJoinKeys.add((RexInputRef) key); } // check that the columns referenced in rightJoinKeys form an // unique key of the filterInput if (rightJoinKeys.isEmpty()) { return; } // The join filters out the nulls. So, it's ok if there are // nulls in the join keys. final RelMetadataQuery mq = call.getMetadataQuery(); if (!RelMdUtil.areColumnsDefinitelyUniqueWhenNullsFiltered(mq, right, rightJoinKeys)) { SQL2REL_LOGGER.debug("{} are not unique keys for {}", rightJoinKeys, right); return; } RexUtil.FieldAccessFinder visitor = new RexUtil.FieldAccessFinder(); RexUtil.apply(visitor, correlatedJoinKeys, null); List<RexFieldAccess> correlatedKeyList = visitor.getFieldAccessList(); if (!checkCorVars(correlate, project, filter, correlatedKeyList)) { return; } // Change the plan to this structure. // Note that the Aggregate is removed. // // Project-A' (replace corVar to input ref from the Join) // Join (replace corVar to input ref from leftInput) // leftInput // rightInput (previously filterInput) // Change the filter condition into a join condition joinCond = removeCorrelationExpr(filter.getCondition(), false); nullIndicatorPos = left.getRowType().getFieldCount() + rightJoinKeys.get(0).getIndex(); } else if (cm.mapRefRelToCorRef.containsKey(project)) { // check filter input contains no correlation if (RelOptUtil.getVariablesUsed(right).size() > 0) { return; } if (!checkCorVars(correlate, project, null, null)) { return; } // Change the plan to this structure. // // Project-A' (replace corVar to input ref from Join) // Join (left, condition = true) // leftInput // Aggregate(groupby(0), single_value(0), s_v(1)....) 
// Project-B (everything from input plus literal true) // projectInput // make the new Project to provide a null indicator right = createProjectWithAdditionalExprs(right, ImmutableList.of( Pair.of(relBuilder.literal(true), "nullIndicator"))); // make the new aggRel right = RelOptUtil.createSingleValueAggRel(cluster, right); // The last field: // single_value(true) // is the nullIndicator nullIndicatorPos = left.getRowType().getFieldCount() + right.getRowType().getFieldCount() - 1; } else { return; } // make the new join rel Join join = (Join) relBuilder.push(left).push(right) .join(joinType, joinCond).build(); RelNode newProject = projectJoinOutputWithNullability(join, project, nullIndicatorPos); call.transformTo(newProject); removeCorVarFromTree(correlate); } } /** Planner rule that removes correlations for scalar aggregates. */ private final class RemoveCorrelationForScalarAggregateRule extends RelOptRule { RemoveCorrelationForScalarAggregateRule(RelBuilderFactory relBuilderFactory) { super( operand(Correlate.class, operand(RelNode.class, any()), operand(Project.class, operandJ(Aggregate.class, null, Aggregate::isSimple, operand(Project.class, operand(RelNode.class, any()))))), relBuilderFactory, null); } public void onMatch(RelOptRuleCall call) { final Correlate correlate = call.rel(0); final RelNode left = call.rel(1); final Project aggOutputProject = call.rel(2); final Aggregate aggregate = call.rel(3); final Project aggInputProject = call.rel(4); RelNode right = call.rel(5); final RelBuilder builder = call.builder(); final RexBuilder rexBuilder = builder.getRexBuilder(); final RelOptCluster cluster = correlate.getCluster(); setCurrent(call.getPlanner().getRoot(), correlate); // check for this pattern // The pattern matching could be simplified if rules can be applied // during decorrelation, // // CorrelateRel(left correlation, condition = true) // leftInput // Project-A (a RexNode) // Aggregate (groupby (0), agg0(), agg1()...) // Project-B (references coVar) // rightInput // check aggOutputProject projects only one expression final List<RexNode> aggOutputProjects = aggOutputProject.getProjects(); if (aggOutputProjects.size() != 1) { return; } final JoinRelType joinType = correlate.getJoinType(); // corRel.getCondition was here, however Correlate was updated so it // never includes a join condition. The code was not modified for brevity. RexNode joinCond = rexBuilder.makeLiteral(true); if ((joinType != JoinRelType.LEFT) || (joinCond != rexBuilder.makeLiteral(true))) { return; } // check that the agg is on the entire input if (!aggregate.getGroupSet().isEmpty()) { return; } final List<RexNode> aggInputProjects = aggInputProject.getProjects(); final List<AggregateCall> aggCalls = aggregate.getAggCallList(); final Set<Integer> isCountStar = new HashSet<>(); // mark if agg produces count(*) which needs to reference the // nullIndicator after the transformation. 
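      // (Illustrative example: SELECT (SELECT COUNT(*) FROM emp e
      // WHERE e.deptno = d.deptno) FROM dept d must return 0 for a department
      // with no employees. After the rewrite, such departments survive as
      // null-extended rows of the outer join, and count(*) would count them as 1;
      // count(nullIndicator) counts only the non-null indicator values and
      // therefore yields the correct 0.)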
int k = -1; for (AggregateCall aggCall : aggCalls) { ++k; if ((aggCall.getAggregation() instanceof SqlCountAggFunction) && (aggCall.getArgList().size() == 0)) { isCountStar.add(k); } } if ((right instanceof Filter) && cm.mapRefRelToCorRef.containsKey(right)) { // rightInput has this shape: // // Filter (references corVar) // filterInput Filter filter = (Filter) right; right = filter.getInput(); assert right instanceof HepRelVertex; right = ((HepRelVertex) right).getCurrentRel(); // check filter input contains no correlation if (RelOptUtil.getVariablesUsed(right).size() > 0) { return; } // check filter condition type First extract the correlation out // of the filter // First breaking up the filter conditions into equality // comparisons between rightJoinKeys(from the original // filterInput) and correlatedJoinKeys. correlatedJoinKeys // can only be RexFieldAccess, while rightJoinKeys can be // expressions. These comparisons are AND'ed together. List<RexNode> rightJoinKeys = new ArrayList<>(); List<RexNode> tmpCorrelatedJoinKeys = new ArrayList<>(); RelOptUtil.splitCorrelatedFilterCondition( filter, rightJoinKeys, tmpCorrelatedJoinKeys, true); // make sure the correlated reference forms a unique key check // that the columns referenced in these comparisons form an // unique key of the leftInput List<RexFieldAccess> correlatedJoinKeys = new ArrayList<>(); List<RexInputRef> correlatedInputRefJoinKeys = new ArrayList<>(); for (RexNode joinKey : tmpCorrelatedJoinKeys) { assert joinKey instanceof RexFieldAccess; correlatedJoinKeys.add((RexFieldAccess) joinKey); RexNode correlatedInputRef = removeCorrelationExpr(joinKey, false); assert correlatedInputRef instanceof RexInputRef; correlatedInputRefJoinKeys.add( (RexInputRef) correlatedInputRef); } // check that the columns referenced in rightJoinKeys form an // unique key of the filterInput if (correlatedInputRefJoinKeys.isEmpty()) { return; } // The join filters out the nulls. So, it's ok if there are // nulls in the join keys. final RelMetadataQuery mq = call.getMetadataQuery(); if (!RelMdUtil.areColumnsDefinitelyUniqueWhenNullsFiltered(mq, left, correlatedInputRefJoinKeys)) { SQL2REL_LOGGER.debug("{} are not unique keys for {}", correlatedJoinKeys, left); return; } // check corVar references are valid if (!checkCorVars(correlate, aggInputProject, filter, correlatedJoinKeys)) { return; } // Rewrite the above plan: // // Correlate(left correlation, condition = true) // leftInput // Project-A (a RexNode) // Aggregate (groupby(0), agg0(),agg1()...) // Project-B (may reference corVar) // Filter (references corVar) // rightInput (no correlated reference) // // to this plan: // // Project-A' (all gby keys + rewritten nullable ProjExpr) // Aggregate (groupby(all left input refs) // agg0(rewritten expression), // agg1()...) // Project-B' (rewritten original projected exprs) // Join(replace corVar w/ input ref from leftInput) // leftInput // rightInput // // In the case where agg is count(*) or count($corVar), it is // changed to count(nullIndicator). // Note: any non-nullable field from the RHS can be used as // the indicator however a "true" field is added to the // projection list from the RHS for simplicity to avoid // searching for non-null fields. // // Project-A' (all gby keys + rewritten nullable ProjExpr) // Aggregate (groupby(all left input refs), // count(nullIndicator), other aggs...) 
// Project-B' (all left input refs plus // the rewritten original projected exprs) // Join(replace corVar to input ref from leftInput) // leftInput // Project (everything from rightInput plus // the nullIndicator "true") // rightInput // // first change the filter condition into a join condition joinCond = removeCorrelationExpr(filter.getCondition(), false); } else if (cm.mapRefRelToCorRef.containsKey(aggInputProject)) { // check rightInput contains no correlation if (RelOptUtil.getVariablesUsed(right).size() > 0) { return; } // check corVar references are valid if (!checkCorVars(correlate, aggInputProject, null, null)) { return; } int nFields = left.getRowType().getFieldCount(); ImmutableBitSet allCols = ImmutableBitSet.range(nFields); // leftInput contains unique keys // i.e. each row is distinct and can group by on all the left // fields final RelMetadataQuery mq = call.getMetadataQuery(); if (!RelMdUtil.areColumnsDefinitelyUnique(mq, left, allCols)) { SQL2REL_LOGGER.debug("There are no unique keys for {}", left); return; } // // Rewrite the above plan: // // CorrelateRel(left correlation, condition = true) // leftInput // Project-A (a RexNode) // Aggregate (groupby(0), agg0(), agg1()...) // Project-B (references coVar) // rightInput (no correlated reference) // // to this plan: // // Project-A' (all gby keys + rewritten nullable ProjExpr) // Aggregate (groupby(all left input refs) // agg0(rewritten expression), // agg1()...) // Project-B' (rewritten original projected exprs) // Join (LOJ cond = true) // leftInput // rightInput // // In the case where agg is count($corVar), it is changed to // count(nullIndicator). // Note: any non-nullable field from the RHS can be used as // the indicator however a "true" field is added to the // projection list from the RHS for simplicity to avoid // searching for non-null fields. // // Project-A' (all gby keys + rewritten nullable ProjExpr) // Aggregate (groupby(all left input refs), // count(nullIndicator), other aggs...) 
// Project-B' (all left input refs plus // the rewritten original projected exprs) // Join (replace corVar to input ref from leftInput) // leftInput // Project (everything from rightInput plus // the nullIndicator "true") // rightInput } else { return; } RelDataType leftInputFieldType = left.getRowType(); int leftInputFieldCount = leftInputFieldType.getFieldCount(); int joinOutputProjExprCount = leftInputFieldCount + aggInputProjects.size() + 1; right = createProjectWithAdditionalExprs(right, ImmutableList.of( Pair.of(rexBuilder.makeLiteral(true), "nullIndicator"))); Join join = (Join) relBuilder.push(left).push(right) .join(joinType, joinCond, ImmutableSet.of()).build(); // To the consumer of joinOutputProjRel, nullIndicator is located // at the end int nullIndicatorPos = join.getRowType().getFieldCount() - 1; RexInputRef nullIndicator = new RexInputRef( nullIndicatorPos, cluster.getTypeFactory().createTypeWithNullability( join.getRowType().getFieldList() .get(nullIndicatorPos).getType(), true)); // first project all group-by keys plus the transformed agg input List<RexNode> joinOutputProjects = new ArrayList<>(); // LOJ Join preserves LHS types for (int i = 0; i < leftInputFieldCount; i++) { joinOutputProjects.add( rexBuilder.makeInputRef( leftInputFieldType.getFieldList().get(i).getType(), i)); } for (RexNode aggInputProjExpr : aggInputProjects) { joinOutputProjects.add( removeCorrelationExpr(aggInputProjExpr, joinType.generatesNullsOnRight(), nullIndicator)); } joinOutputProjects.add( rexBuilder.makeInputRef(join, nullIndicatorPos)); final RelNode joinOutputProject = builder.push(join) .project(joinOutputProjects) .build(); // nullIndicator is now at a different location in the output of // the join nullIndicatorPos = joinOutputProjExprCount - 1; final int groupCount = leftInputFieldCount; List<AggregateCall> newAggCalls = new ArrayList<>(); k = -1; for (AggregateCall aggCall : aggCalls) { ++k; final List<Integer> argList; if (isCountStar.contains(k)) { // this is a count(*), transform it to count(nullIndicator) // the null indicator is located at the end argList = Collections.singletonList(nullIndicatorPos); } else { argList = new ArrayList<>(); for (int aggArg : aggCall.getArgList()) { argList.add(aggArg + groupCount); } } int filterArg = aggCall.filterArg < 0 ? aggCall.filterArg : aggCall.filterArg + groupCount; newAggCalls.add( aggCall.adaptTo(joinOutputProject, argList, filterArg, aggregate.getGroupCount(), groupCount)); } ImmutableBitSet groupSet = ImmutableBitSet.range(groupCount); builder.push(joinOutputProject).aggregate(builder.groupKey(groupSet, null), newAggCalls); List<RexNode> newAggOutputProjectList = new ArrayList<>(); for (int i : groupSet) { newAggOutputProjectList.add( rexBuilder.makeInputRef(builder.peek(), i)); } RexNode newAggOutputProjects = removeCorrelationExpr(aggOutputProjects.get(0), false); newAggOutputProjectList.add( rexBuilder.makeCast( cluster.getTypeFactory().createTypeWithNullability( newAggOutputProjects.getType(), true), newAggOutputProjects)); builder.project(newAggOutputProjectList); call.transformTo(builder.build()); removeCorVarFromTree(correlate); } } // REVIEW jhyde 29-Oct-2007: This rule is non-static, depends on the state // of members in RelDecorrelator, and has side-effects in the decorrelator. // This breaks the contract of a planner rule, and the rule will not be // reusable in other planners. // REVIEW jvs 29-Oct-2007: Shouldn't it also be incorporating // the flavor attribute into the description? 
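  // The "flavor" flag distinguishes whether a Project sits between the Correlate
  // and the Aggregate: when true the rule matches
  // Correlate(RelNode, Project(Aggregate)); when false it matches
  // Correlate(RelNode, Aggregate) and synthesizes an identity Project so both
  // shapes go through the same rewrite.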
/** Planner rule that adjusts projects when counts are added. */ private final class AdjustProjectForCountAggregateRule extends RelOptRule { final boolean flavor; AdjustProjectForCountAggregateRule(boolean flavor, RelBuilderFactory relBuilderFactory) { super( flavor ? operand(Correlate.class, operand(RelNode.class, any()), operand(Project.class, operand(Aggregate.class, any()))) : operand(Correlate.class, operand(RelNode.class, any()), operand(Aggregate.class, any())), relBuilderFactory, null); this.flavor = flavor; } public void onMatch(RelOptRuleCall call) { final Correlate correlate = call.rel(0); final RelNode left = call.rel(1); final Project aggOutputProject; final Aggregate aggregate; if (flavor) { aggOutputProject = call.rel(2); aggregate = call.rel(3); } else { aggregate = call.rel(2); // Create identity projection final List<Pair<RexNode, String>> projects = new ArrayList<>(); final List<RelDataTypeField> fields = aggregate.getRowType().getFieldList(); for (int i = 0; i < fields.size(); i++) { projects.add(RexInputRef.of2(projects.size(), fields)); } final RelBuilder relBuilder = call.builder(); relBuilder.push(aggregate) .projectNamed(Pair.left(projects), Pair.right(projects), true); aggOutputProject = (Project) relBuilder.build(); } onMatch2(call, correlate, left, aggOutputProject, aggregate); } private void onMatch2( RelOptRuleCall call, Correlate correlate, RelNode leftInput, Project aggOutputProject, Aggregate aggregate) { if (generatedCorRels.contains(correlate)) { // This Correlate was generated by a previous invocation of // this rule. No further work to do. return; } setCurrent(call.getPlanner().getRoot(), correlate); // check for this pattern // The pattern matching could be simplified if rules can be applied // during decorrelation, // // CorrelateRel(left correlation, condition = true) // leftInput // Project-A (a RexNode) // Aggregate (groupby (0), agg0(), agg1()...) // check aggOutputProj projects only one expression List<RexNode> aggOutputProjExprs = aggOutputProject.getProjects(); if (aggOutputProjExprs.size() != 1) { return; } JoinRelType joinType = correlate.getJoinType(); // corRel.getCondition was here, however Correlate was updated so it // never includes a join condition. The code was not modified for brevity. RexNode joinCond = relBuilder.literal(true); if ((joinType != JoinRelType.LEFT) || (joinCond != relBuilder.literal(true))) { return; } // check that the agg is on the entire input if (!aggregate.getGroupSet().isEmpty()) { return; } List<AggregateCall> aggCalls = aggregate.getAggCallList(); Set<Integer> isCount = new HashSet<>(); // remember the count() positions int i = -1; for (AggregateCall aggCall : aggCalls) { ++i; if (aggCall.getAggregation() instanceof SqlCountAggFunction) { isCount.add(i); } } // now rewrite the plan to // // Project-A' (all LHS plus transformed original projections, // replacing references to count() with case statement) // Correlate(left correlation, condition = true) // leftInput // Aggregate(groupby (0), agg0(), agg1()...) 
// List<RexNode> requiredNodes = correlate.getRequiredColumns().asList().stream() .map(ord -> relBuilder.getRexBuilder().makeInputRef(correlate, ord)) .collect(Collectors.toList()); Correlate newCorrelate = (Correlate) relBuilder.push(leftInput) .push(aggregate).correlate(correlate.getJoinType(), correlate.getCorrelationId(), requiredNodes).build(); // remember this rel so we don't fire rule on it again // REVIEW jhyde 29-Oct-2007: rules should not save state; rule // should recognize patterns where it does or does not need to do // work generatedCorRels.add(newCorrelate); // need to update the mapCorToCorRel Update the output position // for the corVars: only pass on the corVars that are not used in // the join key. if (cm.mapCorToCorRel.get(correlate.getCorrelationId()) == correlate) { cm.mapCorToCorRel.put(correlate.getCorrelationId(), newCorrelate); } RelNode newOutput = aggregateCorrelatorOutput(newCorrelate, aggOutputProject, isCount); call.transformTo(newOutput); } } /** * A unique reference to a correlation field. * * <p>For instance, if a RelNode references emp.name multiple times, it would * result in multiple {@code CorRef} objects that differ just in * {@link CorRef#uniqueKey}. */ static class CorRef implements Comparable<CorRef> { public final int uniqueKey; public final CorrelationId corr; public final int field; CorRef(CorrelationId corr, int field, int uniqueKey) { this.corr = corr; this.field = field; this.uniqueKey = uniqueKey; } @Override public String toString() { return corr.getName() + '.' + field; } @Override public int hashCode() { return Objects.hash(uniqueKey, corr, field); } @Override public boolean equals(Object o) { return this == o || o instanceof CorRef && uniqueKey == ((CorRef) o).uniqueKey && corr == ((CorRef) o).corr && field == ((CorRef) o).field; } public int compareTo(@Nonnull CorRef o) { int c = corr.compareTo(o.corr); if (c != 0) { return c; } c = Integer.compare(field, o.field); if (c != 0) { return c; } return Integer.compare(uniqueKey, o.uniqueKey); } public CorDef def() { return new CorDef(corr, field); } } /** A correlation and a field. */ static class CorDef implements Comparable<CorDef> { public final CorrelationId corr; public final int field; CorDef(CorrelationId corr, int field) { this.corr = corr; this.field = field; } @Override public String toString() { return corr.getName() + '.' + field; } @Override public int hashCode() { return Objects.hash(corr, field); } @Override public boolean equals(Object o) { return this == o || o instanceof CorDef && corr == ((CorDef) o).corr && field == ((CorDef) o).field; } public int compareTo(@Nonnull CorDef o) { int c = corr.compareTo(o.corr); if (c != 0) { return c; } return Integer.compare(field, o.field); } } /** A map of the locations of * {@link org.apache.calcite.rel.core.Correlate} * in a tree of {@link RelNode}s. * * <p>It is used to drive the decorrelation process. * Treat it as immutable; rebuild if you modify the tree. * * <p>There are three maps:<ol> * * <li>{@link #mapRefRelToCorRef} maps a {@link RelNode} to the correlated * variables it references; * * <li>{@link #mapCorToCorRel} maps a correlated variable to the * {@link Correlate} providing it; * * <li>{@link #mapFieldAccessToCorRef} maps a rex field access to * the corVar it represents. Because typeFlattener does not clone or * modify a correlated field access this map does not need to be * updated. 
* * </ol> */ protected static class CorelMap { private final Multimap<RelNode, CorRef> mapRefRelToCorRef; private final SortedMap<CorrelationId, RelNode> mapCorToCorRel; private final Map<RexFieldAccess, CorRef> mapFieldAccessToCorRef; // TODO: create immutable copies of all maps private CorelMap(Multimap<RelNode, CorRef> mapRefRelToCorRef, SortedMap<CorrelationId, RelNode> mapCorToCorRel, Map<RexFieldAccess, CorRef> mapFieldAccessToCorRef) { this.mapRefRelToCorRef = mapRefRelToCorRef; this.mapCorToCorRel = mapCorToCorRel; this.mapFieldAccessToCorRef = ImmutableMap.copyOf(mapFieldAccessToCorRef); } @Override public String toString() { return "mapRefRelToCorRef=" + mapRefRelToCorRef + "\nmapCorToCorRel=" + mapCorToCorRel + "\nmapFieldAccessToCorRef=" + mapFieldAccessToCorRef + "\n"; } @Override public boolean equals(Object obj) { return obj == this || obj instanceof CorelMap && mapRefRelToCorRef.equals(((CorelMap) obj).mapRefRelToCorRef) && mapCorToCorRel.equals(((CorelMap) obj).mapCorToCorRel) && mapFieldAccessToCorRef.equals( ((CorelMap) obj).mapFieldAccessToCorRef); } @Override public int hashCode() { return Objects.hash(mapRefRelToCorRef, mapCorToCorRel, mapFieldAccessToCorRef); } /** Creates a CorelMap with given contents. */ public static CorelMap of( SortedSetMultimap<RelNode, CorRef> mapRefRelToCorVar, SortedMap<CorrelationId, RelNode> mapCorToCorRel, Map<RexFieldAccess, CorRef> mapFieldAccessToCorVar) { return new CorelMap(mapRefRelToCorVar, mapCorToCorRel, mapFieldAccessToCorVar); } public SortedMap<CorrelationId, RelNode> getMapCorToCorRel() { return mapCorToCorRel; } /** * Returns whether there are any correlating variables in this statement. * * @return whether there are any correlating variables */ public boolean hasCorrelation() { return !mapCorToCorRel.isEmpty(); } } /** Builds a {@link org.apache.calcite.sql2rel.RelDecorrelator.CorelMap}. */ public static class CorelMapBuilder extends RelHomogeneousShuttle { final SortedMap<CorrelationId, RelNode> mapCorToCorRel = new TreeMap<>(); final SortedSetMultimap<RelNode, CorRef> mapRefRelToCorRef = MultimapBuilder.SortedSetMultimapBuilder.hashKeys() .treeSetValues() .build(); final Map<RexFieldAccess, CorRef> mapFieldAccessToCorVar = new HashMap<>(); final Holder<Integer> offset = Holder.of(0); int corrIdGenerator = 0; /** Creates a CorelMap by iterating over a {@link RelNode} tree. */ public CorelMap build(RelNode... 
rels) { for (RelNode rel : rels) { stripHep(rel).accept(this); } return new CorelMap(mapRefRelToCorRef, mapCorToCorRel, mapFieldAccessToCorVar); } @Override public RelNode visit(RelNode other) { if (other instanceof Join) { Join join = (Join) other; try { stack.push(join); join.getCondition().accept(rexVisitor(join)); } finally { stack.pop(); } return visitJoin(join); } else if (other instanceof Correlate) { Correlate correlate = (Correlate) other; mapCorToCorRel.put(correlate.getCorrelationId(), correlate); return visitJoin(correlate); } else if (other instanceof Filter) { Filter filter = (Filter) other; try { stack.push(filter); filter.getCondition().accept(rexVisitor(filter)); } finally { stack.pop(); } } else if (other instanceof Project) { Project project = (Project) other; try { stack.push(project); for (RexNode node : project.getProjects()) { node.accept(rexVisitor(project)); } } finally { stack.pop(); } } return super.visit(other); } @Override protected RelNode visitChild(RelNode parent, int i, RelNode input) { return super.visitChild(parent, i, stripHep(input)); } private RelNode visitJoin(BiRel join) { final int x = offset.get(); visitChild(join, 0, join.getLeft()); offset.set(x + join.getLeft().getRowType().getFieldCount()); visitChild(join, 1, join.getRight()); offset.set(x); return join; } private RexVisitorImpl<Void> rexVisitor(final RelNode rel) { return new RexVisitorImpl<Void>(true) { @Override public Void visitFieldAccess(RexFieldAccess fieldAccess) { final RexNode ref = fieldAccess.getReferenceExpr(); if (ref instanceof RexCorrelVariable) { final RexCorrelVariable var = (RexCorrelVariable) ref; if (mapFieldAccessToCorVar.containsKey(fieldAccess)) { // for cases where different Rel nodes are referring to // same correlation var (e.g. in case of NOT IN) // avoid generating another correlation var // and record the 'rel' is using the same correlation mapRefRelToCorRef.put(rel, mapFieldAccessToCorVar.get(fieldAccess)); } else { final CorRef correlation = new CorRef(var.id, fieldAccess.getField().getIndex(), corrIdGenerator++); mapFieldAccessToCorVar.put(fieldAccess, correlation); mapRefRelToCorRef.put(rel, correlation); } } return super.visitFieldAccess(fieldAccess); } @Override public Void visitSubQuery(RexSubQuery subQuery) { subQuery.rel.accept(CorelMapBuilder.this); return super.visitSubQuery(subQuery); } }; } } /** Frame describing the relational expression after decorrelation * and where to find the output fields and correlation variables * among its output fields. */ static class Frame { final RelNode r; final ImmutableSortedMap<CorDef, Integer> corDefOutputs; final ImmutableSortedMap<Integer, Integer> oldToNewOutputs; Frame(RelNode oldRel, RelNode r, SortedMap<CorDef, Integer> corDefOutputs, Map<Integer, Integer> oldToNewOutputs) { this.r = Objects.requireNonNull(r); this.corDefOutputs = ImmutableSortedMap.copyOf(corDefOutputs); this.oldToNewOutputs = ImmutableSortedMap.copyOf(oldToNewOutputs); assert allLessThan(this.corDefOutputs.values(), r.getRowType().getFieldCount(), Litmus.THROW); assert allLessThan(this.oldToNewOutputs.keySet(), oldRel.getRowType().getFieldCount(), Litmus.THROW); assert allLessThan(this.oldToNewOutputs.values(), r.getRowType().getFieldCount(), Litmus.THROW); } } } // End RelDecorrelator.java
Adjust code indentation (Axis)

Current indentation would confuse developers.

Close apache/calcite#1649
core/src/main/java/org/apache/calcite/sql2rel/RelDecorrelator.java
Adjust code indentation (Axis)
Java
bsd-3-clause
de2e620d6fcb5309e47f61510698a651d0df8698
0
NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive
package gov.nih.nci.nbia.textsupport; import java.util.ArrayList; import java.util.List; import org.apache.log4j.Logger; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrInputDocument; import org.hibernate.SessionFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import gov.nih.nci.nbia.dao.PatientDAOImpl; import gov.nih.nci.nbia.dynamicsearch.QueryHandler; import gov.nih.nci.nbia.dynamicsearch.QueryHandlerImpl; import gov.nih.nci.nbia.internaldomain.Patient; import gov.nih.nci.nbia.util.SpringApplicationContext; import gov.nih.nci.nbia.dao.*; @Transactional public class PatientUpdater { @Autowired static Logger log = Logger.getLogger(PatientUpdater.class); private SessionFactory sessionFactory; private static String lastRan; private static boolean stillRunning=false; private static List<String> collectionList = new ArrayList<String>(); private static SolrServer server = NBIAEmbeddedSolrServer.getInstance().GetServer(); public void setSessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; } public void runUpdates() { try{ if (stillRunning) { log.info("Previous update is still running"); } updateSubmittedPatients(); updateCollections(); } catch (Exception e) { stillRunning = false; // I'm dead! e.printStackTrace(); } } @Transactional(propagation=Propagation.REQUIRED) protected void updateSubmittedPatients() throws Exception { TextSupportDAO support = (TextSupportDAO)SpringApplicationContext.getBean("textSupportDAO"); //this.sessionFactory= support.getSessionFactory(); log.error("Solr update submitted patients has been called"); String maxTimeStamp; if (lastRan==null) // either new installation or server restarted we will look for it in Solr { String term = "id:NBIAsolrIndexingRun"; SolrQuery query = new SolrQuery(term); QueryResponse rsp = server.query( query ); SolrDocumentList docs = rsp.getResults(); if (docs.size()<1) { // can't find it, we need to re-index to be sure log.error("Can find last ran doc, we need to reindex"); lastRan = "2003-04-04 05:54:01"; } else // get the value { if (docs.get(0).get("lastRan") == null) { log.error("Can find last ran doc, we need to reindex"); System.out.println(docs.get(0)); lastRan = "2003-04-04 05:54:01"; } else { lastRan = docs.get(0).get("lastRan").toString(); log.error("The patient updater was last run - "+lastRan); } } } PatientAccessDAO patientAccess = new PatientAccessDAO(); maxTimeStamp = support.getMaxTimeStamp(); if (maxTimeStamp.length()<2) { log.error("It appears the submission log is empty"); return; //nothing to do } List<Object>rs = support.getUpdatedPatients(lastRan, maxTimeStamp); if (rs.size()==0) { log.error("No new items in submission log"); return; //nothing to do } for (Object result : rs) { String patientId = result.toString(); log.error("Updated patient-"+patientId+" Solr Update request made"); PatientDocument doc = patientAccess.getPatientDocument(patientId); SolrStorage.addPatientDocument(doc); } SolrInputDocument solrDoc = new SolrInputDocument(); solrDoc.addField( "id", "NBIAsolrIndexingRun"); solrDoc.addField( "lastRan", maxTimeStamp); log.debug("Last ran = "+solrDoc.toString()); server.add(solrDoc); server.commit(); } @Transactional protected void 
updateCollections() throws Exception { TextSupportDAO support = (TextSupportDAO)SpringApplicationContext.getBean("textSupportDAO"); // this.sessionFactory=support.getSessionFactory(); PatientAccessDAO patientAccess = new PatientAccessDAO(); patientAccess.setSessionFactory(sessionFactory); List<String> localList = new ArrayList<String>(); localList.addAll(collectionList); collectionList.clear(); for (String collection:localList) { { log.error("updating all patients in collection "+collection); List<Object> rs = support.getPatientsForCollection(collection); if (rs.size()==0) return; //nothing to do for (Object result : rs) { String patientId = result.toString(); log.error("Calling to update patient from collection " + patientId); PatientDocument doc = patientAccess.getPatientDocument(patientId); SolrStorage.addPatientDocument(doc); } } } } public void addCollectionUpdate(String collection) { if (collection==null) return; // nothing to do; if (collectionList.contains(collection)) return; //already got it collectionList.add(collection); } }
software/nbia-dao/src/gov/nih/nci/nbia/textsupport/PatientUpdater.java
package gov.nih.nci.nbia.textsupport; import java.util.ArrayList; import java.util.List; import org.apache.log4j.Logger; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrInputDocument; import org.hibernate.SessionFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import gov.nih.nci.nbia.dao.PatientDAOImpl; import gov.nih.nci.nbia.dynamicsearch.QueryHandler; import gov.nih.nci.nbia.dynamicsearch.QueryHandlerImpl; import gov.nih.nci.nbia.internaldomain.Patient; import gov.nih.nci.nbia.util.SpringApplicationContext; import gov.nih.nci.nbia.dao.*; @Transactional public class PatientUpdater { @Autowired static Logger log = Logger.getLogger(PatientUpdater.class); private SessionFactory sessionFactory; private static String lastRan; private static boolean stillRunning=false; private static List<String> collectionList = new ArrayList<String>(); private static SolrServer server = NBIAEmbeddedSolrServer.getInstance().GetServer(); public void setSessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; } public void runUpdates() { try{ if (stillRunning) { log.info("Previous update is still running"); } updateSubmittedPatients(); updateCollections(); } catch (Exception e) { stillRunning = false; // I'm dead! e.printStackTrace(); } } @Transactional(propagation=Propagation.REQUIRED) protected void updateSubmittedPatients() throws Exception { TextSupportDAO support = (TextSupportDAO)SpringApplicationContext.getBean("textSupportDAO"); //this.sessionFactory= support.getSessionFactory(); log.error("Solr update submitted patients has been called"); String maxTimeStamp; if (lastRan==null) // either new installation or server restarted we will look for it in Solr { String term = "id:NBIAsolrIndexingRun"; SolrQuery query = new SolrQuery(term); QueryResponse rsp = server.query( query ); SolrDocumentList docs = rsp.getResults(); if (docs.size()<1) { // can't find it, we need to re-index to be sure log.error("Can find last ran doc, we need to reindex"); lastRan = "2003-04-04 05:54:01"; } else // get the value { if (docs.get(0).get("lastRan") == null) { log.error("Can find last ran doc, we need to reindex"); System.out.println(docs.get(0)); lastRan = "2003-04-04 05:54:01"; } else { lastRan = docs.get(0).get("lastRan").toString(); log.error("The patient updater was last run - "+lastRan); } } } PatientAccessDAO patientAccess = new PatientAccessDAO(); maxTimeStamp = support.getMaxTimeStamp(); if (maxTimeStamp.length()<2) { log.error("It appears the submission log is empty"); return; //nothing to do } List<Object>rs = support.getUpdatedPatients(maxTimeStamp, lastRan); if (rs.size()==0) { log.error("No new items in submission log"); return; //nothing to do } for (Object result : rs) { String patientId = result.toString(); log.error("Updated patient-"+patientId+" Solr Update request made"); PatientDocument doc = patientAccess.getPatientDocument(patientId); SolrStorage.addPatientDocument(doc); } SolrInputDocument solrDoc = new SolrInputDocument(); solrDoc.addField( "id", "NBIAsolrIndexingRun"); solrDoc.addField( "lastRan", maxTimeStamp); log.debug("Last ran = "+solrDoc.toString()); server.add(solrDoc); server.commit(); } @Transactional protected void 
updateCollections() throws Exception { TextSupportDAO support = (TextSupportDAO)SpringApplicationContext.getBean("textSupportDAO"); // this.sessionFactory=support.getSessionFactory(); PatientAccessDAO patientAccess = new PatientAccessDAO(); patientAccess.setSessionFactory(sessionFactory); List<String> localList = new ArrayList<String>(); localList.addAll(collectionList); collectionList.clear(); for (String collection:localList) { { log.error("updating all patients in collection "+collection); List<Object> rs = support.getPatientsForCollection(collection); if (rs.size()==0) return; //nothing to do for (Object result : rs) { String patientId = result.toString(); log.error("Calling to update patient from collection " + patientId); PatientDocument doc = patientAccess.getPatientDocument(patientId); SolrStorage.addPatientDocument(doc); } } } } public void addCollectionUpdate(String collection) { if (collection==null) return; // nothing to do; if (collectionList.contains(collection)) return; //already got it collectionList.add(collection); } }
Fixed submission log query
software/nbia-dao/src/gov/nih/nci/nbia/textsupport/PatientUpdater.java
Fixed submission log query
Java
bsd-3-clause
e9cd371d641e524ca419c665a36fd4622399dff6
0
ursjoss/scipamato,ursjoss/scipamato,ursjoss/scipamato,ursjoss/scipamato
package ch.difty.sipamato.persistance.jooq.search; import static ch.difty.sipamato.db.tables.Code.CODE; import static ch.difty.sipamato.db.tables.CodeClass.CODE_CLASS; import static ch.difty.sipamato.db.tables.CodeClassTr.CODE_CLASS_TR; import static ch.difty.sipamato.db.tables.CodeTr.CODE_TR; import static ch.difty.sipamato.db.tables.SearchCondition.SEARCH_CONDITION; import static ch.difty.sipamato.db.tables.SearchConditionCode.SEARCH_CONDITION_CODE; import static ch.difty.sipamato.db.tables.SearchExclusion.SEARCH_EXCLUSION; import static ch.difty.sipamato.db.tables.SearchOrder.SEARCH_ORDER; import static ch.difty.sipamato.db.tables.SearchTerm.SEARCH_TERM; import static org.jooq.impl.DSL.row; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.stream.Collectors; import org.apache.commons.collections4.CollectionUtils; import org.jooq.Condition; import org.jooq.Configuration; import org.jooq.DSLContext; import org.jooq.InsertValuesStep4; import org.jooq.InsertValuesStep6; import org.jooq.TableField; import org.jooq.impl.DSL; import org.jooq.impl.SQLDataType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; import ch.difty.sipamato.db.tables.records.SearchConditionCodeRecord; import ch.difty.sipamato.db.tables.records.SearchConditionRecord; import ch.difty.sipamato.db.tables.records.SearchOrderRecord; import ch.difty.sipamato.db.tables.records.SearchTermRecord; import ch.difty.sipamato.entity.Code; import ch.difty.sipamato.entity.SearchOrder; import ch.difty.sipamato.entity.filter.BooleanSearchTerm; import ch.difty.sipamato.entity.filter.IntegerSearchTerm; import ch.difty.sipamato.entity.filter.SearchCondition; import ch.difty.sipamato.entity.filter.SearchTerm; import ch.difty.sipamato.entity.filter.StringSearchTerm; import ch.difty.sipamato.lib.DateTimeService; import ch.difty.sipamato.lib.TranslationUtils; import ch.difty.sipamato.persistance.jooq.GenericFilterConditionMapper; import ch.difty.sipamato.persistance.jooq.InsertSetStepSetter; import ch.difty.sipamato.persistance.jooq.JooqEntityRepo; import ch.difty.sipamato.persistance.jooq.JooqSortMapper; import ch.difty.sipamato.persistance.jooq.UpdateSetStepSetter; import ch.difty.sipamato.service.Localization; /** * The repository to manage {@link SearchOrder}s - including the nested list of {@link SearchCondition}s and excluded paper ids. 
* * @author u.joss */ @Repository public class JooqSearchOrderRepo extends JooqEntityRepo<SearchOrderRecord, SearchOrder, Long, ch.difty.sipamato.db.tables.SearchOrder, SearchOrderRecordMapper, SearchOrderFilter> implements SearchOrderRepository { private static final long serialVersionUID = 1L; private boolean migrationDone = false; private static final Logger LOGGER = LoggerFactory.getLogger(JooqSearchOrderRepo.class); @Autowired public JooqSearchOrderRepo(DSLContext dsl, SearchOrderRecordMapper mapper, JooqSortMapper<SearchOrderRecord, SearchOrder, ch.difty.sipamato.db.tables.SearchOrder> sortMapper, GenericFilterConditionMapper<SearchOrderFilter> filterConditionMapper, DateTimeService dateTimeService, Localization localization, InsertSetStepSetter<SearchOrderRecord, SearchOrder> insertSetStepSetter, UpdateSetStepSetter<SearchOrderRecord, SearchOrder> updateSetStepSetter, Configuration jooqConfig) { super(dsl, mapper, sortMapper, filterConditionMapper, dateTimeService, localization, insertSetStepSetter, updateSetStepSetter, jooqConfig); } @Override protected Logger getLogger() { return LOGGER; } @Override protected Class<? extends SearchOrder> getEntityClass() { return SearchOrder.class; } @Override protected Class<? extends SearchOrderRecord> getRecordClass() { return SearchOrderRecord.class; } @Override protected ch.difty.sipamato.db.tables.SearchOrder getTable() { return SEARCH_ORDER; } @Override protected TableField<SearchOrderRecord, Long> getTableId() { return SEARCH_ORDER.ID; } @Override protected Long getIdFrom(SearchOrderRecord record) { return record.getId(); } @Override protected Long getIdFrom(SearchOrder entity) { return entity.getId(); } /** * Enriches the plain {@link SearchOrder} with nested entities, i.e. the {@link SearchCondition}s. */ @Override protected void enrichAssociatedEntitiesOf(final SearchOrder searchOrder) { adHocMigrationOfSearchCondition(); if (searchOrder != null && searchOrder.getId() != null) { fillSearchTermsInto(searchOrder, mapSearchTermsToSearchConditions(searchOrder)); addSearchTermLessConditionsOf(searchOrder); fillExcludedPaperIdsInto(searchOrder); fillCodesIntoSearchConditionsOf(searchOrder); } } private Map<Long, List<SearchTerm<?>>> mapSearchTermsToSearchConditions(final SearchOrder searchOrder) { final List<SearchTerm<?>> searchTerms = fetchSearchTermsForSearchOrderWithId(searchOrder.getId()); return searchTerms.stream().collect(Collectors.groupingBy(st -> st.getSearchConditionId())); } protected List<SearchTerm<?>> fetchSearchTermsForSearchOrderWithId(final long searchOrderId) { // @formatter:off return getDsl() .select( SEARCH_TERM.ID.as("id"), SEARCH_TERM.SEARCH_TERM_TYPE.as("stt"), SEARCH_TERM.SEARCH_CONDITION_ID.as("scid"), SEARCH_TERM.FIELD_NAME.as("fn"), SEARCH_TERM.RAW_VALUE.as("rv")) .from(SEARCH_TERM) .innerJoin(SEARCH_CONDITION) .on(SEARCH_CONDITION.SEARCH_CONDITION_ID.equal(SEARCH_TERM.SEARCH_CONDITION_ID)) .where(SEARCH_CONDITION.SEARCH_ORDER_ID.equal(searchOrderId)) .fetch(r -> SearchTerm.of((long) r.get("id"), (int) r.get("stt"), (long) r.get("scid"), (String) r.get("fn"), (String) r.get("rv"))); // @formatter:on } /* * Note: This method only adds searchConditions that have searchTerms. It will not add conditions that e.g. only have createdTerms or modifiedTerms. 
*/ private void fillSearchTermsInto(SearchOrder searchOrder, Map<Long, List<SearchTerm<?>>> map) { for (final Entry<Long, List<SearchTerm<?>>> entry : map.entrySet()) { final SearchCondition sc = new SearchCondition(entry.getKey()); for (final SearchTerm<?> st : entry.getValue()) { sc.addSearchTerm(st); } searchOrder.add(sc); } enrichSearchConditionsOf(searchOrder); } private Map<Long, List<SearchTerm<?>>> mapSearchTermsToSearchConditions(final SearchCondition searchCondition) { final List<SearchTerm<?>> searchTerms = fetchSearchTermsForSearchConditionWithId(searchCondition.getSearchConditionId()); return searchTerms.stream().collect(Collectors.groupingBy(st -> st.getSearchConditionId())); } protected List<SearchTerm<?>> fetchSearchTermsForSearchConditionWithId(final long searchConditionId) { // @formatter:off return getDsl() .select( SEARCH_TERM.ID.as("id"), SEARCH_TERM.SEARCH_TERM_TYPE.as("stt"), SEARCH_TERM.SEARCH_CONDITION_ID.as("scid"), SEARCH_TERM.FIELD_NAME.as("fn"), SEARCH_TERM.RAW_VALUE.as("rv")) .from(SEARCH_TERM) .innerJoin(SEARCH_CONDITION) .on(SEARCH_CONDITION.SEARCH_CONDITION_ID.equal(SEARCH_TERM.SEARCH_CONDITION_ID)) .where(SEARCH_CONDITION.SEARCH_CONDITION_ID.equal(searchConditionId)) .fetch(r -> SearchTerm.of((long) r.get("id"), (int) r.get("stt"), (long) r.get("scid"), (String) r.get("fn"), (String) r.get("rv"))); // @formatter:on } private void fillSearchTermsInto(SearchCondition searchCondition, Map<Long, List<SearchTerm<?>>> map) { for (final Entry<Long, List<SearchTerm<?>>> entry : map.entrySet()) { for (final SearchTerm<?> st : entry.getValue()) { searchCondition.addSearchTerm(st); } } } private void enrichSearchConditionsOf(final SearchOrder searchOrder) { if (searchOrder.getSearchConditions() != null) { for (final SearchCondition sc : searchOrder.getSearchConditions()) { if (sc != null && sc.getSearchConditionId() != null) { final SearchCondition persisted = fetchSearchConditionWithId(sc.getSearchConditionId()); if (persisted != null) { sc.setCreatedDisplayValue(persisted.getCreatedDisplayValue()); sc.setModifiedDisplayValue(persisted.getModifiedDisplayValue()); } } } } } protected SearchCondition fetchSearchConditionWithId(final Long scId) { return getDsl().selectFrom(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(scId)).fetchOneInto(SearchCondition.class); } /* * Taking care of searchConditions that do not have searchTerms */ private void addSearchTermLessConditionsOf(SearchOrder searchOrder) { if (searchOrder != null && searchOrder.getId() != null) { final Long searchOrderId = searchOrder.getId(); final List<Long> conditionIdsWithSearchTerms = findConditionIdsWithSearchTerms(searchOrderId); final List<SearchCondition> termLessConditions = findTermLessConditions(searchOrderId, conditionIdsWithSearchTerms); for (final SearchCondition sc : termLessConditions) { searchOrder.add(sc); } } } protected List<Long> findConditionIdsWithSearchTerms(final Long searchOrderId) { final List<Long> conditionIdsWithSearchTerms = getDsl().select(SEARCH_TERM.SEARCH_CONDITION_ID) .from(SEARCH_TERM) .innerJoin(SEARCH_CONDITION) .on(SEARCH_TERM.SEARCH_CONDITION_ID.eq(SEARCH_CONDITION.SEARCH_CONDITION_ID)) .where(SEARCH_CONDITION.SEARCH_ORDER_ID.eq(searchOrderId)) .fetchInto(Long.class); return conditionIdsWithSearchTerms; } protected List<SearchCondition> findTermLessConditions(final Long searchOrderId, final List<Long> conditionIdsWithSearchTerms) { final List<SearchCondition> termLessConditions = getDsl().selectFrom(SEARCH_CONDITION) 
.where(SEARCH_CONDITION.SEARCH_ORDER_ID.eq(searchOrderId)) .and(SEARCH_CONDITION.SEARCH_CONDITION_ID.notIn(conditionIdsWithSearchTerms)) .fetchInto(SearchCondition.class); return termLessConditions; } private void fillExcludedPaperIdsInto(SearchOrder searchOrder) { final List<Long> excludedPaperIds = fetchExcludedPaperIdsForSearchOrderWithId(searchOrder.getId()); searchOrder.setExcludedPaperIds(excludedPaperIds); } protected List<Long> fetchExcludedPaperIdsForSearchOrderWithId(final long searchOrderId) { // @formatter:off return getDsl() .select(SEARCH_EXCLUSION.PAPER_ID) .from(SEARCH_EXCLUSION) .where(SEARCH_EXCLUSION.SEARCH_ORDER_ID.equal(searchOrderId)) .fetch(r -> (Long) r.get(0)); // @formatter:on } private void fillCodesIntoSearchConditionsOf(SearchOrder searchOrder) { for (SearchCondition sc : searchOrder.getSearchConditions()) { fillCodesInto(sc); } } @Override protected void updateAssociatedEntities(final SearchOrder searchOrder) { storeSearchConditionsOf(searchOrder); storeExcludedIdsOf(searchOrder); } @Override protected void saveAssociatedEntitiesOf(final SearchOrder searchOrder) { storeSearchConditionsOf(searchOrder); storeExcludedIdsOf(searchOrder); } private void storeSearchConditionsOf(SearchOrder searchOrder) { storeExistingConditionsOf(searchOrder); deleteObsoleteConditionsFrom(searchOrder); } private void storeExistingConditionsOf(SearchOrder searchOrder) { final Long searchOrderId = searchOrder.getId(); for (final SearchCondition sc : searchOrder.getSearchConditions()) { Long searchConditionId = sc.getSearchConditionId(); if (searchConditionId == null) { addSearchCondition(sc, searchOrderId); } else { updateSearchCondition(sc, searchOrderId); } } } private void updateSearchTerm(final SearchTerm<?> st, final Long searchTermId, final Long searchConditionId) { final Condition idMatches = SEARCH_TERM.ID.eq(searchTermId); getDsl().update(SEARCH_TERM) .set(row(SEARCH_TERM.SEARCH_CONDITION_ID, SEARCH_TERM.SEARCH_TERM_TYPE, SEARCH_TERM.FIELD_NAME, SEARCH_TERM.RAW_VALUE, SEARCH_TERM.LAST_MODIFIED, SEARCH_TERM.LAST_MODIFIED_BY, SEARCH_TERM.VERSION), row(searchConditionId, st.getSearchTermType().getId(), st.getFieldName(), st.getRawSearchTerm(), getTs(), getUserId(), getDsl().select(SEARCH_TERM.VERSION).from(SEARCH_TERM).where(idMatches).fetchOneInto(Integer.class) + 1)) .where(idMatches) .execute(); } private void deleteObsoleteConditionsFrom(SearchOrder searchOrder) { final List<Long> conditionIds = searchOrder.getSearchConditions().stream().map(SearchCondition::getSearchConditionId).collect(Collectors.toList()); getDsl().deleteFrom(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_ORDER_ID.equal(searchOrder.getId()).and(SEARCH_CONDITION.SEARCH_CONDITION_ID.notIn(conditionIds))).execute(); for (final SearchCondition sc : searchOrder.getSearchConditions()) { removeObsoleteSearchTerms(sc, sc.getSearchConditionId()); } } private void storeExcludedIdsOf(SearchOrder searchOrder) { storeExistingExclusionsOf(searchOrder); deleteObsoleteExclusionsOf(searchOrder); } private void storeExistingExclusionsOf(SearchOrder searchOrder) { final long searchOrderId = searchOrder.getId(); final List<Long> saved = getDsl().select(SEARCH_EXCLUSION.PAPER_ID) .from(SEARCH_EXCLUSION) .where(SEARCH_EXCLUSION.SEARCH_ORDER_ID.eq(searchOrderId)) .and(SEARCH_EXCLUSION.PAPER_ID.in(searchOrder.getExcludedPaperIds())) .fetchInto(Long.class); final List<Long> unsaved = new ArrayList<>(searchOrder.getExcludedPaperIds()); unsaved.removeAll(saved); final Integer userId = getUserId(); for (final Long excludedId : 
unsaved) { getDsl().insertInto(SEARCH_EXCLUSION, SEARCH_EXCLUSION.SEARCH_ORDER_ID, SEARCH_EXCLUSION.PAPER_ID, SEARCH_EXCLUSION.CREATED_BY, SEARCH_EXCLUSION.LAST_MODIFIED_BY) .values(searchOrderId, excludedId, userId, userId) .execute(); } } private void deleteObsoleteExclusionsOf(SearchOrder searchOrder) { getDsl().deleteFrom(SEARCH_EXCLUSION).where(SEARCH_EXCLUSION.SEARCH_ORDER_ID.eq(searchOrder.getId())).and(SEARCH_EXCLUSION.PAPER_ID.notIn(searchOrder.getExcludedPaperIds())).execute(); } /** {@inheritDoc} */ @Override public SearchCondition addSearchCondition(SearchCondition searchCondition, long searchOrderId) { final Optional<SearchCondition> optionalPersisted = findEquivalentPersisted(searchCondition, searchOrderId); if (optionalPersisted.isPresent()) { return optionalPersisted.get(); } else { final Integer userId = getUserId(); final SearchConditionRecord searchConditionRecord = getDsl() .insertInto(SEARCH_CONDITION, SEARCH_CONDITION.SEARCH_ORDER_ID, SEARCH_CONDITION.CREATED_TERM, SEARCH_CONDITION.MODIFIED_TERM, SEARCH_CONDITION.CREATED_BY, SEARCH_CONDITION.LAST_MODIFIED_BY) .values(searchOrderId, searchCondition.getCreatedDisplayValue(), searchCondition.getModifiedDisplayValue(), userId, userId) .returning() .fetchOne(); persistSearchTerms(searchCondition, searchConditionRecord.getSearchConditionId()); persistCodes(searchCondition, searchConditionRecord.getSearchConditionId()); final SearchCondition persistedSearchCondition = getDsl().selectFrom(SEARCH_CONDITION) .where(SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(searchConditionRecord.getSearchConditionId())) .fetchOneInto(SearchCondition.class); fillSearchTermsInto(persistedSearchCondition, mapSearchTermsToSearchConditions(persistedSearchCondition)); fillCodesInto(persistedSearchCondition); return persistedSearchCondition; } } /** * Tries to load an already persisted instance of {@link SearchCondition} for the given search order (identified by the * <code>searchOrderId</code>) semantically covering the same searchConditions. * @param searchCondition the search condition we're trying to find the semantically identical persisted version for. 
* @param searchOrderId identifying the search order * @return optional of the persisted version (if found - empty othewise) */ private Optional<SearchCondition> findEquivalentPersisted(final SearchCondition searchCondition, final long searchOrderId) { final List<SearchCondition> persisted = getDsl().selectFrom(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_ORDER_ID.eq(searchOrderId)).fetchInto(SearchCondition.class); for (final SearchCondition sc : persisted) { Long searchConditionId = sc.getSearchConditionId(); fillSearchTermsInto(sc, mapSearchTermsToSearchConditions(sc)); fillCodesInto(sc); sc.setSearchConditionId(null); if (searchCondition.equals(sc)) { sc.setSearchConditionId(searchConditionId); return Optional.ofNullable(sc); } } return Optional.empty(); } private void persistSearchTerms(SearchCondition searchCondition, Long searchConditionId) { saveOrUpdateValidTerms(searchCondition, searchConditionId); removeObsoleteSearchTerms(searchCondition, searchConditionId); } private void fillCodesInto(SearchCondition searchCondition) { final List<Code> codes = fetchCodesForSearchConditionWithId(searchCondition); if (CollectionUtils.isNotEmpty(codes)) { searchCondition.addCodes(codes); } } protected List<Code> fetchCodesForSearchConditionWithId(final SearchCondition searchCondition) { final String localizationCode = getLocalization().getLocalization(); final List<Code> codes = getDsl() // @formatter:off .select(CODE.CODE_.as("C_ID") , DSL.coalesce(CODE_TR.NAME, TranslationUtils.NOT_TRANSL).as("C_NAME") , CODE_TR.COMMENT.as("C_COMMENT") , CODE.INTERNAL.as("C_INTERNAL") , CODE_CLASS.ID.as("CC_ID") , DSL.coalesce(CODE_CLASS_TR.NAME, TranslationUtils.NOT_TRANSL).as("CC_NAME") , DSL.coalesce(CODE_CLASS_TR.DESCRIPTION, TranslationUtils.NOT_TRANSL).as("CC_DESCRIPTION") , CODE.SORT) .from(SEARCH_CONDITION_CODE) .join(SEARCH_CONDITION).on(SEARCH_CONDITION_CODE.SEARCH_CONDITION_ID.equal(SEARCH_CONDITION.SEARCH_CONDITION_ID)) .join(CODE).on(SEARCH_CONDITION_CODE.CODE.equal(CODE.CODE_)) .join(CODE_CLASS).on(CODE.CODE_CLASS_ID.equal(CODE_CLASS.ID)) .leftOuterJoin(CODE_TR).on(CODE.CODE_.equal(CODE_TR.CODE).and(CODE_TR.LANG_CODE.equal(localizationCode))) .leftOuterJoin(CODE_CLASS_TR).on(CODE_CLASS.ID.equal(CODE_CLASS_TR.CODE_CLASS_ID).and(CODE_CLASS_TR.LANG_CODE.equal(localizationCode))) .where(SEARCH_CONDITION_CODE.SEARCH_CONDITION_ID.equal(searchCondition.getSearchConditionId())) .fetchInto(Code.class); // @formatter:on return codes; } private void saveOrUpdateValidTerms(SearchCondition searchCondition, Long searchConditionId) { InsertValuesStep6<SearchTermRecord, Long, Integer, String, String, Integer, Integer> insertStep = getDsl().insertInto(SEARCH_TERM, SEARCH_TERM.SEARCH_CONDITION_ID, SEARCH_TERM.SEARCH_TERM_TYPE, SEARCH_TERM.FIELD_NAME, SEARCH_TERM.RAW_VALUE, SEARCH_TERM.CREATED_BY, SEARCH_TERM.LAST_MODIFIED_BY); final Integer userId = getUserId(); boolean hasPendingInsert = false; for (final BooleanSearchTerm bst : searchCondition.getBooleanSearchTerms()) { final int typeId = bst.getSearchTermType().getId(); final String fieldName = bst.getFieldName(); final BooleanSearchTerm pbst = (BooleanSearchTerm) getPersistedTerm(searchConditionId, fieldName, BooleanSearchTerm.class, typeId); if (pbst != null) { updateSearchTerm(bst, pbst.getId(), searchConditionId); } else { insertStep = insertStep.values(searchConditionId, typeId, fieldName, bst.getRawSearchTerm(), userId, userId); hasPendingInsert = true; } } for (final IntegerSearchTerm ist : searchCondition.getIntegerSearchTerms()) { final int typeId = 
ist.getSearchTermType().getId(); final String fieldName = ist.getFieldName(); final IntegerSearchTerm pist = (IntegerSearchTerm) getPersistedTerm(searchConditionId, fieldName, BooleanSearchTerm.class, typeId); if (pist != null) { updateSearchTerm(ist, pist.getId(), searchConditionId); } else { insertStep = insertStep.values(searchConditionId, typeId, fieldName, ist.getRawSearchTerm(), userId, userId); hasPendingInsert = true; } } for (final StringSearchTerm sst : searchCondition.getStringSearchTerms()) { final int typeId = sst.getSearchTermType().getId(); final String fieldName = sst.getFieldName(); final StringSearchTerm pist = (StringSearchTerm) getPersistedTerm(searchConditionId, fieldName, BooleanSearchTerm.class, typeId); if (pist != null) { updateSearchTerm(sst, pist.getId(), searchConditionId); } else { insertStep = insertStep.values(searchConditionId, typeId, fieldName, sst.getRawSearchTerm(), userId, userId); hasPendingInsert = true; } } if (hasPendingInsert) insertStep.execute(); } private SearchTerm<?> getPersistedTerm(final Long searchConditionId, final String fieldName, final Class<BooleanSearchTerm> termClass, final int typeId) { return getDsl().select(SEARCH_TERM.ID, SEARCH_TERM.SEARCH_CONDITION_ID, SEARCH_TERM.FIELD_NAME, SEARCH_TERM.RAW_VALUE) .from(SEARCH_TERM) .where(SEARCH_TERM.SEARCH_CONDITION_ID.eq(searchConditionId)) .and(SEARCH_TERM.SEARCH_TERM_TYPE.eq(typeId)) .and(SEARCH_TERM.FIELD_NAME.eq(fieldName)) .fetchOneInto(termClass); } private void removeObsoleteSearchTerms(SearchCondition searchCondition, Long searchConditionId) { if (!searchCondition.getRemovedKeys().isEmpty()) { getDsl().deleteFrom(SEARCH_TERM).where(SEARCH_TERM.SEARCH_CONDITION_ID.eq(searchConditionId)).and(SEARCH_TERM.FIELD_NAME.in(searchCondition.getRemovedKeys())).execute(); searchCondition.clearRemovedKeys(); } } private void persistCodes(SearchCondition searchCondition, Long searchConditionId) { saveOrUpdateCodes(searchCondition, searchConditionId); removeObsoleteCodesFrom(searchCondition, searchConditionId); } private void saveOrUpdateCodes(SearchCondition searchCondition, Long searchConditionId) { if (!CollectionUtils.isEmpty(searchCondition.getCodes())) { InsertValuesStep4<SearchConditionCodeRecord, Long, String, Integer, Integer> step = getDsl().insertInto(SEARCH_CONDITION_CODE, SEARCH_CONDITION_CODE.SEARCH_CONDITION_ID, SEARCH_CONDITION_CODE.CODE, SEARCH_CONDITION_CODE.CREATED_BY, SEARCH_CONDITION_CODE.LAST_MODIFIED_BY); final Integer userId = getUserId(); for (final Code c : searchCondition.getCodes()) { step = step.values(searchConditionId, c.getCode(), userId, userId); } step.onDuplicateKeyIgnore().execute(); } } private void removeObsoleteCodesFrom(SearchCondition searchCondition, Long searchConditionId) { final List<String> codes = searchCondition.getCodes().stream().map(Code::getCode).collect(Collectors.toList()); getDsl().deleteFrom(SEARCH_CONDITION_CODE).where(SEARCH_CONDITION_CODE.SEARCH_CONDITION_ID.equal(searchConditionId).and(SEARCH_CONDITION_CODE.CODE.notIn(codes))).execute(); } /** {@inheritDoc} */ @Override public SearchCondition updateSearchCondition(SearchCondition searchCondition, long searchOrderId) { final Condition idMatches = SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(searchCondition.getSearchConditionId()); getDsl().update(SEARCH_CONDITION) .set(row(SEARCH_CONDITION.SEARCH_ORDER_ID, SEARCH_CONDITION.CREATED_TERM, SEARCH_CONDITION.MODIFIED_TERM, SEARCH_CONDITION.LAST_MODIFIED, SEARCH_CONDITION.LAST_MODIFIED_BY, SEARCH_CONDITION.VERSION), row(searchOrderId, 
searchCondition.getCreatedDisplayValue(), searchCondition.getModifiedDisplayValue(), getTs(), getUserId(), getDsl().select(SEARCH_CONDITION.VERSION).from(SEARCH_CONDITION).where(idMatches).fetchOneInto(Integer.class) + 1)) .where(idMatches) .execute(); persistSearchTerms(searchCondition, searchCondition.getSearchConditionId()); persistCodes(searchCondition, searchCondition.getSearchConditionId()); SearchCondition persistedSearchCondition = fetchSearchConditionWithId(searchCondition.getSearchConditionId()); fillSearchTermsInto(persistedSearchCondition, mapSearchTermsToSearchConditions(persistedSearchCondition)); fillCodesInto(persistedSearchCondition); return persistedSearchCondition; } /** {@inheritDoc} */ @Override public void deleteSearchConditionWithId(long searchConditionId) { getDsl().deleteFrom(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(searchConditionId)).execute(); } // Ad hoc migration until the test users databases are up to date. TODO remove in next version private void adHocMigrationOfSearchCondition() { if (!migrationDone) { boolean did = false; try { getDsl().select(SEARCH_CONDITION.CREATED_TERM).from(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(-1l)).fetch(); // all good, columns exists } catch (Exception ex) { getDsl().alterTable(SEARCH_CONDITION).addColumn(SEARCH_CONDITION.CREATED_TERM, SQLDataType.VARCHAR.nullable(true)).execute(); did = true; } try { getDsl().select(SEARCH_CONDITION.MODIFIED_TERM).from(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(-1l)).fetch(); // all good, columns exists } catch (Exception ex) { getDsl().alterTable(SEARCH_CONDITION).addColumn(SEARCH_CONDITION.MODIFIED_TERM, SQLDataType.VARCHAR.nullable(true)).execute(); did = true; } if (did) LOGGER.info("Fields CreatedTerm and ModifiedTerm added to SearchCondition"); } migrationDone = true; } }
implementation/sipamato/src/main/java/ch/difty/sipamato/persistance/jooq/search/JooqSearchOrderRepo.java
package ch.difty.sipamato.persistance.jooq.search; import static ch.difty.sipamato.db.tables.Code.CODE; import static ch.difty.sipamato.db.tables.CodeClass.CODE_CLASS; import static ch.difty.sipamato.db.tables.CodeClassTr.CODE_CLASS_TR; import static ch.difty.sipamato.db.tables.CodeTr.CODE_TR; import static ch.difty.sipamato.db.tables.SearchCondition.SEARCH_CONDITION; import static ch.difty.sipamato.db.tables.SearchConditionCode.SEARCH_CONDITION_CODE; import static ch.difty.sipamato.db.tables.SearchExclusion.SEARCH_EXCLUSION; import static ch.difty.sipamato.db.tables.SearchOrder.SEARCH_ORDER; import static ch.difty.sipamato.db.tables.SearchTerm.SEARCH_TERM; import static org.jooq.impl.DSL.row; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.stream.Collectors; import org.apache.commons.collections4.CollectionUtils; import org.jooq.Condition; import org.jooq.Configuration; import org.jooq.DSLContext; import org.jooq.InsertValuesStep4; import org.jooq.InsertValuesStep6; import org.jooq.TableField; import org.jooq.impl.DSL; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; import ch.difty.sipamato.db.tables.records.SearchConditionCodeRecord; import ch.difty.sipamato.db.tables.records.SearchConditionRecord; import ch.difty.sipamato.db.tables.records.SearchOrderRecord; import ch.difty.sipamato.db.tables.records.SearchTermRecord; import ch.difty.sipamato.entity.Code; import ch.difty.sipamato.entity.SearchOrder; import ch.difty.sipamato.entity.filter.BooleanSearchTerm; import ch.difty.sipamato.entity.filter.IntegerSearchTerm; import ch.difty.sipamato.entity.filter.SearchCondition; import ch.difty.sipamato.entity.filter.SearchTerm; import ch.difty.sipamato.entity.filter.StringSearchTerm; import ch.difty.sipamato.lib.DateTimeService; import ch.difty.sipamato.lib.TranslationUtils; import ch.difty.sipamato.persistance.jooq.GenericFilterConditionMapper; import ch.difty.sipamato.persistance.jooq.InsertSetStepSetter; import ch.difty.sipamato.persistance.jooq.JooqEntityRepo; import ch.difty.sipamato.persistance.jooq.JooqSortMapper; import ch.difty.sipamato.persistance.jooq.UpdateSetStepSetter; import ch.difty.sipamato.service.Localization; /** * The repository to manage {@link SearchOrder}s - including the nested list of {@link SearchCondition}s and excluded paper ids. 
* * @author u.joss */ @Repository public class JooqSearchOrderRepo extends JooqEntityRepo<SearchOrderRecord, SearchOrder, Long, ch.difty.sipamato.db.tables.SearchOrder, SearchOrderRecordMapper, SearchOrderFilter> implements SearchOrderRepository { private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(JooqSearchOrderRepo.class); @Autowired public JooqSearchOrderRepo(DSLContext dsl, SearchOrderRecordMapper mapper, JooqSortMapper<SearchOrderRecord, SearchOrder, ch.difty.sipamato.db.tables.SearchOrder> sortMapper, GenericFilterConditionMapper<SearchOrderFilter> filterConditionMapper, DateTimeService dateTimeService, Localization localization, InsertSetStepSetter<SearchOrderRecord, SearchOrder> insertSetStepSetter, UpdateSetStepSetter<SearchOrderRecord, SearchOrder> updateSetStepSetter, Configuration jooqConfig) { super(dsl, mapper, sortMapper, filterConditionMapper, dateTimeService, localization, insertSetStepSetter, updateSetStepSetter, jooqConfig); } @Override protected Logger getLogger() { return LOGGER; } @Override protected Class<? extends SearchOrder> getEntityClass() { return SearchOrder.class; } @Override protected Class<? extends SearchOrderRecord> getRecordClass() { return SearchOrderRecord.class; } @Override protected ch.difty.sipamato.db.tables.SearchOrder getTable() { return SEARCH_ORDER; } @Override protected TableField<SearchOrderRecord, Long> getTableId() { return SEARCH_ORDER.ID; } @Override protected Long getIdFrom(SearchOrderRecord record) { return record.getId(); } @Override protected Long getIdFrom(SearchOrder entity) { return entity.getId(); } /** * Enriches the plain {@link SearchOrder} with nested entities, i.e. the {@link SearchCondition}s. */ @Override protected void enrichAssociatedEntitiesOf(final SearchOrder searchOrder) { if (searchOrder != null && searchOrder.getId() != null) { fillSearchTermsInto(searchOrder, mapSearchTermsToSearchConditions(searchOrder)); addSearchTermLessConditionsOf(searchOrder); fillExcludedPaperIdsInto(searchOrder); fillCodesIntoSearchConditionsOf(searchOrder); } } private Map<Long, List<SearchTerm<?>>> mapSearchTermsToSearchConditions(final SearchOrder searchOrder) { final List<SearchTerm<?>> searchTerms = fetchSearchTermsForSearchOrderWithId(searchOrder.getId()); return searchTerms.stream().collect(Collectors.groupingBy(st -> st.getSearchConditionId())); } protected List<SearchTerm<?>> fetchSearchTermsForSearchOrderWithId(final long searchOrderId) { // @formatter:off return getDsl() .select( SEARCH_TERM.ID.as("id"), SEARCH_TERM.SEARCH_TERM_TYPE.as("stt"), SEARCH_TERM.SEARCH_CONDITION_ID.as("scid"), SEARCH_TERM.FIELD_NAME.as("fn"), SEARCH_TERM.RAW_VALUE.as("rv")) .from(SEARCH_TERM) .innerJoin(SEARCH_CONDITION) .on(SEARCH_CONDITION.SEARCH_CONDITION_ID.equal(SEARCH_TERM.SEARCH_CONDITION_ID)) .where(SEARCH_CONDITION.SEARCH_ORDER_ID.equal(searchOrderId)) .fetch(r -> SearchTerm.of((long) r.get("id"), (int) r.get("stt"), (long) r.get("scid"), (String) r.get("fn"), (String) r.get("rv"))); // @formatter:on } /* * Note: This method only adds searchConditions that have searchTerms. It will not add conditions that e.g. only have createdTerms or modifiedTerms. 
*/ private void fillSearchTermsInto(SearchOrder searchOrder, Map<Long, List<SearchTerm<?>>> map) { for (final Entry<Long, List<SearchTerm<?>>> entry : map.entrySet()) { final SearchCondition sc = new SearchCondition(entry.getKey()); for (final SearchTerm<?> st : entry.getValue()) { sc.addSearchTerm(st); } searchOrder.add(sc); } enrichSearchConditionsOf(searchOrder); } private Map<Long, List<SearchTerm<?>>> mapSearchTermsToSearchConditions(final SearchCondition searchCondition) { final List<SearchTerm<?>> searchTerms = fetchSearchTermsForSearchConditionWithId(searchCondition.getSearchConditionId()); return searchTerms.stream().collect(Collectors.groupingBy(st -> st.getSearchConditionId())); } protected List<SearchTerm<?>> fetchSearchTermsForSearchConditionWithId(final long searchConditionId) { // @formatter:off return getDsl() .select( SEARCH_TERM.ID.as("id"), SEARCH_TERM.SEARCH_TERM_TYPE.as("stt"), SEARCH_TERM.SEARCH_CONDITION_ID.as("scid"), SEARCH_TERM.FIELD_NAME.as("fn"), SEARCH_TERM.RAW_VALUE.as("rv")) .from(SEARCH_TERM) .innerJoin(SEARCH_CONDITION) .on(SEARCH_CONDITION.SEARCH_CONDITION_ID.equal(SEARCH_TERM.SEARCH_CONDITION_ID)) .where(SEARCH_CONDITION.SEARCH_CONDITION_ID.equal(searchConditionId)) .fetch(r -> SearchTerm.of((long) r.get("id"), (int) r.get("stt"), (long) r.get("scid"), (String) r.get("fn"), (String) r.get("rv"))); // @formatter:on } private void fillSearchTermsInto(SearchCondition searchCondition, Map<Long, List<SearchTerm<?>>> map) { for (final Entry<Long, List<SearchTerm<?>>> entry : map.entrySet()) { for (final SearchTerm<?> st : entry.getValue()) { searchCondition.addSearchTerm(st); } } } private void enrichSearchConditionsOf(final SearchOrder searchOrder) { if (searchOrder.getSearchConditions() != null) { for (final SearchCondition sc : searchOrder.getSearchConditions()) { if (sc != null && sc.getSearchConditionId() != null) { final SearchCondition persisted = fetchSearchConditionWithId(sc.getSearchConditionId()); if (persisted != null) { sc.setCreatedDisplayValue(persisted.getCreatedDisplayValue()); sc.setModifiedDisplayValue(persisted.getModifiedDisplayValue()); } } } } } protected SearchCondition fetchSearchConditionWithId(final Long scId) { return getDsl().selectFrom(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(scId)).fetchOneInto(SearchCondition.class); } /* * Taking care of searchConditions that do not have searchTerms */ private void addSearchTermLessConditionsOf(SearchOrder searchOrder) { if (searchOrder != null && searchOrder.getId() != null) { final Long searchOrderId = searchOrder.getId(); final List<Long> conditionIdsWithSearchTerms = findConditionIdsWithSearchTerms(searchOrderId); final List<SearchCondition> termLessConditions = findTermLessConditions(searchOrderId, conditionIdsWithSearchTerms); for (final SearchCondition sc : termLessConditions) { searchOrder.add(sc); } } } protected List<Long> findConditionIdsWithSearchTerms(final Long searchOrderId) { final List<Long> conditionIdsWithSearchTerms = getDsl().select(SEARCH_TERM.SEARCH_CONDITION_ID) .from(SEARCH_TERM) .innerJoin(SEARCH_CONDITION) .on(SEARCH_TERM.SEARCH_CONDITION_ID.eq(SEARCH_CONDITION.SEARCH_CONDITION_ID)) .where(SEARCH_CONDITION.SEARCH_ORDER_ID.eq(searchOrderId)) .fetchInto(Long.class); return conditionIdsWithSearchTerms; } protected List<SearchCondition> findTermLessConditions(final Long searchOrderId, final List<Long> conditionIdsWithSearchTerms) { final List<SearchCondition> termLessConditions = getDsl().selectFrom(SEARCH_CONDITION) 
.where(SEARCH_CONDITION.SEARCH_ORDER_ID.eq(searchOrderId)) .and(SEARCH_CONDITION.SEARCH_CONDITION_ID.notIn(conditionIdsWithSearchTerms)) .fetchInto(SearchCondition.class); return termLessConditions; } private void fillExcludedPaperIdsInto(SearchOrder searchOrder) { final List<Long> excludedPaperIds = fetchExcludedPaperIdsForSearchOrderWithId(searchOrder.getId()); searchOrder.setExcludedPaperIds(excludedPaperIds); } protected List<Long> fetchExcludedPaperIdsForSearchOrderWithId(final long searchOrderId) { // @formatter:off return getDsl() .select(SEARCH_EXCLUSION.PAPER_ID) .from(SEARCH_EXCLUSION) .where(SEARCH_EXCLUSION.SEARCH_ORDER_ID.equal(searchOrderId)) .fetch(r -> (Long) r.get(0)); // @formatter:on } private void fillCodesIntoSearchConditionsOf(SearchOrder searchOrder) { for (SearchCondition sc : searchOrder.getSearchConditions()) { fillCodesInto(sc); } } @Override protected void updateAssociatedEntities(final SearchOrder searchOrder) { storeSearchConditionsOf(searchOrder); storeExcludedIdsOf(searchOrder); } @Override protected void saveAssociatedEntitiesOf(final SearchOrder searchOrder) { storeSearchConditionsOf(searchOrder); storeExcludedIdsOf(searchOrder); } private void storeSearchConditionsOf(SearchOrder searchOrder) { storeExistingConditionsOf(searchOrder); deleteObsoleteConditionsFrom(searchOrder); } private void storeExistingConditionsOf(SearchOrder searchOrder) { final Long searchOrderId = searchOrder.getId(); for (final SearchCondition sc : searchOrder.getSearchConditions()) { Long searchConditionId = sc.getSearchConditionId(); if (searchConditionId == null) { addSearchCondition(sc, searchOrderId); } else { updateSearchCondition(sc, searchOrderId); } } } private void updateSearchTerm(final SearchTerm<?> st, final Long searchTermId, final Long searchConditionId) { final Condition idMatches = SEARCH_TERM.ID.eq(searchTermId); getDsl().update(SEARCH_TERM) .set(row(SEARCH_TERM.SEARCH_CONDITION_ID, SEARCH_TERM.SEARCH_TERM_TYPE, SEARCH_TERM.FIELD_NAME, SEARCH_TERM.RAW_VALUE, SEARCH_TERM.LAST_MODIFIED, SEARCH_TERM.LAST_MODIFIED_BY, SEARCH_TERM.VERSION), row(searchConditionId, st.getSearchTermType().getId(), st.getFieldName(), st.getRawSearchTerm(), getTs(), getUserId(), getDsl().select(SEARCH_TERM.VERSION).from(SEARCH_TERM).where(idMatches).fetchOneInto(Integer.class) + 1)) .where(idMatches) .execute(); } private void deleteObsoleteConditionsFrom(SearchOrder searchOrder) { final List<Long> conditionIds = searchOrder.getSearchConditions().stream().map(SearchCondition::getSearchConditionId).collect(Collectors.toList()); getDsl().deleteFrom(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_ORDER_ID.equal(searchOrder.getId()).and(SEARCH_CONDITION.SEARCH_CONDITION_ID.notIn(conditionIds))).execute(); for (final SearchCondition sc : searchOrder.getSearchConditions()) { removeObsoleteSearchTerms(sc, sc.getSearchConditionId()); } } private void storeExcludedIdsOf(SearchOrder searchOrder) { storeExistingExclusionsOf(searchOrder); deleteObsoleteExclusionsOf(searchOrder); } private void storeExistingExclusionsOf(SearchOrder searchOrder) { final long searchOrderId = searchOrder.getId(); final List<Long> saved = getDsl().select(SEARCH_EXCLUSION.PAPER_ID) .from(SEARCH_EXCLUSION) .where(SEARCH_EXCLUSION.SEARCH_ORDER_ID.eq(searchOrderId)) .and(SEARCH_EXCLUSION.PAPER_ID.in(searchOrder.getExcludedPaperIds())) .fetchInto(Long.class); final List<Long> unsaved = new ArrayList<>(searchOrder.getExcludedPaperIds()); unsaved.removeAll(saved); final Integer userId = getUserId(); for (final Long excludedId : 
unsaved) { getDsl().insertInto(SEARCH_EXCLUSION, SEARCH_EXCLUSION.SEARCH_ORDER_ID, SEARCH_EXCLUSION.PAPER_ID, SEARCH_EXCLUSION.CREATED_BY, SEARCH_EXCLUSION.LAST_MODIFIED_BY) .values(searchOrderId, excludedId, userId, userId) .execute(); } } private void deleteObsoleteExclusionsOf(SearchOrder searchOrder) { getDsl().deleteFrom(SEARCH_EXCLUSION).where(SEARCH_EXCLUSION.SEARCH_ORDER_ID.eq(searchOrder.getId())).and(SEARCH_EXCLUSION.PAPER_ID.notIn(searchOrder.getExcludedPaperIds())).execute(); } /** {@inheritDoc} */ @Override public SearchCondition addSearchCondition(SearchCondition searchCondition, long searchOrderId) { final Optional<SearchCondition> optionalPersisted = findEquivalentPersisted(searchCondition, searchOrderId); if (optionalPersisted.isPresent()) { return optionalPersisted.get(); } else { final Integer userId = getUserId(); final SearchConditionRecord searchConditionRecord = getDsl() .insertInto(SEARCH_CONDITION, SEARCH_CONDITION.SEARCH_ORDER_ID, SEARCH_CONDITION.CREATED_TERM, SEARCH_CONDITION.MODIFIED_TERM, SEARCH_CONDITION.CREATED_BY, SEARCH_CONDITION.LAST_MODIFIED_BY) .values(searchOrderId, searchCondition.getCreatedDisplayValue(), searchCondition.getModifiedDisplayValue(), userId, userId) .returning() .fetchOne(); persistSearchTerms(searchCondition, searchConditionRecord.getSearchConditionId()); persistCodes(searchCondition, searchConditionRecord.getSearchConditionId()); final SearchCondition persistedSearchCondition = getDsl().selectFrom(SEARCH_CONDITION) .where(SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(searchConditionRecord.getSearchConditionId())) .fetchOneInto(SearchCondition.class); fillSearchTermsInto(persistedSearchCondition, mapSearchTermsToSearchConditions(persistedSearchCondition)); fillCodesInto(persistedSearchCondition); return persistedSearchCondition; } } /** * Tries to load an already persisted instance of {@link SearchCondition} for the given search order (identified by the * <code>searchOrderId</code>) semantically covering the same searchConditions. * @param searchCondition the search condition we're trying to find the semantically identical persisted version for. 
* @param searchOrderId identifying the search order * @return optional of the persisted version (if found - empty othewise) */ private Optional<SearchCondition> findEquivalentPersisted(final SearchCondition searchCondition, final long searchOrderId) { final List<SearchCondition> persisted = getDsl().selectFrom(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_ORDER_ID.eq(searchOrderId)).fetchInto(SearchCondition.class); for (final SearchCondition sc : persisted) { Long searchConditionId = sc.getSearchConditionId(); fillSearchTermsInto(sc, mapSearchTermsToSearchConditions(sc)); fillCodesInto(sc); sc.setSearchConditionId(null); if (searchCondition.equals(sc)) { sc.setSearchConditionId(searchConditionId); return Optional.ofNullable(sc); } } return Optional.empty(); } private void persistSearchTerms(SearchCondition searchCondition, Long searchConditionId) { saveOrUpdateValidTerms(searchCondition, searchConditionId); removeObsoleteSearchTerms(searchCondition, searchConditionId); } private void fillCodesInto(SearchCondition searchCondition) { final List<Code> codes = fetchCodesForSearchConditionWithId(searchCondition); if (CollectionUtils.isNotEmpty(codes)) { searchCondition.addCodes(codes); } } protected List<Code> fetchCodesForSearchConditionWithId(final SearchCondition searchCondition) { final String localizationCode = getLocalization().getLocalization(); final List<Code> codes = getDsl() // @formatter:off .select(CODE.CODE_.as("C_ID") , DSL.coalesce(CODE_TR.NAME, TranslationUtils.NOT_TRANSL).as("C_NAME") , CODE_TR.COMMENT.as("C_COMMENT") , CODE.INTERNAL.as("C_INTERNAL") , CODE_CLASS.ID.as("CC_ID") , DSL.coalesce(CODE_CLASS_TR.NAME, TranslationUtils.NOT_TRANSL).as("CC_NAME") , DSL.coalesce(CODE_CLASS_TR.DESCRIPTION, TranslationUtils.NOT_TRANSL).as("CC_DESCRIPTION") , CODE.SORT) .from(SEARCH_CONDITION_CODE) .join(SEARCH_CONDITION).on(SEARCH_CONDITION_CODE.SEARCH_CONDITION_ID.equal(SEARCH_CONDITION.SEARCH_CONDITION_ID)) .join(CODE).on(SEARCH_CONDITION_CODE.CODE.equal(CODE.CODE_)) .join(CODE_CLASS).on(CODE.CODE_CLASS_ID.equal(CODE_CLASS.ID)) .leftOuterJoin(CODE_TR).on(CODE.CODE_.equal(CODE_TR.CODE).and(CODE_TR.LANG_CODE.equal(localizationCode))) .leftOuterJoin(CODE_CLASS_TR).on(CODE_CLASS.ID.equal(CODE_CLASS_TR.CODE_CLASS_ID).and(CODE_CLASS_TR.LANG_CODE.equal(localizationCode))) .where(SEARCH_CONDITION_CODE.SEARCH_CONDITION_ID.equal(searchCondition.getSearchConditionId())) .fetchInto(Code.class); // @formatter:on return codes; } private void saveOrUpdateValidTerms(SearchCondition searchCondition, Long searchConditionId) { InsertValuesStep6<SearchTermRecord, Long, Integer, String, String, Integer, Integer> insertStep = getDsl().insertInto(SEARCH_TERM, SEARCH_TERM.SEARCH_CONDITION_ID, SEARCH_TERM.SEARCH_TERM_TYPE, SEARCH_TERM.FIELD_NAME, SEARCH_TERM.RAW_VALUE, SEARCH_TERM.CREATED_BY, SEARCH_TERM.LAST_MODIFIED_BY); final Integer userId = getUserId(); boolean hasPendingInsert = false; for (final BooleanSearchTerm bst : searchCondition.getBooleanSearchTerms()) { final int typeId = bst.getSearchTermType().getId(); final String fieldName = bst.getFieldName(); final BooleanSearchTerm pbst = (BooleanSearchTerm) getPersistedTerm(searchConditionId, fieldName, BooleanSearchTerm.class, typeId); if (pbst != null) { updateSearchTerm(bst, pbst.getId(), searchConditionId); } else { insertStep = insertStep.values(searchConditionId, typeId, fieldName, bst.getRawSearchTerm(), userId, userId); hasPendingInsert = true; } } for (final IntegerSearchTerm ist : searchCondition.getIntegerSearchTerms()) { final int typeId = 
ist.getSearchTermType().getId(); final String fieldName = ist.getFieldName(); final IntegerSearchTerm pist = (IntegerSearchTerm) getPersistedTerm(searchConditionId, fieldName, BooleanSearchTerm.class, typeId); if (pist != null) { updateSearchTerm(ist, pist.getId(), searchConditionId); } else { insertStep = insertStep.values(searchConditionId, typeId, fieldName, ist.getRawSearchTerm(), userId, userId); hasPendingInsert = true; } } for (final StringSearchTerm sst : searchCondition.getStringSearchTerms()) { final int typeId = sst.getSearchTermType().getId(); final String fieldName = sst.getFieldName(); final StringSearchTerm pist = (StringSearchTerm) getPersistedTerm(searchConditionId, fieldName, BooleanSearchTerm.class, typeId); if (pist != null) { updateSearchTerm(sst, pist.getId(), searchConditionId); } else { insertStep = insertStep.values(searchConditionId, typeId, fieldName, sst.getRawSearchTerm(), userId, userId); hasPendingInsert = true; } } if (hasPendingInsert) insertStep.execute(); } private SearchTerm<?> getPersistedTerm(final Long searchConditionId, final String fieldName, final Class<BooleanSearchTerm> termClass, final int typeId) { return getDsl().select(SEARCH_TERM.ID, SEARCH_TERM.SEARCH_CONDITION_ID, SEARCH_TERM.FIELD_NAME, SEARCH_TERM.RAW_VALUE) .from(SEARCH_TERM) .where(SEARCH_TERM.SEARCH_CONDITION_ID.eq(searchConditionId)) .and(SEARCH_TERM.SEARCH_TERM_TYPE.eq(typeId)) .and(SEARCH_TERM.FIELD_NAME.eq(fieldName)) .fetchOneInto(termClass); } private void removeObsoleteSearchTerms(SearchCondition searchCondition, Long searchConditionId) { if (!searchCondition.getRemovedKeys().isEmpty()) { getDsl().deleteFrom(SEARCH_TERM).where(SEARCH_TERM.SEARCH_CONDITION_ID.eq(searchConditionId)).and(SEARCH_TERM.FIELD_NAME.in(searchCondition.getRemovedKeys())).execute(); searchCondition.clearRemovedKeys(); } } private void persistCodes(SearchCondition searchCondition, Long searchConditionId) { saveOrUpdateCodes(searchCondition, searchConditionId); removeObsoleteCodesFrom(searchCondition, searchConditionId); } private void saveOrUpdateCodes(SearchCondition searchCondition, Long searchConditionId) { if (!CollectionUtils.isEmpty(searchCondition.getCodes())) { InsertValuesStep4<SearchConditionCodeRecord, Long, String, Integer, Integer> step = getDsl().insertInto(SEARCH_CONDITION_CODE, SEARCH_CONDITION_CODE.SEARCH_CONDITION_ID, SEARCH_CONDITION_CODE.CODE, SEARCH_CONDITION_CODE.CREATED_BY, SEARCH_CONDITION_CODE.LAST_MODIFIED_BY); final Integer userId = getUserId(); for (final Code c : searchCondition.getCodes()) { step = step.values(searchConditionId, c.getCode(), userId, userId); } step.onDuplicateKeyIgnore().execute(); } } private void removeObsoleteCodesFrom(SearchCondition searchCondition, Long searchConditionId) { final List<String> codes = searchCondition.getCodes().stream().map(Code::getCode).collect(Collectors.toList()); getDsl().deleteFrom(SEARCH_CONDITION_CODE).where(SEARCH_CONDITION_CODE.SEARCH_CONDITION_ID.equal(searchConditionId).and(SEARCH_CONDITION_CODE.CODE.notIn(codes))).execute(); } /** {@inheritDoc} */ @Override public SearchCondition updateSearchCondition(SearchCondition searchCondition, long searchOrderId) { final Condition idMatches = SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(searchCondition.getSearchConditionId()); getDsl().update(SEARCH_CONDITION) .set(row(SEARCH_CONDITION.SEARCH_ORDER_ID, SEARCH_CONDITION.CREATED_TERM, SEARCH_CONDITION.MODIFIED_TERM, SEARCH_CONDITION.LAST_MODIFIED, SEARCH_CONDITION.LAST_MODIFIED_BY, SEARCH_CONDITION.VERSION), row(searchOrderId, 
searchCondition.getCreatedDisplayValue(), searchCondition.getModifiedDisplayValue(), getTs(), getUserId(), getDsl().select(SEARCH_CONDITION.VERSION).from(SEARCH_CONDITION).where(idMatches).fetchOneInto(Integer.class) + 1)) .where(idMatches) .execute(); persistSearchTerms(searchCondition, searchCondition.getSearchConditionId()); persistCodes(searchCondition, searchCondition.getSearchConditionId()); SearchCondition persistedSearchCondition = fetchSearchConditionWithId(searchCondition.getSearchConditionId()); fillSearchTermsInto(persistedSearchCondition, mapSearchTermsToSearchConditions(persistedSearchCondition)); fillCodesInto(persistedSearchCondition); return persistedSearchCondition; } /** {@inheritDoc} */ @Override public void deleteSearchConditionWithId(long searchConditionId) { getDsl().deleteFrom(SEARCH_CONDITION).where(SEARCH_CONDITION.SEARCH_CONDITION_ID.eq(searchConditionId)).execute(); } }
Ad hoc db migration of SEARCH_CONDITION
implementation/sipamato/src/main/java/ch/difty/sipamato/persistance/jooq/search/JooqSearchOrderRepo.java
Ad hoc db migration of SEARCH_CONDITION
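The persistSearchTerms logic in the record above follows an insert-or-update pattern: look up the persisted term by (searchConditionId, fieldName, typeId), update it in place if found, otherwise queue an insert and flush all queued rows in one batched statement at the end. The following is a minimal, self-contained sketch of that decision flow only; the TermValue record and TermStore interface are hypothetical stand-ins for illustration and are not part of the repository code shown above.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

// Hypothetical stand-ins used only to illustrate the save-or-update flow.
record TermValue(long searchConditionId, int typeId, String fieldName, String rawValue) {}

interface TermStore {
    Optional<Long> findTermId(long searchConditionId, String fieldName, int typeId);
    void update(long termId, String rawValue);
    void insertAll(List<TermValue> pendingInserts); // batched, like insertStep.execute()
}

class SaveOrUpdateSketch {
    static void persist(TermStore store, long searchConditionId, List<TermValue> terms) {
        List<TermValue> pending = new ArrayList<>();
        for (TermValue t : terms) {
            // update the existing row if one matches; otherwise queue an insert
            store.findTermId(searchConditionId, t.fieldName(), t.typeId())
                 .ifPresentOrElse(
                     id -> store.update(id, t.rawValue()),
                     () -> pending.add(t));
        }
        if (!pending.isEmpty()) {
            store.insertAll(pending); // single batched INSERT, mirroring hasPendingInsert/insertStep.execute()
        }
    }
}
```

Batching the inserts and issuing them once keeps the number of round trips independent of the number of new terms, which is the same design choice the jOOQ code above makes with a single InsertValuesStep.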
Java
mit
43ccc0e276d4db5de560b8de06c8c66fc69591d3
0
domingos86/AntennaPod,mfietz/AntennaPod,mfietz/AntennaPod,mfietz/AntennaPod,twiceyuan/AntennaPod,johnjohndoe/AntennaPod,mfietz/AntennaPod,twiceyuan/AntennaPod,johnjohndoe/AntennaPod,domingos86/AntennaPod,domingos86/AntennaPod,domingos86/AntennaPod,twiceyuan/AntennaPod,twiceyuan/AntennaPod,johnjohndoe/AntennaPod,johnjohndoe/AntennaPod
package de.danoeh.antennapod.core.syndication.namespace;

import android.text.TextUtils;
import android.util.Log;

import org.xml.sax.Attributes;

import java.util.concurrent.TimeUnit;

import de.danoeh.antennapod.core.feed.FeedImage;
import de.danoeh.antennapod.core.syndication.handler.HandlerState;

public class NSITunes extends Namespace {

    public static final String NSTAG = "itunes";
    public static final String NSURI = "http://www.itunes.com/dtds/podcast-1.0.dtd";

    private static final String IMAGE = "image";
    private static final String IMAGE_TITLE = "image";
    private static final String IMAGE_HREF = "href";

    private static final String AUTHOR = "author";
    public static final String DURATION = "duration";
    public static final String SUBTITLE = "subtitle";
    public static final String SUMMARY = "summary";

    @Override
    public SyndElement handleElementStart(String localName, HandlerState state, Attributes attributes) {
        if (IMAGE.equals(localName)) {
            FeedImage image = new FeedImage();
            image.setTitle(IMAGE_TITLE);
            image.setDownload_url(attributes.getValue(IMAGE_HREF));

            if (state.getCurrentItem() != null) {
                // this is an items image
                image.setTitle(state.getCurrentItem().getTitle() + IMAGE_TITLE);
                image.setOwner(state.getCurrentItem());
                state.getCurrentItem().setImage(image);
            } else {
                // this is the feed image
                // prefer to all other images
                if (!TextUtils.isEmpty(image.getDownload_url())) {
                    image.setOwner(state.getFeed());
                    state.getFeed().setImage(image);
                }
            }
        }
        return new SyndElement(localName, this);
    }

    @Override
    public void handleElementEnd(String localName, HandlerState state) {
        if (state.getContentBuf() == null) {
            return;
        }

        if (AUTHOR.equals(localName)) {
            if (state.getFeed() != null) {
                String author = state.getContentBuf().toString();
                state.getFeed().setAuthor(author);
            }
        } else if (DURATION.equals(localName)) {
            String durationStr = state.getContentBuf().toString();
            if (TextUtils.isEmpty(durationStr)) {
                return;
            }
            String[] parts = durationStr.trim().split(":");
            try {
                int durationMs = 0;
                if (parts.length == 2) {
                    durationMs += TimeUnit.MINUTES.toMillis(Long.parseLong(parts[0]))
                            + TimeUnit.SECONDS.toMillis((long) Float.parseFloat(parts[1]));
                } else if (parts.length >= 3) {
                    durationMs += TimeUnit.HOURS.toMillis(Long.parseLong(parts[0]))
                            + TimeUnit.MINUTES.toMillis(Long.parseLong(parts[1]))
                            + TimeUnit.SECONDS.toMillis((long) Float.parseFloat(parts[2]));
                } else {
                    return;
                }
                state.getTempObjects().put(DURATION, durationMs);
            } catch (NumberFormatException e) {
                Log.e(NSTAG, "Duration \"" + durationStr + "\" could not be parsed");
            }
        } else if (SUBTITLE.equals(localName)) {
            String subtitle = state.getContentBuf().toString();
            if (TextUtils.isEmpty(subtitle)) {
                return;
            }
            if (state.getCurrentItem() != null) {
                if (TextUtils.isEmpty(state.getCurrentItem().getDescription())) {
                    state.getCurrentItem().setDescription(subtitle);
                }
            } else {
                if (state.getFeed() != null && TextUtils.isEmpty(state.getFeed().getDescription())) {
                    state.getFeed().setDescription(subtitle);
                }
            }
        } else if (SUMMARY.equals(localName)) {
            String summary = state.getContentBuf().toString();
            if (TextUtils.isEmpty(summary)) {
                return;
            }
            if (state.getCurrentItem() != null
                    && (TextUtils.isEmpty(state.getCurrentItem().getDescription())
                        || state.getCurrentItem().getDescription().length() * 1.25 < summary.length())) {
                state.getCurrentItem().setDescription(summary);
            } else if (state.getFeed() != null) {
                state.getFeed().setDescription(summary);
            }
        }
    }

}
core/src/main/java/de/danoeh/antennapod/core/syndication/namespace/NSITunes.java
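The DURATION handler in the NSITunes parser above converts "HH:MM:SS" or "MM:SS" strings into milliseconds via TimeUnit. As a quick standalone sanity check of that arithmetic (not part of the parser itself):

```java
import java.util.concurrent.TimeUnit;

public class DurationCheck {
    public static void main(String[] args) {
        // "1:02:30" -> 1 h + 2 min + 30 s = 3,750,000 ms
        long ms = TimeUnit.HOURS.toMillis(1)
                + TimeUnit.MINUTES.toMillis(2)
                + TimeUnit.SECONDS.toMillis((long) Float.parseFloat("30"));
        System.out.println(ms); // prints 3750000
    }
}
```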
package de.danoeh.antennapod.core.syndication.namespace;

import android.text.TextUtils;
import android.util.Log;

import org.xml.sax.Attributes;

import java.util.concurrent.TimeUnit;

import de.danoeh.antennapod.core.feed.FeedImage;
import de.danoeh.antennapod.core.syndication.handler.HandlerState;

public class NSITunes extends Namespace {

    public static final String NSTAG = "itunes";
    public static final String NSURI = "http://www.itunes.com/dtds/podcast-1.0.dtd";

    private static final String IMAGE = "image";
    private static final String IMAGE_TITLE = "image";
    private static final String IMAGE_HREF = "href";

    private static final String AUTHOR = "author";
    public static final String DURATION = "duration";
    public static final String SUBTITLE = "subtitle";
    public static final String SUMMARY = "summary";

    @Override
    public SyndElement handleElementStart(String localName, HandlerState state, Attributes attributes) {
        if (IMAGE.equals(localName)) {
            FeedImage image = new FeedImage();
            image.setTitle(IMAGE_TITLE);
            image.setDownload_url(attributes.getValue(IMAGE_HREF));

            if (state.getCurrentItem() != null) {
                // this is an items image
                image.setTitle(state.getCurrentItem().getTitle() + IMAGE_TITLE);
                image.setOwner(state.getCurrentItem());
                state.getCurrentItem().setImage(image);
            } else {
                // this is the feed image
                // prefer to all other images
                if (!TextUtils.isEmpty(image.getDownload_url())) {
                    image.setOwner(state.getFeed());
                    state.getFeed().setImage(image);
                }
            }
        }
        return new SyndElement(localName, this);
    }

    @Override
    public void handleElementEnd(String localName, HandlerState state) {
        if (state.getContentBuf() == null) {
            return;
        }

        if (AUTHOR.equals(localName)) {
            if (state.getFeed() != null) {
                String author = state.getContentBuf().toString();
                state.getFeed().setAuthor(author);
            }
        } else if (DURATION.equals(localName)) {
            String durationStr = state.getContentBuf().toString();
            if (TextUtils.isEmpty(durationStr)) {
                return;
            }
            String[] parts = durationStr.trim().split(":");
            try {
                int durationMs = 0;
                if (parts.length == 2) {
                    durationMs += TimeUnit.MINUTES.toMillis(Long.parseLong(parts[0]))
                            + TimeUnit.SECONDS.toMillis((long) Float.parseFloat(parts[1]));
                } else if (parts.length >= 3) {
                    durationMs += TimeUnit.HOURS.toMillis(Long.parseLong(parts[0]))
                            + TimeUnit.MINUTES.toMillis(Long.parseLong(parts[1]))
                            + TimeUnit.SECONDS.toMillis((long) Float.parseFloat(parts[2]));
                } else {
                    return;
                }
                state.getTempObjects().put(DURATION, durationMs);
            } catch (NumberFormatException e) {
                Log.e(NSTAG, "Duration \"" + durationStr + "\" could not be parsed");
            }
        } else if (SUBTITLE.equals(localName)) {
            String subtitle = state.getContentBuf().toString();
            if (TextUtils.isEmpty(subtitle)) {
                return;
            }
            if (state.getCurrentItem() != null) {
                if (TextUtils.isEmpty(state.getCurrentItem().getDescription())) {
                    state.getCurrentItem().setDescription(subtitle);
                }
            } else {
                if (state.getFeed() != null && TextUtils.isEmpty(state.getFeed().getDescription())) {
                    state.getFeed().setDescription(subtitle);
                }
            }
        } else if (SUMMARY.equals(localName)) {
            String summary = state.getContentBuf().toString();
            if (TextUtils.isEmpty(summary)) {
                return;
            }
            if (state.getCurrentItem() != null) {
                state.getCurrentItem().setDescription(summary);
            } else if (state.getFeed() != null) {
                state.getFeed().setDescription(summary);
            }
        }
    }

}
Only use iTunes summary when:
* no description is set yet, OR
* it is clearly more detailed (i.e. longer) than the current description
core/src/main/java/de/danoeh/antennapod/core/syndication/namespace/NSITunes.java
Only use iTunes summary when * no description is set yet OR * it is clearly more detailed (i.e. longer) than the current description
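The rule stated in the commit message maps directly to the condition added in handleElementEnd above: the <itunes:summary> text replaces the item description only if no description exists yet, or if the summary is clearly longer (current length * 1.25 < summary length). Below is a minimal sketch of that decision with a hypothetical Item holder standing in for AntennaPod's FeedItem.

```java
// Hypothetical holder; AntennaPod's real FeedItem is not reproduced here.
class Item {
    private String description;
    String getDescription() { return description; }
    void setDescription(String d) { description = d; }
}

class SummaryRule {
    /** Apply an <itunes:summary> only when it adds information over the current description. */
    static void applySummary(Item item, String summary) {
        if (summary == null || summary.isEmpty()) {
            return;
        }
        String current = item.getDescription();
        boolean noDescriptionYet = current == null || current.isEmpty();
        boolean clearlyMoreDetailed = current != null && current.length() * 1.25 < summary.length();
        if (noDescriptionYet || clearlyMoreDetailed) {
            item.setDescription(summary);
        }
    }

    public static void main(String[] args) {
        Item item = new Item();
        item.setDescription("Short subtitle");                      // 14 chars
        applySummary(item, "A much longer, more detailed summary"); // 36 chars > 14 * 1.25
        System.out.println(item.getDescription());                  // prints the summary
    }
}
```

The 1.25 factor acts as a hysteresis threshold: a summary that is only marginally longer than the existing subtitle does not overwrite it, avoiding churn between two near-equivalent texts.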
Java
mit
39d2dcd91e502590a6f4e08985df2d649caf9657
0
Nunnery/MythicDrops
package net.nunnerycode.bukkit.mythicdrops.names;

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

import net.nunnerycode.bukkit.mythicdrops.api.tiers.Tier;

import org.apache.commons.lang.math.RandomUtils;

public final class TierLoreMap extends ConcurrentHashMap<Tier, List<String>> {

    private static final TierLoreMap _INSTANCE = new TierLoreMap();

    private TierLoreMap() {
        // do nothing
    }

    public static TierLoreMap getInstance() {
        return _INSTANCE;
    }

    public String getRandom(Tier tier) {
        if (!containsKey(tier)) {
            return null;
        }
        List<String> tierLore = get(tier);
        return tierLore.get(RandomUtils.nextInt(tierLore.size()));
    }

}
MythicDrops/src/main/java/net/nunnerycode/bukkit/mythicdrops/names/TierLoreMap.java
package net.nunnerycode.bukkit.mythicdrops.names;

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

import net.nunnerycode.bukkit.mythicdrops.api.tiers.Tier;

import org.apache.commons.lang.math.RandomUtils;

public final class TierLoreMap extends ConcurrentHashMap<Tier, List<String>> {

    private static final TierLoreMap _INSTANCE = new TierLoreMap();

    private TierLoreMap() {
        // do nothing
    }

    public static TierLoreMap getInstance() {
        return _INSTANCE;
    }

    public String getRandom(Tier tier) {
        if (!containsKey(tier)) {
            return null;
        }
        List<String> enchantmentLore = get(tier);
        return enchantmentLore.get(RandomUtils.nextInt(enchantmentLore.size()));
    }

}
Rename local variable enchantmentLore to tierLore in TierLoreMap.getRandom
MythicDrops/src/main/java/net/nunnerycode/bukkit/mythicdrops/names/TierLoreMap.java
Rename local variable enchantmentLore to tierLore in TierLoreMap.getRandom
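getRandom above draws a uniformly random lore line for a tier via RandomUtils.nextInt(size), which returns an index in [0, size). The standalone sketch below shows the same selection using java.util.concurrent.ThreadLocalRandom from the standard library; this is an alternative for illustration, not what the plugin itself uses.

```java
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

public class RandomLorePick {
    /** Returns a random element of the list, or null if the list is null or empty. */
    static String pickRandom(List<String> lore) {
        if (lore == null || lore.isEmpty()) {
            return null;
        }
        // nextInt(bound) yields a value in [0, bound), matching RandomUtils.nextInt(size)
        return lore.get(ThreadLocalRandom.current().nextInt(lore.size()));
    }

    public static void main(String[] args) {
        System.out.println(pickRandom(List.of("Forged in dragonfire", "Whispers of the void")));
    }
}
```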
Java
mit
1484f569e7d82a4c1ec14f24992ec3f27508e803
0
delight-im/Android-AdvancedWebView
package im.delight.android.webview; /* * Android-AdvancedWebView (https://github.com/delight-im/Android-AdvancedWebView) * Copyright (c) delight.im (https://www.delight.im/) * Licensed under the MIT License (https://opensource.org/licenses/MIT) */ import android.view.ViewGroup; import android.app.DownloadManager; import android.app.DownloadManager.Request; import android.os.Environment; import android.webkit.CookieManager; import java.util.Arrays; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import java.util.HashMap; import android.net.http.SslError; import android.view.InputEvent; import android.view.KeyEvent; import android.webkit.ClientCertRequest; import android.webkit.HttpAuthHandler; import android.webkit.SslErrorHandler; import android.webkit.URLUtil; import android.webkit.WebResourceRequest; import android.webkit.WebResourceResponse; import android.os.Message; import android.view.View; import android.webkit.ConsoleMessage; import android.webkit.GeolocationPermissions.Callback; import android.webkit.JsPromptResult; import android.webkit.JsResult; import android.webkit.PermissionRequest; import android.webkit.WebStorage.QuotaUpdater; import android.app.Fragment; import android.util.Base64; import android.os.Build; import android.webkit.DownloadListener; import android.graphics.Bitmap; import android.app.Activity; import android.content.Intent; import android.net.Uri; import android.webkit.ValueCallback; import android.webkit.WebChromeClient; import android.webkit.WebViewClient; import android.webkit.WebSettings; import android.annotation.SuppressLint; import android.content.Context; import android.util.AttributeSet; import android.webkit.WebView; import java.util.MissingResourceException; import java.util.Locale; import java.util.LinkedList; import java.util.Collection; import java.util.List; import java.io.UnsupportedEncodingException; import java.lang.ref.WeakReference; import java.util.Map; /** Advanced WebView component for Android that works as intended out of the box */ @SuppressWarnings("deprecation") public class AdvancedWebView extends WebView { public interface Listener { void onPageStarted(String url, Bitmap favicon); void onPageFinished(String url); void onPageError(int errorCode, String description, String failingUrl); void onDownloadRequested(String url, String suggestedFilename, String mimeType, long contentLength, String contentDisposition, String userAgent); void onExternalPageRequest(String url); } public static final String PACKAGE_NAME_DOWNLOAD_MANAGER = "com.android.providers.downloads"; protected static final int REQUEST_CODE_FILE_PICKER = 51426; protected static final String DATABASES_SUB_FOLDER = "/databases"; protected static final String LANGUAGE_DEFAULT_ISO3 = "eng"; protected static final String CHARSET_DEFAULT = "UTF-8"; /** Alternative browsers that have their own rendering engine and *may* be installed on this device */ protected static final String[] ALTERNATIVE_BROWSERS = new String[] { "org.mozilla.firefox", "com.android.chrome", "com.opera.browser", "org.mozilla.firefox_beta", "com.chrome.beta", "com.opera.browser.beta" }; protected WeakReference<Activity> mActivity; protected WeakReference<Fragment> mFragment; protected Listener mListener; protected final List<String> mPermittedHostnames = new LinkedList<String>(); /** File upload callback for platform versions prior to Android 5.0 */ protected ValueCallback<Uri> mFileUploadCallbackFirst; /** File upload callback for Android 5.0+ */ protected 
ValueCallback<Uri[]> mFileUploadCallbackSecond; protected long mLastError; protected String mLanguageIso3; protected int mRequestCodeFilePicker = REQUEST_CODE_FILE_PICKER; protected WebViewClient mCustomWebViewClient; protected WebChromeClient mCustomWebChromeClient; protected boolean mGeolocationEnabled; protected String mUploadableFileTypes = "*/*"; protected final Map<String, String> mHttpHeaders = new HashMap<String, String>(); public AdvancedWebView(Context context) { super(context); init(context); } public AdvancedWebView(Context context, AttributeSet attrs) { super(context, attrs); init(context); } public AdvancedWebView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(context); } public void setListener(final Activity activity, final Listener listener) { setListener(activity, listener, REQUEST_CODE_FILE_PICKER); } public void setListener(final Activity activity, final Listener listener, final int requestCodeFilePicker) { if (activity != null) { mActivity = new WeakReference<Activity>(activity); } else { mActivity = null; } setListener(listener, requestCodeFilePicker); } public void setListener(final Fragment fragment, final Listener listener) { setListener(fragment, listener, REQUEST_CODE_FILE_PICKER); } public void setListener(final Fragment fragment, final Listener listener, final int requestCodeFilePicker) { if (fragment != null) { mFragment = new WeakReference<Fragment>(fragment); } else { mFragment = null; } setListener(listener, requestCodeFilePicker); } protected void setListener(final Listener listener, final int requestCodeFilePicker) { mListener = listener; mRequestCodeFilePicker = requestCodeFilePicker; } @Override public void setWebViewClient(final WebViewClient client) { mCustomWebViewClient = client; } @Override public void setWebChromeClient(final WebChromeClient client) { mCustomWebChromeClient = client; } @SuppressLint("SetJavaScriptEnabled") public void setGeolocationEnabled(final boolean enabled) { if (enabled) { getSettings().setJavaScriptEnabled(true); getSettings().setGeolocationEnabled(true); setGeolocationDatabasePath(); } mGeolocationEnabled = enabled; } @SuppressLint("NewApi") protected void setGeolocationDatabasePath() { final Activity activity; if (mFragment != null && mFragment.get() != null && Build.VERSION.SDK_INT >= 11 && mFragment.get().getActivity() != null) { activity = mFragment.get().getActivity(); } else if (mActivity != null && mActivity.get() != null) { activity = mActivity.get(); } else { return; } getSettings().setGeolocationDatabasePath(activity.getFilesDir().getPath()); } public void setUploadableFileTypes(final String mimeType) { mUploadableFileTypes = mimeType; } /** * Loads and displays the provided HTML source text * * @param html the HTML source text to load */ public void loadHtml(final String html) { loadHtml(html, null); } /** * Loads and displays the provided HTML source text * * @param html the HTML source text to load * @param baseUrl the URL to use as the page's base URL */ public void loadHtml(final String html, final String baseUrl) { loadHtml(html, baseUrl, null); } /** * Loads and displays the provided HTML source text * * @param html the HTML source text to load * @param baseUrl the URL to use as the page's base URL * @param historyUrl the URL to use for the page's history entry */ public void loadHtml(final String html, final String baseUrl, final String historyUrl) { loadHtml(html, baseUrl, historyUrl, "utf-8"); } /** * Loads and displays the provided HTML source text * 
* @param html the HTML source text to load * @param baseUrl the URL to use as the page's base URL * @param historyUrl the URL to use for the page's history entry * @param encoding the encoding or charset of the HTML source text */ public void loadHtml(final String html, final String baseUrl, final String historyUrl, final String encoding) { loadDataWithBaseURL(baseUrl, html, "text/html", encoding, historyUrl); } @SuppressLint("NewApi") @SuppressWarnings("all") public void onResume() { if (Build.VERSION.SDK_INT >= 11) { super.onResume(); } resumeTimers(); } @SuppressLint("NewApi") @SuppressWarnings("all") public void onPause() { pauseTimers(); if (Build.VERSION.SDK_INT >= 11) { super.onPause(); } } public void onDestroy() { // try to remove this view from its parent first try { ((ViewGroup) getParent()).removeView(this); } catch (Exception ignored) { } // then try to remove all child views from this view try { removeAllViews(); } catch (Exception ignored) { } // and finally destroy this view destroy(); } public void onActivityResult(final int requestCode, final int resultCode, final Intent intent) { if (requestCode == mRequestCodeFilePicker) { if (resultCode == Activity.RESULT_OK) { if (intent != null) { if (mFileUploadCallbackFirst != null) { mFileUploadCallbackFirst.onReceiveValue(intent.getData()); mFileUploadCallbackFirst = null; } else if (mFileUploadCallbackSecond != null) { Uri[] dataUris = null; try { if (intent.getDataString() != null) { dataUris = new Uri[] { Uri.parse(intent.getDataString()) }; } else { if (Build.VERSION.SDK_INT >= 16) { if (intent.getClipData() != null) { final int numSelectedFiles = intent.getClipData().getItemCount(); dataUris = new Uri[numSelectedFiles]; for (int i = 0; i < numSelectedFiles; i++) { dataUris[i] = intent.getClipData().getItemAt(i).getUri(); } } } } } catch (Exception ignored) { } mFileUploadCallbackSecond.onReceiveValue(dataUris); mFileUploadCallbackSecond = null; } } } else { if (mFileUploadCallbackFirst != null) { mFileUploadCallbackFirst.onReceiveValue(null); mFileUploadCallbackFirst = null; } else if (mFileUploadCallbackSecond != null) { mFileUploadCallbackSecond.onReceiveValue(null); mFileUploadCallbackSecond = null; } } } } /** * Adds an additional HTTP header that will be sent along with every HTTP `GET` request * * This does only affect the main requests, not the requests to included resources (e.g. images) * * If you later want to delete an HTTP header that was previously added this way, call `removeHttpHeader()` * * The `WebView` implementation may in some cases overwrite headers that you set or unset * * @param name the name of the HTTP header to add * @param value the value of the HTTP header to send */ public void addHttpHeader(final String name, final String value) { mHttpHeaders.put(name, value); } /** * Removes one of the HTTP headers that have previously been added via `addHttpHeader()` * * If you want to unset a pre-defined header, set it to an empty string with `addHttpHeader()` instead * * The `WebView` implementation may in some cases overwrite headers that you set or unset * * @param name the name of the HTTP header to remove */ public void removeHttpHeader(final String name) { mHttpHeaders.remove(name); } public void addPermittedHostname(String hostname) { mPermittedHostnames.add(hostname); } public void addPermittedHostnames(Collection<? 
extends String> collection) { mPermittedHostnames.addAll(collection); } public List<String> getPermittedHostnames() { return mPermittedHostnames; } public void removePermittedHostname(String hostname) { mPermittedHostnames.remove(hostname); } public void clearPermittedHostnames() { mPermittedHostnames.clear(); } public boolean onBackPressed() { if (canGoBack()) { goBack(); return false; } else { return true; } } @SuppressLint("NewApi") protected static void setAllowAccessFromFileUrls(final WebSettings webSettings, final boolean allowed) { if (Build.VERSION.SDK_INT >= 16) { webSettings.setAllowFileAccessFromFileURLs(allowed); webSettings.setAllowUniversalAccessFromFileURLs(allowed); } } @SuppressWarnings("static-method") public void setCookiesEnabled(final boolean enabled) { CookieManager.getInstance().setAcceptCookie(enabled); } @SuppressLint("NewApi") public void setThirdPartyCookiesEnabled(final boolean enabled) { if (Build.VERSION.SDK_INT >= 21) { CookieManager.getInstance().setAcceptThirdPartyCookies(this, enabled); } } public void setMixedContentAllowed(final boolean allowed) { setMixedContentAllowed(getSettings(), allowed); } @SuppressWarnings("static-method") @SuppressLint("NewApi") protected void setMixedContentAllowed(final WebSettings webSettings, final boolean allowed) { if (Build.VERSION.SDK_INT >= 21) { webSettings.setMixedContentMode(allowed ? WebSettings.MIXED_CONTENT_ALWAYS_ALLOW : WebSettings.MIXED_CONTENT_NEVER_ALLOW); } } public void setDesktopMode(final boolean enabled) { final WebSettings webSettings = getSettings(); final String newUserAgent; if (enabled) { newUserAgent = webSettings.getUserAgentString().replace("Mobile", "eliboM").replace("Android", "diordnA"); } else { newUserAgent = webSettings.getUserAgentString().replace("eliboM", "Mobile").replace("diordnA", "Android"); } webSettings.setUserAgentString(newUserAgent); webSettings.setUseWideViewPort(enabled); webSettings.setLoadWithOverviewMode(enabled); webSettings.setSupportZoom(enabled); webSettings.setBuiltInZoomControls(enabled); } @SuppressLint({ "SetJavaScriptEnabled" }) protected void init(Context context) { // in IDE's preview mode if (isInEditMode()) { // do not run the code from this method return; } if (context instanceof Activity) { mActivity = new WeakReference<Activity>((Activity) context); } mLanguageIso3 = getLanguageIso3(); setFocusable(true); setFocusableInTouchMode(true); setSaveEnabled(true); final String filesDir = context.getFilesDir().getPath(); final String databaseDir = filesDir.substring(0, filesDir.lastIndexOf("/")) + DATABASES_SUB_FOLDER; final WebSettings webSettings = getSettings(); webSettings.setAllowFileAccess(false); setAllowAccessFromFileUrls(webSettings, false); webSettings.setBuiltInZoomControls(false); webSettings.setJavaScriptEnabled(true); webSettings.setDomStorageEnabled(true); if (Build.VERSION.SDK_INT < 18) { webSettings.setRenderPriority(WebSettings.RenderPriority.HIGH); } webSettings.setDatabaseEnabled(true); if (Build.VERSION.SDK_INT < 19) { webSettings.setDatabasePath(databaseDir); } setMixedContentAllowed(webSettings, true); setThirdPartyCookiesEnabled(true); super.setWebViewClient(new WebViewClient() { @Override public void onPageStarted(WebView view, String url, Bitmap favicon) { if (!hasError()) { if (mListener != null) { mListener.onPageStarted(url, favicon); } } if (mCustomWebViewClient != null) { mCustomWebViewClient.onPageStarted(view, url, favicon); } } @Override public void onPageFinished(WebView view, String url) { if (!hasError()) { if (mListener != null) 
{ mListener.onPageFinished(url); } } if (mCustomWebViewClient != null) { mCustomWebViewClient.onPageFinished(view, url); } } @Override public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) { setLastError(); if (mListener != null) { mListener.onPageError(errorCode, description, failingUrl); } if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedError(view, errorCode, description, failingUrl); } } @Override public boolean shouldOverrideUrlLoading(final WebView view, final String url) { // if the hostname may not be accessed if (!isHostnameAllowed(url)) { // if a listener is available if (mListener != null) { // inform the listener about the request mListener.onExternalPageRequest(url); } // cancel the original request return true; } // if there is a user-specified handler available if (mCustomWebViewClient != null) { // if the user-specified handler asks to override the request if (mCustomWebViewClient.shouldOverrideUrlLoading(view, url)) { // cancel the original request return true; } } // route the request through the custom URL loading method view.loadUrl(url); // cancel the original request return true; } @Override public void onLoadResource(WebView view, String url) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onLoadResource(view, url); } else { super.onLoadResource(view, url); } } @SuppressLint("NewApi") @SuppressWarnings("all") public WebResourceResponse shouldInterceptRequest(WebView view, String url) { if (Build.VERSION.SDK_INT >= 11) { if (mCustomWebViewClient != null) { return mCustomWebViewClient.shouldInterceptRequest(view, url); } else { return super.shouldInterceptRequest(view, url); } } else { return null; } } @SuppressLint("NewApi") @SuppressWarnings("all") public WebResourceResponse shouldInterceptRequest(WebView view, WebResourceRequest request) { if (Build.VERSION.SDK_INT >= 21) { if (mCustomWebViewClient != null) { return mCustomWebViewClient.shouldInterceptRequest(view, request); } else { return super.shouldInterceptRequest(view, request); } } else { return null; } } @Override public void onFormResubmission(WebView view, Message dontResend, Message resend) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onFormResubmission(view, dontResend, resend); } else { super.onFormResubmission(view, dontResend, resend); } } @Override public void doUpdateVisitedHistory(WebView view, String url, boolean isReload) { if (mCustomWebViewClient != null) { mCustomWebViewClient.doUpdateVisitedHistory(view, url, isReload); } else { super.doUpdateVisitedHistory(view, url, isReload); } } @Override public void onReceivedSslError(WebView view, SslErrorHandler handler, SslError error) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedSslError(view, handler, error); } else { super.onReceivedSslError(view, handler, error); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onReceivedClientCertRequest(WebView view, ClientCertRequest request) { if (Build.VERSION.SDK_INT >= 21) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedClientCertRequest(view, request); } else { super.onReceivedClientCertRequest(view, request); } } } @Override public void onReceivedHttpAuthRequest(WebView view, HttpAuthHandler handler, String host, String realm) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedHttpAuthRequest(view, handler, host, realm); } else { super.onReceivedHttpAuthRequest(view, handler, host, realm); } } @Override public boolean 
shouldOverrideKeyEvent(WebView view, KeyEvent event) { if (mCustomWebViewClient != null) { return mCustomWebViewClient.shouldOverrideKeyEvent(view, event); } else { return super.shouldOverrideKeyEvent(view, event); } } @Override public void onUnhandledKeyEvent(WebView view, KeyEvent event) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onUnhandledKeyEvent(view, event); } else { super.onUnhandledKeyEvent(view, event); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onUnhandledInputEvent(WebView view, InputEvent event) { if (Build.VERSION.SDK_INT >= 21) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onUnhandledInputEvent(view, event); } else { super.onUnhandledInputEvent(view, event); } } } @Override public void onScaleChanged(WebView view, float oldScale, float newScale) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onScaleChanged(view, oldScale, newScale); } else { super.onScaleChanged(view, oldScale, newScale); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onReceivedLoginRequest(WebView view, String realm, String account, String args) { if (Build.VERSION.SDK_INT >= 12) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedLoginRequest(view, realm, account, args); } else { super.onReceivedLoginRequest(view, realm, account, args); } } } }); super.setWebChromeClient(new WebChromeClient() { // file upload callback (Android 2.2 (API level 8) -- Android 2.3 (API level 10)) (hidden method) @SuppressWarnings("unused") public void openFileChooser(ValueCallback<Uri> uploadMsg) { openFileChooser(uploadMsg, null); } // file upload callback (Android 3.0 (API level 11) -- Android 4.0 (API level 15)) (hidden method) public void openFileChooser(ValueCallback<Uri> uploadMsg, String acceptType) { openFileChooser(uploadMsg, acceptType, null); } // file upload callback (Android 4.1 (API level 16) -- Android 4.3 (API level 18)) (hidden method) @SuppressWarnings("unused") public void openFileChooser(ValueCallback<Uri> uploadMsg, String acceptType, String capture) { openFileInput(uploadMsg, null, false); } // file upload callback (Android 5.0 (API level 21) -- current) (public method) @SuppressWarnings("all") public boolean onShowFileChooser(WebView webView, ValueCallback<Uri[]> filePathCallback, WebChromeClient.FileChooserParams fileChooserParams) { if (Build.VERSION.SDK_INT >= 21) { final boolean allowMultiple = fileChooserParams.getMode() == FileChooserParams.MODE_OPEN_MULTIPLE; openFileInput(null, filePathCallback, allowMultiple); return true; } else { return false; } } @Override public void onProgressChanged(WebView view, int newProgress) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onProgressChanged(view, newProgress); } else { super.onProgressChanged(view, newProgress); } } @Override public void onReceivedTitle(WebView view, String title) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onReceivedTitle(view, title); } else { super.onReceivedTitle(view, title); } } @Override public void onReceivedIcon(WebView view, Bitmap icon) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onReceivedIcon(view, icon); } else { super.onReceivedIcon(view, icon); } } @Override public void onReceivedTouchIconUrl(WebView view, String url, boolean precomposed) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onReceivedTouchIconUrl(view, url, precomposed); } else { super.onReceivedTouchIconUrl(view, url, precomposed); } } @Override public void onShowCustomView(View view, 
CustomViewCallback callback) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onShowCustomView(view, callback); } else { super.onShowCustomView(view, callback); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onShowCustomView(View view, int requestedOrientation, CustomViewCallback callback) { if (Build.VERSION.SDK_INT >= 14) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onShowCustomView(view, requestedOrientation, callback); } else { super.onShowCustomView(view, requestedOrientation, callback); } } } @Override public void onHideCustomView() { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onHideCustomView(); } else { super.onHideCustomView(); } } @Override public boolean onCreateWindow(WebView view, boolean isDialog, boolean isUserGesture, Message resultMsg) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onCreateWindow(view, isDialog, isUserGesture, resultMsg); } else { return super.onCreateWindow(view, isDialog, isUserGesture, resultMsg); } } @Override public void onRequestFocus(WebView view) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onRequestFocus(view); } else { super.onRequestFocus(view); } } @Override public void onCloseWindow(WebView window) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onCloseWindow(window); } else { super.onCloseWindow(window); } } @Override public boolean onJsAlert(WebView view, String url, String message, JsResult result) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsAlert(view, url, message, result); } else { return super.onJsAlert(view, url, message, result); } } @Override public boolean onJsConfirm(WebView view, String url, String message, JsResult result) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsConfirm(view, url, message, result); } else { return super.onJsConfirm(view, url, message, result); } } @Override public boolean onJsPrompt(WebView view, String url, String message, String defaultValue, JsPromptResult result) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsPrompt(view, url, message, defaultValue, result); } else { return super.onJsPrompt(view, url, message, defaultValue, result); } } @Override public boolean onJsBeforeUnload(WebView view, String url, String message, JsResult result) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsBeforeUnload(view, url, message, result); } else { return super.onJsBeforeUnload(view, url, message, result); } } @Override public void onGeolocationPermissionsShowPrompt(String origin, Callback callback) { if (mGeolocationEnabled) { callback.invoke(origin, true, false); } else { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onGeolocationPermissionsShowPrompt(origin, callback); } else { super.onGeolocationPermissionsShowPrompt(origin, callback); } } } @Override public void onGeolocationPermissionsHidePrompt() { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onGeolocationPermissionsHidePrompt(); } else { super.onGeolocationPermissionsHidePrompt(); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onPermissionRequest(PermissionRequest request) { if (Build.VERSION.SDK_INT >= 21) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onPermissionRequest(request); } else { super.onPermissionRequest(request); } } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onPermissionRequestCanceled(PermissionRequest request) { if 
(Build.VERSION.SDK_INT >= 21) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onPermissionRequestCanceled(request); } else { super.onPermissionRequestCanceled(request); } } } @Override public boolean onJsTimeout() { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsTimeout(); } else { return super.onJsTimeout(); } } @Override public void onConsoleMessage(String message, int lineNumber, String sourceID) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onConsoleMessage(message, lineNumber, sourceID); } else { super.onConsoleMessage(message, lineNumber, sourceID); } } @Override public boolean onConsoleMessage(ConsoleMessage consoleMessage) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onConsoleMessage(consoleMessage); } else { return super.onConsoleMessage(consoleMessage); } } @Override public Bitmap getDefaultVideoPoster() { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.getDefaultVideoPoster(); } else { return super.getDefaultVideoPoster(); } } @Override public View getVideoLoadingProgressView() { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.getVideoLoadingProgressView(); } else { return super.getVideoLoadingProgressView(); } } @Override public void getVisitedHistory(ValueCallback<String[]> callback) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.getVisitedHistory(callback); } else { super.getVisitedHistory(callback); } } @Override public void onExceededDatabaseQuota(String url, String databaseIdentifier, long quota, long estimatedDatabaseSize, long totalQuota, QuotaUpdater quotaUpdater) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onExceededDatabaseQuota(url, databaseIdentifier, quota, estimatedDatabaseSize, totalQuota, quotaUpdater); } else { super.onExceededDatabaseQuota(url, databaseIdentifier, quota, estimatedDatabaseSize, totalQuota, quotaUpdater); } } @Override public void onReachedMaxAppCacheSize(long requiredStorage, long quota, QuotaUpdater quotaUpdater) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onReachedMaxAppCacheSize(requiredStorage, quota, quotaUpdater); } else { super.onReachedMaxAppCacheSize(requiredStorage, quota, quotaUpdater); } } }); setDownloadListener(new DownloadListener() { @Override public void onDownloadStart(final String url, final String userAgent, final String contentDisposition, final String mimeType, final long contentLength) { final String suggestedFilename = URLUtil.guessFileName(url, contentDisposition, mimeType); if (mListener != null) { mListener.onDownloadRequested(url, suggestedFilename, mimeType, contentLength, contentDisposition, userAgent); } } }); } @Override public void loadUrl(final String url, Map<String, String> additionalHttpHeaders) { if (additionalHttpHeaders == null) { additionalHttpHeaders = mHttpHeaders; } else if (mHttpHeaders.size() > 0) { additionalHttpHeaders.putAll(mHttpHeaders); } super.loadUrl(url, additionalHttpHeaders); } @Override public void loadUrl(final String url) { if (mHttpHeaders.size() > 0) { super.loadUrl(url, mHttpHeaders); } else { super.loadUrl(url); } } public void loadUrl(String url, final boolean preventCaching) { if (preventCaching) { url = makeUrlUnique(url); } loadUrl(url); } public void loadUrl(String url, final boolean preventCaching, final Map<String,String> additionalHttpHeaders) { if (preventCaching) { url = makeUrlUnique(url); } loadUrl(url, additionalHttpHeaders); } protected static String makeUrlUnique(final String url) { 
StringBuilder unique = new StringBuilder(); unique.append(url); if (url.contains("?")) { unique.append('&'); } else { if (url.lastIndexOf('/') <= 7) { unique.append('/'); } unique.append('?'); } unique.append(System.currentTimeMillis()); unique.append('='); unique.append(1); return unique.toString(); } protected boolean isHostnameAllowed(final String url) { // if the permitted hostnames have not been restricted to a specific set if (mPermittedHostnames.size() == 0) { // all hostnames are allowed return true; } // get the actual hostname of the URL that is to be checked final String actualHost = Uri.parse(url).getHost(); // for every hostname in the set of permitted hosts for (String expectedHost : mPermittedHostnames) { // if the two hostnames match or if the actual host is a subdomain of the expected host if (actualHost.equals(expectedHost) || actualHost.endsWith("."+expectedHost)) { // the actual hostname of the URL to be checked is allowed return true; } } // the actual hostname of the URL to be checked is not allowed since there were no matches return false; } protected void setLastError() { mLastError = System.currentTimeMillis(); } protected boolean hasError() { return (mLastError + 500) >= System.currentTimeMillis(); } protected static String getLanguageIso3() { try { return Locale.getDefault().getISO3Language().toLowerCase(Locale.US); } catch (MissingResourceException e) { return LANGUAGE_DEFAULT_ISO3; } } /** Provides localizations for the 25 most widely spoken languages that have a ISO 639-2/T code */ protected String getFileUploadPromptLabel() { try { if (mLanguageIso3.equals("zho")) return decodeBase64("6YCJ5oup5LiA5Liq5paH5Lu2"); else if (mLanguageIso3.equals("spa")) return decodeBase64("RWxpamEgdW4gYXJjaGl2bw=="); else if (mLanguageIso3.equals("hin")) return decodeBase64("4KSP4KSVIOCkq+CkvOCkvuCkh+CksiDgpJrgpYHgpKjgpYfgpII="); else if (mLanguageIso3.equals("ben")) return decodeBase64("4KaP4KaV4Kaf4Ka/IOCmq+CmvuCmh+CmsiDgpqjgpr/gprDgp43gpqzgpr7gpprgpqg="); else if (mLanguageIso3.equals("ara")) return decodeBase64("2KfYrtiq2YrYp9ixINmF2YTZgSDZiNin2K3Yrw=="); else if (mLanguageIso3.equals("por")) return decodeBase64("RXNjb2xoYSB1bSBhcnF1aXZv"); else if (mLanguageIso3.equals("rus")) return decodeBase64("0JLRi9Cx0LXRgNC40YLQtSDQvtC00LjQvSDRhNCw0LnQuw=="); else if (mLanguageIso3.equals("jpn")) return decodeBase64("MeODleOCoeOCpOODq+OCkumBuOaKnuOBl+OBpuOBj+OBoOOBleOBhA=="); else if (mLanguageIso3.equals("pan")) return decodeBase64("4KiH4Kmx4KiVIOCoq+CovuCoh+CosiDgqJrgqYHgqKPgqYs="); else if (mLanguageIso3.equals("deu")) return decodeBase64("V8OkaGxlIGVpbmUgRGF0ZWk="); else if (mLanguageIso3.equals("jav")) return decodeBase64("UGlsaWggc2lqaSBiZXJrYXM="); else if (mLanguageIso3.equals("msa")) return decodeBase64("UGlsaWggc2F0dSBmYWls"); else if (mLanguageIso3.equals("tel")) return decodeBase64("4LCS4LCVIOCwq+CxhuCxluCwsuCxjeCwqOCxgSDgsI7gsILgsJrgsYHgsJXgsYvgsILgsKHgsL8="); else if (mLanguageIso3.equals("vie")) return decodeBase64("Q2jhu41uIG3hu5l0IHThuq1wIHRpbg=="); else if (mLanguageIso3.equals("kor")) return decodeBase64("7ZWY64KY7J2YIO2MjOydvOydhCDshKDtg50="); else if (mLanguageIso3.equals("fra")) return decodeBase64("Q2hvaXNpc3NleiB1biBmaWNoaWVy"); else if (mLanguageIso3.equals("mar")) return decodeBase64("4KSr4KS+4KSH4KSyIOCkqOCkv+CkteCkoeCkvg=="); else if (mLanguageIso3.equals("tam")) return decodeBase64("4K6S4K6w4K+BIOCuleCvh+CuvuCuquCvjeCuquCviCDgrqTgr4fgrrDgr43grrXgr4E="); else if (mLanguageIso3.equals("urd")) return 
decodeBase64("2KfbjNqpINmB2KfYptmEINmF24zauiDYs9uSINin2YbYqtiu2KfYqCDaqdix24zaug=="); else if (mLanguageIso3.equals("fas")) return decodeBase64("2LHYpyDYp9mG2KrYrtin2Kgg2qnZhtuM2K8g24zaqSDZgdin24zZhA=="); else if (mLanguageIso3.equals("tur")) return decodeBase64("QmlyIGRvc3lhIHNlw6dpbg=="); else if (mLanguageIso3.equals("ita")) return decodeBase64("U2NlZ2xpIHVuIGZpbGU="); else if (mLanguageIso3.equals("tha")) return decodeBase64("4LmA4Lil4Li34Lit4LiB4LmE4Lif4Lil4LmM4Lir4LiZ4Li24LmI4LiH"); else if (mLanguageIso3.equals("guj")) return decodeBase64("4KqP4KqVIOCqq+CqvuCqh+CqsuCqqOCrhyDgqqrgqrjgqoLgqqY="); } catch (Exception ignored) { } // return English translation by default return "Choose a file"; } protected static String decodeBase64(final String base64) throws IllegalArgumentException, UnsupportedEncodingException { final byte[] bytes = Base64.decode(base64, Base64.DEFAULT); return new String(bytes, CHARSET_DEFAULT); } @SuppressLint("NewApi") protected void openFileInput(final ValueCallback<Uri> fileUploadCallbackFirst, final ValueCallback<Uri[]> fileUploadCallbackSecond, final boolean allowMultiple) { if (mFileUploadCallbackFirst != null) { mFileUploadCallbackFirst.onReceiveValue(null); } mFileUploadCallbackFirst = fileUploadCallbackFirst; if (mFileUploadCallbackSecond != null) { mFileUploadCallbackSecond.onReceiveValue(null); } mFileUploadCallbackSecond = fileUploadCallbackSecond; Intent i = new Intent(Intent.ACTION_GET_CONTENT); i.addCategory(Intent.CATEGORY_OPENABLE); if (allowMultiple) { if (Build.VERSION.SDK_INT >= 18) { i.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, true); } } i.setType(mUploadableFileTypes); if (mFragment != null && mFragment.get() != null && Build.VERSION.SDK_INT >= 11) { mFragment.get().startActivityForResult(Intent.createChooser(i, getFileUploadPromptLabel()), mRequestCodeFilePicker); } else if (mActivity != null && mActivity.get() != null) { mActivity.get().startActivityForResult(Intent.createChooser(i, getFileUploadPromptLabel()), mRequestCodeFilePicker); } } /** * Returns whether file uploads can be used on the current device (generally all platform versions except for 4.4) * * @return whether file uploads can be used */ public static boolean isFileUploadAvailable() { return isFileUploadAvailable(false); } /** * Returns whether file uploads can be used on the current device (generally all platform versions except for 4.4) * * On Android 4.4.3/4.4.4, file uploads may be possible but will come with a wrong MIME type * * @param needsCorrectMimeType whether a correct MIME type is required for file uploads or `application/octet-stream` is acceptable * @return whether file uploads can be used */ public static boolean isFileUploadAvailable(final boolean needsCorrectMimeType) { if (Build.VERSION.SDK_INT == 19) { final String platformVersion = (Build.VERSION.RELEASE == null) ? "" : Build.VERSION.RELEASE; return !needsCorrectMimeType && (platformVersion.startsWith("4.4.3") || platformVersion.startsWith("4.4.4")); } else { return true; } } /** * Handles a download by loading the file from `fromUrl` and saving it to `toFilename` on the external storage * * This requires the two permissions `android.permission.INTERNET` and `android.permission.WRITE_EXTERNAL_STORAGE` * * Only supported on API level 9 (Android 2.3) and above * * @param context a valid `Context` reference * @param fromUrl the URL of the file to download, e.g. the one from `AdvancedWebView.onDownloadRequested(...)` * @param toFilename the name of the destination file where the download should be saved, e.g. 
`myImage.jpg` * @return whether the download has been successfully handled or not */ @SuppressLint("NewApi") public static boolean handleDownload(final Context context, final String fromUrl, final String toFilename) { if (Build.VERSION.SDK_INT < 9) { throw new RuntimeException("Method requires API level 9 or above"); } final Request request = new Request(Uri.parse(fromUrl)); if (Build.VERSION.SDK_INT >= 11) { request.allowScanningByMediaScanner(); request.setNotificationVisibility(DownloadManager.Request.VISIBILITY_VISIBLE_NOTIFY_COMPLETED); } request.setDestinationInExternalPublicDir(Environment.DIRECTORY_DOWNLOADS, toFilename); final DownloadManager dm = (DownloadManager) context.getSystemService(Context.DOWNLOAD_SERVICE); try { try { dm.enqueue(request); } catch (SecurityException e) { if (Build.VERSION.SDK_INT >= 11) { request.setNotificationVisibility(DownloadManager.Request.VISIBILITY_VISIBLE); } dm.enqueue(request); } return true; } // if the download manager app has been disabled on the device catch (IllegalArgumentException e) { // show the settings screen where the user can enable the download manager app again openAppSettings(context, AdvancedWebView.PACKAGE_NAME_DOWNLOAD_MANAGER); return false; } } @SuppressLint("NewApi") private static boolean openAppSettings(final Context context, final String packageName) { if (Build.VERSION.SDK_INT < 9) { throw new RuntimeException("Method requires API level 9 or above"); } try { final Intent intent = new Intent(android.provider.Settings.ACTION_APPLICATION_DETAILS_SETTINGS); intent.setData(Uri.parse("package:" + packageName)); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(intent); return true; } catch (Exception e) { return false; } } /** Wrapper for methods related to alternative browsers that have their own rendering engines */ public static class Browsers { /** Package name of an alternative browser that is installed on this device */ private static String mAlternativePackage; /** * Returns whether there is an alternative browser with its own rendering engine currently installed * * @param context a valid `Context` reference * @return whether there is an alternative browser or not */ public static boolean hasAlternative(final Context context) { return getAlternative(context) != null; } /** * Returns the package name of an alternative browser with its own rendering engine or `null` * * @param context a valid `Context` reference * @return the package name or `null` */ public static String getAlternative(final Context context) { if (mAlternativePackage != null) { return mAlternativePackage; } final List<String> alternativeBrowsers = Arrays.asList(ALTERNATIVE_BROWSERS); final List<ApplicationInfo> apps = context.getPackageManager().getInstalledApplications(PackageManager.GET_META_DATA); for (ApplicationInfo app : apps) { if (!app.enabled) { continue; } if (alternativeBrowsers.contains(app.packageName)) { mAlternativePackage = app.packageName; return app.packageName; } } return null; } /** * Opens the given URL in an alternative browser * * @param context a valid `Activity` reference * @param url the URL to open */ public static void openUrl(final Activity context, final String url) { openUrl(context, url, false); } /** * Opens the given URL in an alternative browser * * @param context a valid `Activity` reference * @param url the URL to open * @param withoutTransition whether to switch to the browser `Activity` without a transition */ public static void openUrl(final Activity context, final String url, final boolean 
withoutTransition) { final Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url)); intent.setPackage(getAlternative(context)); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(intent); if (withoutTransition) { context.overridePendingTransition(0, 0); } } } }
Source/library/src/main/java/im/delight/android/webview/AdvancedWebView.java
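The AdvancedWebView code above exposes explicit lifecycle hooks (onResume, onPause, onDestroy, onActivityResult, onBackPressed) that the host must forward. The sketch below shows one way an Activity could wire them up, based only on the public methods visible above; the layout resource R.layout.activity_main and view id R.id.webview are hypothetical placeholders.

```java
// Minimal wiring sketch; R.layout.activity_main and R.id.webview are hypothetical.
public class MainActivity extends android.app.Activity implements AdvancedWebView.Listener {

    private AdvancedWebView mWebView;

    @Override
    protected void onCreate(android.os.Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mWebView = (AdvancedWebView) findViewById(R.id.webview);
        mWebView.setListener(this, this);
        mWebView.loadUrl("https://example.org/");
    }

    // Forward the lifecycle so timers, file uploads and downloads keep working.
    @Override protected void onResume() { super.onResume(); mWebView.onResume(); }
    @Override protected void onPause() { mWebView.onPause(); super.onPause(); }
    @Override protected void onDestroy() { mWebView.onDestroy(); super.onDestroy(); }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, android.content.Intent intent) {
        super.onActivityResult(requestCode, resultCode, intent);
        mWebView.onActivityResult(requestCode, resultCode, intent); // delivers file-chooser results
    }

    @Override
    public void onBackPressed() {
        // AdvancedWebView.onBackPressed() returns true only when the WebView cannot go back itself
        if (mWebView.onBackPressed()) { super.onBackPressed(); }
    }

    // AdvancedWebView.Listener callbacks (no-ops in this sketch)
    @Override public void onPageStarted(String url, android.graphics.Bitmap favicon) { }
    @Override public void onPageFinished(String url) { }
    @Override public void onPageError(int errorCode, String description, String failingUrl) { }
    @Override public void onDownloadRequested(String url, String suggestedFilename, String mimeType,
                                              long contentLength, String contentDisposition, String userAgent) { }
    @Override public void onExternalPageRequest(String url) { }
}
```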
package im.delight.android.webview; /* * Android-AdvancedWebView (https://github.com/delight-im/Android-AdvancedWebView) * Copyright (c) delight.im (https://www.delight.im/) * Licensed under the MIT License (https://opensource.org/licenses/MIT) */ import android.view.ViewGroup; import android.app.DownloadManager; import android.app.DownloadManager.Request; import android.os.Environment; import android.webkit.CookieManager; import java.util.Arrays; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import java.util.HashMap; import android.net.http.SslError; import android.view.InputEvent; import android.view.KeyEvent; import android.webkit.ClientCertRequest; import android.webkit.HttpAuthHandler; import android.webkit.SslErrorHandler; import android.webkit.URLUtil; import android.webkit.WebResourceRequest; import android.webkit.WebResourceResponse; import android.os.Message; import android.view.View; import android.webkit.ConsoleMessage; import android.webkit.GeolocationPermissions.Callback; import android.webkit.JsPromptResult; import android.webkit.JsResult; import android.webkit.PermissionRequest; import android.webkit.WebStorage.QuotaUpdater; import android.app.Fragment; import android.util.Base64; import android.os.Build; import android.webkit.DownloadListener; import android.graphics.Bitmap; import android.app.Activity; import android.content.Intent; import android.net.Uri; import android.webkit.ValueCallback; import android.webkit.WebChromeClient; import android.webkit.WebViewClient; import android.webkit.WebSettings; import android.annotation.SuppressLint; import android.content.Context; import android.util.AttributeSet; import android.webkit.WebView; import java.util.MissingResourceException; import java.util.Locale; import java.util.LinkedList; import java.util.Collection; import java.util.List; import java.io.UnsupportedEncodingException; import java.lang.ref.WeakReference; import java.util.Map; /** Advanced WebView component for Android that works as intended out of the box */ @SuppressWarnings("deprecation") public class AdvancedWebView extends WebView { public interface Listener { void onPageStarted(String url, Bitmap favicon); void onPageFinished(String url); void onPageError(int errorCode, String description, String failingUrl); void onDownloadRequested(String url, String suggestedFilename, String mimeType, long contentLength, String contentDisposition, String userAgent); void onExternalPageRequest(String url); } public static final String PACKAGE_NAME_DOWNLOAD_MANAGER = "com.android.providers.downloads"; protected static final int REQUEST_CODE_FILE_PICKER = 51426; protected static final String DATABASES_SUB_FOLDER = "/databases"; protected static final String LANGUAGE_DEFAULT_ISO3 = "eng"; protected static final String CHARSET_DEFAULT = "UTF-8"; /** Alternative browsers that have their own rendering engine and *may* be installed on this device */ protected static final String[] ALTERNATIVE_BROWSERS = new String[] { "org.mozilla.firefox", "com.android.chrome", "com.opera.browser", "org.mozilla.firefox_beta", "com.chrome.beta", "com.opera.browser.beta" }; protected WeakReference<Activity> mActivity; protected WeakReference<Fragment> mFragment; protected Listener mListener; protected final List<String> mPermittedHostnames = new LinkedList<String>(); /** File upload callback for platform versions prior to Android 5.0 */ protected ValueCallback<Uri> mFileUploadCallbackFirst; /** File upload callback for Android 5.0+ */ protected 
ValueCallback<Uri[]> mFileUploadCallbackSecond; protected long mLastError; protected String mLanguageIso3; protected int mRequestCodeFilePicker = REQUEST_CODE_FILE_PICKER; protected WebViewClient mCustomWebViewClient; protected WebChromeClient mCustomWebChromeClient; protected boolean mGeolocationEnabled; protected String mUploadableFileTypes = "*/*"; protected final Map<String, String> mHttpHeaders = new HashMap<String, String>(); public AdvancedWebView(Context context) { super(context); init(context); } public AdvancedWebView(Context context, AttributeSet attrs) { super(context, attrs); init(context); } public AdvancedWebView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(context); } public void setListener(final Activity activity, final Listener listener) { setListener(activity, listener, REQUEST_CODE_FILE_PICKER); } public void setListener(final Activity activity, final Listener listener, final int requestCodeFilePicker) { if (activity != null) { mActivity = new WeakReference<Activity>(activity); } else { mActivity = null; } setListener(listener, requestCodeFilePicker); } public void setListener(final Fragment fragment, final Listener listener) { setListener(fragment, listener, REQUEST_CODE_FILE_PICKER); } public void setListener(final Fragment fragment, final Listener listener, final int requestCodeFilePicker) { if (fragment != null) { mFragment = new WeakReference<Fragment>(fragment); } else { mFragment = null; } setListener(listener, requestCodeFilePicker); } protected void setListener(final Listener listener, final int requestCodeFilePicker) { mListener = listener; mRequestCodeFilePicker = requestCodeFilePicker; } @Override public void setWebViewClient(final WebViewClient client) { mCustomWebViewClient = client; } @Override public void setWebChromeClient(final WebChromeClient client) { mCustomWebChromeClient = client; } @SuppressLint("SetJavaScriptEnabled") public void setGeolocationEnabled(final boolean enabled) { if (enabled) { getSettings().setJavaScriptEnabled(true); getSettings().setGeolocationEnabled(true); setGeolocationDatabasePath(); } mGeolocationEnabled = enabled; } @SuppressLint("NewApi") protected void setGeolocationDatabasePath() { final Activity activity; if (mFragment != null && mFragment.get() != null && Build.VERSION.SDK_INT >= 11 && mFragment.get().getActivity() != null) { activity = mFragment.get().getActivity(); } else if (mActivity != null && mActivity.get() != null) { activity = mActivity.get(); } else { return; } getSettings().setGeolocationDatabasePath(activity.getFilesDir().getPath()); } public void setUploadableFileTypes(final String mimeType) { mUploadableFileTypes = mimeType; } /** * Loads and displays the provided HTML source text * * @param html the HTML source text to load */ public void loadHtml(final String html) { loadHtml(html, null); } /** * Loads and displays the provided HTML source text * * @param html the HTML source text to load * @param baseUrl the URL to use as the page's base URL */ public void loadHtml(final String html, final String baseUrl) { loadHtml(html, baseUrl, null); } /** * Loads and displays the provided HTML source text * * @param html the HTML source text to load * @param baseUrl the URL to use as the page's base URL * @param historyUrl the URL to use for the page's history entry */ public void loadHtml(final String html, final String baseUrl, final String historyUrl) { loadHtml(html, baseUrl, historyUrl, "utf-8"); } /** * Loads and displays the provided HTML source text * 
* @param html the HTML source text to load * @param baseUrl the URL to use as the page's base URL * @param historyUrl the URL to use for the page's history entry * @param encoding the encoding or charset of the HTML source text */ public void loadHtml(final String html, final String baseUrl, final String historyUrl, final String encoding) { loadDataWithBaseURL(baseUrl, html, "text/html", encoding, historyUrl); } @SuppressLint("NewApi") @SuppressWarnings("all") public void onResume() { if (Build.VERSION.SDK_INT >= 11) { super.onResume(); } resumeTimers(); } @SuppressLint("NewApi") @SuppressWarnings("all") public void onPause() { pauseTimers(); if (Build.VERSION.SDK_INT >= 11) { super.onPause(); } } public void onDestroy() { // try to remove this view from its parent first try { ((ViewGroup) getParent()).removeView(this); } catch (Exception e) { } // then try to remove all child views from this view try { removeAllViews(); } catch (Exception e) { } // and finally destroy this view destroy(); } public void onActivityResult(final int requestCode, final int resultCode, final Intent intent) { if (requestCode == mRequestCodeFilePicker) { if (resultCode == Activity.RESULT_OK) { if (intent != null) { if (mFileUploadCallbackFirst != null) { mFileUploadCallbackFirst.onReceiveValue(intent.getData()); mFileUploadCallbackFirst = null; } else if (mFileUploadCallbackSecond != null) { Uri[] dataUris = null; try { if (intent.getDataString() != null) { dataUris = new Uri[] { Uri.parse(intent.getDataString()) }; } else { if (Build.VERSION.SDK_INT >= 16) { if (intent.getClipData() != null) { final int numSelectedFiles = intent.getClipData().getItemCount(); dataUris = new Uri[numSelectedFiles]; for (int i = 0; i < numSelectedFiles; i++) { dataUris[i] = intent.getClipData().getItemAt(i).getUri(); } } } } } catch (Exception ignored) { } mFileUploadCallbackSecond.onReceiveValue(dataUris); mFileUploadCallbackSecond = null; } } } else { if (mFileUploadCallbackFirst != null) { mFileUploadCallbackFirst.onReceiveValue(null); mFileUploadCallbackFirst = null; } else if (mFileUploadCallbackSecond != null) { mFileUploadCallbackSecond.onReceiveValue(null); mFileUploadCallbackSecond = null; } } } } /** * Adds an additional HTTP header that will be sent along with every HTTP `GET` request * * This does only affect the main requests, not the requests to included resources (e.g. images) * * If you later want to delete an HTTP header that was previously added this way, call `removeHttpHeader()` * * The `WebView` implementation may in some cases overwrite headers that you set or unset * * @param name the name of the HTTP header to add * @param value the value of the HTTP header to send */ public void addHttpHeader(final String name, final String value) { mHttpHeaders.put(name, value); } /** * Removes one of the HTTP headers that have previously been added via `addHttpHeader()` * * If you want to unset a pre-defined header, set it to an empty string with `addHttpHeader()` instead * * The `WebView` implementation may in some cases overwrite headers that you set or unset * * @param name the name of the HTTP header to remove */ public void removeHttpHeader(final String name) { mHttpHeaders.remove(name); } public void addPermittedHostname(String hostname) { mPermittedHostnames.add(hostname); } public void addPermittedHostnames(Collection<? 
extends String> collection) { mPermittedHostnames.addAll(collection); } public List<String> getPermittedHostnames() { return mPermittedHostnames; } public void removePermittedHostname(String hostname) { mPermittedHostnames.remove(hostname); } public void clearPermittedHostnames() { mPermittedHostnames.clear(); } public boolean onBackPressed() { if (canGoBack()) { goBack(); return false; } else { return true; } } @SuppressLint("NewApi") protected static void setAllowAccessFromFileUrls(final WebSettings webSettings, final boolean allowed) { if (Build.VERSION.SDK_INT >= 16) { webSettings.setAllowFileAccessFromFileURLs(allowed); webSettings.setAllowUniversalAccessFromFileURLs(allowed); } } @SuppressWarnings("static-method") public void setCookiesEnabled(final boolean enabled) { CookieManager.getInstance().setAcceptCookie(enabled); } @SuppressLint("NewApi") public void setThirdPartyCookiesEnabled(final boolean enabled) { if (Build.VERSION.SDK_INT >= 21) { CookieManager.getInstance().setAcceptThirdPartyCookies(this, enabled); } } public void setMixedContentAllowed(final boolean allowed) { setMixedContentAllowed(getSettings(), allowed); } @SuppressWarnings("static-method") @SuppressLint("NewApi") protected void setMixedContentAllowed(final WebSettings webSettings, final boolean allowed) { if (Build.VERSION.SDK_INT >= 21) { webSettings.setMixedContentMode(allowed ? WebSettings.MIXED_CONTENT_ALWAYS_ALLOW : WebSettings.MIXED_CONTENT_NEVER_ALLOW); } } public void setDesktopMode(final boolean enabled) { final WebSettings webSettings = getSettings(); final String newUserAgent; if (enabled) { newUserAgent = webSettings.getUserAgentString().replace("Mobile", "eliboM").replace("Android", "diordnA"); } else { newUserAgent = webSettings.getUserAgentString().replace("eliboM", "Mobile").replace("diordnA", "Android"); } webSettings.setUserAgentString(newUserAgent); webSettings.setUseWideViewPort(enabled); webSettings.setLoadWithOverviewMode(enabled); webSettings.setSupportZoom(enabled); webSettings.setBuiltInZoomControls(enabled); } @SuppressLint({ "SetJavaScriptEnabled" }) protected void init(Context context) { // in IDE's preview mode if (isInEditMode()) { // do not run the code from this method return; } if (context instanceof Activity) { mActivity = new WeakReference<Activity>((Activity) context); } mLanguageIso3 = getLanguageIso3(); setFocusable(true); setFocusableInTouchMode(true); setSaveEnabled(true); final String filesDir = context.getFilesDir().getPath(); final String databaseDir = filesDir.substring(0, filesDir.lastIndexOf("/")) + DATABASES_SUB_FOLDER; final WebSettings webSettings = getSettings(); webSettings.setAllowFileAccess(false); setAllowAccessFromFileUrls(webSettings, false); webSettings.setBuiltInZoomControls(false); webSettings.setJavaScriptEnabled(true); webSettings.setDomStorageEnabled(true); if (Build.VERSION.SDK_INT < 18) { webSettings.setRenderPriority(WebSettings.RenderPriority.HIGH); } webSettings.setDatabaseEnabled(true); if (Build.VERSION.SDK_INT < 19) { webSettings.setDatabasePath(databaseDir); } setMixedContentAllowed(webSettings, true); setThirdPartyCookiesEnabled(true); super.setWebViewClient(new WebViewClient() { @Override public void onPageStarted(WebView view, String url, Bitmap favicon) { if (!hasError()) { if (mListener != null) { mListener.onPageStarted(url, favicon); } } if (mCustomWebViewClient != null) { mCustomWebViewClient.onPageStarted(view, url, favicon); } } @Override public void onPageFinished(WebView view, String url) { if (!hasError()) { if (mListener != null) 
{ mListener.onPageFinished(url); } } if (mCustomWebViewClient != null) { mCustomWebViewClient.onPageFinished(view, url); } } @Override public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) { setLastError(); if (mListener != null) { mListener.onPageError(errorCode, description, failingUrl); } if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedError(view, errorCode, description, failingUrl); } } @Override public boolean shouldOverrideUrlLoading(final WebView view, final String url) { // if the hostname may not be accessed if (!isHostnameAllowed(url)) { // if a listener is available if (mListener != null) { // inform the listener about the request mListener.onExternalPageRequest(url); } // cancel the original request return true; } // if there is a user-specified handler available if (mCustomWebViewClient != null) { // if the user-specified handler asks to override the request if (mCustomWebViewClient.shouldOverrideUrlLoading(view, url)) { // cancel the original request return true; } } // route the request through the custom URL loading method view.loadUrl(url); // cancel the original request return true; } @Override public void onLoadResource(WebView view, String url) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onLoadResource(view, url); } else { super.onLoadResource(view, url); } } @SuppressLint("NewApi") @SuppressWarnings("all") public WebResourceResponse shouldInterceptRequest(WebView view, String url) { if (Build.VERSION.SDK_INT >= 11) { if (mCustomWebViewClient != null) { return mCustomWebViewClient.shouldInterceptRequest(view, url); } else { return super.shouldInterceptRequest(view, url); } } else { return null; } } @SuppressLint("NewApi") @SuppressWarnings("all") public WebResourceResponse shouldInterceptRequest(WebView view, WebResourceRequest request) { if (Build.VERSION.SDK_INT >= 21) { if (mCustomWebViewClient != null) { return mCustomWebViewClient.shouldInterceptRequest(view, request); } else { return super.shouldInterceptRequest(view, request); } } else { return null; } } @Override public void onFormResubmission(WebView view, Message dontResend, Message resend) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onFormResubmission(view, dontResend, resend); } else { super.onFormResubmission(view, dontResend, resend); } } @Override public void doUpdateVisitedHistory(WebView view, String url, boolean isReload) { if (mCustomWebViewClient != null) { mCustomWebViewClient.doUpdateVisitedHistory(view, url, isReload); } else { super.doUpdateVisitedHistory(view, url, isReload); } } @Override public void onReceivedSslError(WebView view, SslErrorHandler handler, SslError error) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedSslError(view, handler, error); } else { super.onReceivedSslError(view, handler, error); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onReceivedClientCertRequest(WebView view, ClientCertRequest request) { if (Build.VERSION.SDK_INT >= 21) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedClientCertRequest(view, request); } else { super.onReceivedClientCertRequest(view, request); } } } @Override public void onReceivedHttpAuthRequest(WebView view, HttpAuthHandler handler, String host, String realm) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedHttpAuthRequest(view, handler, host, realm); } else { super.onReceivedHttpAuthRequest(view, handler, host, realm); } } @Override public boolean 
shouldOverrideKeyEvent(WebView view, KeyEvent event) { if (mCustomWebViewClient != null) { return mCustomWebViewClient.shouldOverrideKeyEvent(view, event); } else { return super.shouldOverrideKeyEvent(view, event); } } @Override public void onUnhandledKeyEvent(WebView view, KeyEvent event) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onUnhandledKeyEvent(view, event); } else { super.onUnhandledKeyEvent(view, event); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onUnhandledInputEvent(WebView view, InputEvent event) { if (Build.VERSION.SDK_INT >= 21) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onUnhandledInputEvent(view, event); } else { super.onUnhandledInputEvent(view, event); } } } @Override public void onScaleChanged(WebView view, float oldScale, float newScale) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onScaleChanged(view, oldScale, newScale); } else { super.onScaleChanged(view, oldScale, newScale); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onReceivedLoginRequest(WebView view, String realm, String account, String args) { if (Build.VERSION.SDK_INT >= 12) { if (mCustomWebViewClient != null) { mCustomWebViewClient.onReceivedLoginRequest(view, realm, account, args); } else { super.onReceivedLoginRequest(view, realm, account, args); } } } }); super.setWebChromeClient(new WebChromeClient() { // file upload callback (Android 2.2 (API level 8) -- Android 2.3 (API level 10)) (hidden method) @SuppressWarnings("unused") public void openFileChooser(ValueCallback<Uri> uploadMsg) { openFileChooser(uploadMsg, null); } // file upload callback (Android 3.0 (API level 11) -- Android 4.0 (API level 15)) (hidden method) public void openFileChooser(ValueCallback<Uri> uploadMsg, String acceptType) { openFileChooser(uploadMsg, acceptType, null); } // file upload callback (Android 4.1 (API level 16) -- Android 4.3 (API level 18)) (hidden method) @SuppressWarnings("unused") public void openFileChooser(ValueCallback<Uri> uploadMsg, String acceptType, String capture) { openFileInput(uploadMsg, null, false); } // file upload callback (Android 5.0 (API level 21) -- current) (public method) @SuppressWarnings("all") public boolean onShowFileChooser(WebView webView, ValueCallback<Uri[]> filePathCallback, WebChromeClient.FileChooserParams fileChooserParams) { if (Build.VERSION.SDK_INT >= 21) { final boolean allowMultiple = fileChooserParams.getMode() == FileChooserParams.MODE_OPEN_MULTIPLE; openFileInput(null, filePathCallback, allowMultiple); return true; } else { return false; } } @Override public void onProgressChanged(WebView view, int newProgress) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onProgressChanged(view, newProgress); } else { super.onProgressChanged(view, newProgress); } } @Override public void onReceivedTitle(WebView view, String title) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onReceivedTitle(view, title); } else { super.onReceivedTitle(view, title); } } @Override public void onReceivedIcon(WebView view, Bitmap icon) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onReceivedIcon(view, icon); } else { super.onReceivedIcon(view, icon); } } @Override public void onReceivedTouchIconUrl(WebView view, String url, boolean precomposed) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onReceivedTouchIconUrl(view, url, precomposed); } else { super.onReceivedTouchIconUrl(view, url, precomposed); } } @Override public void onShowCustomView(View view, 
CustomViewCallback callback) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onShowCustomView(view, callback); } else { super.onShowCustomView(view, callback); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onShowCustomView(View view, int requestedOrientation, CustomViewCallback callback) { if (Build.VERSION.SDK_INT >= 14) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onShowCustomView(view, requestedOrientation, callback); } else { super.onShowCustomView(view, requestedOrientation, callback); } } } @Override public void onHideCustomView() { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onHideCustomView(); } else { super.onHideCustomView(); } } @Override public boolean onCreateWindow(WebView view, boolean isDialog, boolean isUserGesture, Message resultMsg) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onCreateWindow(view, isDialog, isUserGesture, resultMsg); } else { return super.onCreateWindow(view, isDialog, isUserGesture, resultMsg); } } @Override public void onRequestFocus(WebView view) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onRequestFocus(view); } else { super.onRequestFocus(view); } } @Override public void onCloseWindow(WebView window) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onCloseWindow(window); } else { super.onCloseWindow(window); } } @Override public boolean onJsAlert(WebView view, String url, String message, JsResult result) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsAlert(view, url, message, result); } else { return super.onJsAlert(view, url, message, result); } } @Override public boolean onJsConfirm(WebView view, String url, String message, JsResult result) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsConfirm(view, url, message, result); } else { return super.onJsConfirm(view, url, message, result); } } @Override public boolean onJsPrompt(WebView view, String url, String message, String defaultValue, JsPromptResult result) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsPrompt(view, url, message, defaultValue, result); } else { return super.onJsPrompt(view, url, message, defaultValue, result); } } @Override public boolean onJsBeforeUnload(WebView view, String url, String message, JsResult result) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsBeforeUnload(view, url, message, result); } else { return super.onJsBeforeUnload(view, url, message, result); } } @Override public void onGeolocationPermissionsShowPrompt(String origin, Callback callback) { if (mGeolocationEnabled) { callback.invoke(origin, true, false); } else { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onGeolocationPermissionsShowPrompt(origin, callback); } else { super.onGeolocationPermissionsShowPrompt(origin, callback); } } } @Override public void onGeolocationPermissionsHidePrompt() { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onGeolocationPermissionsHidePrompt(); } else { super.onGeolocationPermissionsHidePrompt(); } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onPermissionRequest(PermissionRequest request) { if (Build.VERSION.SDK_INT >= 21) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onPermissionRequest(request); } else { super.onPermissionRequest(request); } } } @SuppressLint("NewApi") @SuppressWarnings("all") public void onPermissionRequestCanceled(PermissionRequest request) { if 
(Build.VERSION.SDK_INT >= 21) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onPermissionRequestCanceled(request); } else { super.onPermissionRequestCanceled(request); } } } @Override public boolean onJsTimeout() { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onJsTimeout(); } else { return super.onJsTimeout(); } } @Override public void onConsoleMessage(String message, int lineNumber, String sourceID) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onConsoleMessage(message, lineNumber, sourceID); } else { super.onConsoleMessage(message, lineNumber, sourceID); } } @Override public boolean onConsoleMessage(ConsoleMessage consoleMessage) { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.onConsoleMessage(consoleMessage); } else { return super.onConsoleMessage(consoleMessage); } } @Override public Bitmap getDefaultVideoPoster() { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.getDefaultVideoPoster(); } else { return super.getDefaultVideoPoster(); } } @Override public View getVideoLoadingProgressView() { if (mCustomWebChromeClient != null) { return mCustomWebChromeClient.getVideoLoadingProgressView(); } else { return super.getVideoLoadingProgressView(); } } @Override public void getVisitedHistory(ValueCallback<String[]> callback) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.getVisitedHistory(callback); } else { super.getVisitedHistory(callback); } } @Override public void onExceededDatabaseQuota(String url, String databaseIdentifier, long quota, long estimatedDatabaseSize, long totalQuota, QuotaUpdater quotaUpdater) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onExceededDatabaseQuota(url, databaseIdentifier, quota, estimatedDatabaseSize, totalQuota, quotaUpdater); } else { super.onExceededDatabaseQuota(url, databaseIdentifier, quota, estimatedDatabaseSize, totalQuota, quotaUpdater); } } @Override public void onReachedMaxAppCacheSize(long requiredStorage, long quota, QuotaUpdater quotaUpdater) { if (mCustomWebChromeClient != null) { mCustomWebChromeClient.onReachedMaxAppCacheSize(requiredStorage, quota, quotaUpdater); } else { super.onReachedMaxAppCacheSize(requiredStorage, quota, quotaUpdater); } } }); setDownloadListener(new DownloadListener() { @Override public void onDownloadStart(final String url, final String userAgent, final String contentDisposition, final String mimeType, final long contentLength) { final String suggestedFilename = URLUtil.guessFileName(url, contentDisposition, mimeType); if (mListener != null) { mListener.onDownloadRequested(url, suggestedFilename, mimeType, contentLength, contentDisposition, userAgent); } } }); } @Override public void loadUrl(final String url, Map<String, String> additionalHttpHeaders) { if (additionalHttpHeaders == null) { additionalHttpHeaders = mHttpHeaders; } else if (mHttpHeaders.size() > 0) { additionalHttpHeaders.putAll(mHttpHeaders); } super.loadUrl(url, additionalHttpHeaders); } @Override public void loadUrl(final String url) { if (mHttpHeaders.size() > 0) { super.loadUrl(url, mHttpHeaders); } else { super.loadUrl(url); } } public void loadUrl(String url, final boolean preventCaching) { if (preventCaching) { url = makeUrlUnique(url); } loadUrl(url); } public void loadUrl(String url, final boolean preventCaching, final Map<String,String> additionalHttpHeaders) { if (preventCaching) { url = makeUrlUnique(url); } loadUrl(url, additionalHttpHeaders); } protected static String makeUrlUnique(final String url) { 
StringBuilder unique = new StringBuilder(); unique.append(url); if (url.contains("?")) { unique.append('&'); } else { if (url.lastIndexOf('/') <= 7) { unique.append('/'); } unique.append('?'); } unique.append(System.currentTimeMillis()); unique.append('='); unique.append(1); return unique.toString(); } protected boolean isHostnameAllowed(final String url) { // if the permitted hostnames have not been restricted to a specific set if (mPermittedHostnames.size() == 0) { // all hostnames are allowed return true; } // get the actual hostname of the URL that is to be checked final String actualHost = Uri.parse(url).getHost(); // for every hostname in the set of permitted hosts for (String expectedHost : mPermittedHostnames) { // if the two hostnames match or if the actual host is a subdomain of the expected host if (actualHost.equals(expectedHost) || actualHost.endsWith("."+expectedHost)) { // the actual hostname of the URL to be checked is allowed return true; } } // the actual hostname of the URL to be checked is not allowed since there were no matches return false; } protected void setLastError() { mLastError = System.currentTimeMillis(); } protected boolean hasError() { return (mLastError + 500) >= System.currentTimeMillis(); } protected static String getLanguageIso3() { try { return Locale.getDefault().getISO3Language().toLowerCase(Locale.US); } catch (MissingResourceException e) { return LANGUAGE_DEFAULT_ISO3; } } /** Provides localizations for the 25 most widely spoken languages that have a ISO 639-2/T code */ protected String getFileUploadPromptLabel() { try { if (mLanguageIso3.equals("zho")) return decodeBase64("6YCJ5oup5LiA5Liq5paH5Lu2"); else if (mLanguageIso3.equals("spa")) return decodeBase64("RWxpamEgdW4gYXJjaGl2bw=="); else if (mLanguageIso3.equals("hin")) return decodeBase64("4KSP4KSVIOCkq+CkvOCkvuCkh+CksiDgpJrgpYHgpKjgpYfgpII="); else if (mLanguageIso3.equals("ben")) return decodeBase64("4KaP4KaV4Kaf4Ka/IOCmq+CmvuCmh+CmsiDgpqjgpr/gprDgp43gpqzgpr7gpprgpqg="); else if (mLanguageIso3.equals("ara")) return decodeBase64("2KfYrtiq2YrYp9ixINmF2YTZgSDZiNin2K3Yrw=="); else if (mLanguageIso3.equals("por")) return decodeBase64("RXNjb2xoYSB1bSBhcnF1aXZv"); else if (mLanguageIso3.equals("rus")) return decodeBase64("0JLRi9Cx0LXRgNC40YLQtSDQvtC00LjQvSDRhNCw0LnQuw=="); else if (mLanguageIso3.equals("jpn")) return decodeBase64("MeODleOCoeOCpOODq+OCkumBuOaKnuOBl+OBpuOBj+OBoOOBleOBhA=="); else if (mLanguageIso3.equals("pan")) return decodeBase64("4KiH4Kmx4KiVIOCoq+CovuCoh+CosiDgqJrgqYHgqKPgqYs="); else if (mLanguageIso3.equals("deu")) return decodeBase64("V8OkaGxlIGVpbmUgRGF0ZWk="); else if (mLanguageIso3.equals("jav")) return decodeBase64("UGlsaWggc2lqaSBiZXJrYXM="); else if (mLanguageIso3.equals("msa")) return decodeBase64("UGlsaWggc2F0dSBmYWls"); else if (mLanguageIso3.equals("tel")) return decodeBase64("4LCS4LCVIOCwq+CxhuCxluCwsuCxjeCwqOCxgSDgsI7gsILgsJrgsYHgsJXgsYvgsILgsKHgsL8="); else if (mLanguageIso3.equals("vie")) return decodeBase64("Q2jhu41uIG3hu5l0IHThuq1wIHRpbg=="); else if (mLanguageIso3.equals("kor")) return decodeBase64("7ZWY64KY7J2YIO2MjOydvOydhCDshKDtg50="); else if (mLanguageIso3.equals("fra")) return decodeBase64("Q2hvaXNpc3NleiB1biBmaWNoaWVy"); else if (mLanguageIso3.equals("mar")) return decodeBase64("4KSr4KS+4KSH4KSyIOCkqOCkv+CkteCkoeCkvg=="); else if (mLanguageIso3.equals("tam")) return decodeBase64("4K6S4K6w4K+BIOCuleCvh+CuvuCuquCvjeCuquCviCDgrqTgr4fgrrDgr43grrXgr4E="); else if (mLanguageIso3.equals("urd")) return 
decodeBase64("2KfbjNqpINmB2KfYptmEINmF24zauiDYs9uSINin2YbYqtiu2KfYqCDaqdix24zaug=="); else if (mLanguageIso3.equals("fas")) return decodeBase64("2LHYpyDYp9mG2KrYrtin2Kgg2qnZhtuM2K8g24zaqSDZgdin24zZhA=="); else if (mLanguageIso3.equals("tur")) return decodeBase64("QmlyIGRvc3lhIHNlw6dpbg=="); else if (mLanguageIso3.equals("ita")) return decodeBase64("U2NlZ2xpIHVuIGZpbGU="); else if (mLanguageIso3.equals("tha")) return decodeBase64("4LmA4Lil4Li34Lit4LiB4LmE4Lif4Lil4LmM4Lir4LiZ4Li24LmI4LiH"); else if (mLanguageIso3.equals("guj")) return decodeBase64("4KqP4KqVIOCqq+CqvuCqh+CqsuCqqOCrhyDgqqrgqrjgqoLgqqY="); } catch (Exception e) { } // return English translation by default return "Choose a file"; } protected static String decodeBase64(final String base64) throws IllegalArgumentException, UnsupportedEncodingException { final byte[] bytes = Base64.decode(base64, Base64.DEFAULT); return new String(bytes, CHARSET_DEFAULT); } @SuppressLint("NewApi") protected void openFileInput(final ValueCallback<Uri> fileUploadCallbackFirst, final ValueCallback<Uri[]> fileUploadCallbackSecond, final boolean allowMultiple) { if (mFileUploadCallbackFirst != null) { mFileUploadCallbackFirst.onReceiveValue(null); } mFileUploadCallbackFirst = fileUploadCallbackFirst; if (mFileUploadCallbackSecond != null) { mFileUploadCallbackSecond.onReceiveValue(null); } mFileUploadCallbackSecond = fileUploadCallbackSecond; Intent i = new Intent(Intent.ACTION_GET_CONTENT); i.addCategory(Intent.CATEGORY_OPENABLE); if (allowMultiple) { if (Build.VERSION.SDK_INT >= 18) { i.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, true); } } i.setType(mUploadableFileTypes); if (mFragment != null && mFragment.get() != null && Build.VERSION.SDK_INT >= 11) { mFragment.get().startActivityForResult(Intent.createChooser(i, getFileUploadPromptLabel()), mRequestCodeFilePicker); } else if (mActivity != null && mActivity.get() != null) { mActivity.get().startActivityForResult(Intent.createChooser(i, getFileUploadPromptLabel()), mRequestCodeFilePicker); } } /** * Returns whether file uploads can be used on the current device (generally all platform versions except for 4.4) * * @return whether file uploads can be used */ public static boolean isFileUploadAvailable() { return isFileUploadAvailable(false); } /** * Returns whether file uploads can be used on the current device (generally all platform versions except for 4.4) * * On Android 4.4.3/4.4.4, file uploads may be possible but will come with a wrong MIME type * * @param needsCorrectMimeType whether a correct MIME type is required for file uploads or `application/octet-stream` is acceptable * @return whether file uploads can be used */ public static boolean isFileUploadAvailable(final boolean needsCorrectMimeType) { if (Build.VERSION.SDK_INT == 19) { final String platformVersion = (Build.VERSION.RELEASE == null) ? "" : Build.VERSION.RELEASE; return !needsCorrectMimeType && (platformVersion.startsWith("4.4.3") || platformVersion.startsWith("4.4.4")); } else { return true; } } /** * Handles a download by loading the file from `fromUrl` and saving it to `toFilename` on the external storage * * This requires the two permissions `android.permission.INTERNET` and `android.permission.WRITE_EXTERNAL_STORAGE` * * Only supported on API level 9 (Android 2.3) and above * * @param context a valid `Context` reference * @param fromUrl the URL of the file to download, e.g. the one from `AdvancedWebView.onDownloadRequested(...)` * @param toFilename the name of the destination file where the download should be saved, e.g. 
`myImage.jpg` * @return whether the download has been successfully handled or not */ @SuppressLint("NewApi") public static boolean handleDownload(final Context context, final String fromUrl, final String toFilename) { if (Build.VERSION.SDK_INT < 9) { throw new RuntimeException("Method requires API level 9 or above"); } final Request request = new Request(Uri.parse(fromUrl)); if (Build.VERSION.SDK_INT >= 11) { request.allowScanningByMediaScanner(); request.setNotificationVisibility(DownloadManager.Request.VISIBILITY_VISIBLE_NOTIFY_COMPLETED); } request.setDestinationInExternalPublicDir(Environment.DIRECTORY_DOWNLOADS, toFilename); final DownloadManager dm = (DownloadManager) context.getSystemService(Context.DOWNLOAD_SERVICE); try { try { dm.enqueue(request); } catch (SecurityException e) { if (Build.VERSION.SDK_INT >= 11) { request.setNotificationVisibility(DownloadManager.Request.VISIBILITY_VISIBLE); } dm.enqueue(request); } return true; } // if the download manager app has been disabled on the device catch (IllegalArgumentException e) { // show the settings screen where the user can enable the download manager app again openAppSettings(context, AdvancedWebView.PACKAGE_NAME_DOWNLOAD_MANAGER); return false; } } @SuppressLint("NewApi") private static boolean openAppSettings(final Context context, final String packageName) { if (Build.VERSION.SDK_INT < 9) { throw new RuntimeException("Method requires API level 9 or above"); } try { final Intent intent = new Intent(android.provider.Settings.ACTION_APPLICATION_DETAILS_SETTINGS); intent.setData(Uri.parse("package:" + packageName)); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(intent); return true; } catch (Exception e) { return false; } } /** Wrapper for methods related to alternative browsers that have their own rendering engines */ public static class Browsers { /** Package name of an alternative browser that is installed on this device */ private static String mAlternativePackage; /** * Returns whether there is an alternative browser with its own rendering engine currently installed * * @param context a valid `Context` reference * @return whether there is an alternative browser or not */ public static boolean hasAlternative(final Context context) { return getAlternative(context) != null; } /** * Returns the package name of an alternative browser with its own rendering engine or `null` * * @param context a valid `Context` reference * @return the package name or `null` */ public static String getAlternative(final Context context) { if (mAlternativePackage != null) { return mAlternativePackage; } final List<String> alternativeBrowsers = Arrays.asList(ALTERNATIVE_BROWSERS); final List<ApplicationInfo> apps = context.getPackageManager().getInstalledApplications(PackageManager.GET_META_DATA); for (ApplicationInfo app : apps) { if (!app.enabled) { continue; } if (alternativeBrowsers.contains(app.packageName)) { mAlternativePackage = app.packageName; return app.packageName; } } return null; } /** * Opens the given URL in an alternative browser * * @param context a valid `Activity` reference * @param url the URL to open */ public static void openUrl(final Activity context, final String url) { openUrl(context, url, false); } /** * Opens the given URL in an alternative browser * * @param context a valid `Activity` reference * @param url the URL to open * @param withoutTransition whether to switch to the browser `Activity` without a transition */ public static void openUrl(final Activity context, final String url, final boolean 
withoutTransition) { final Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url)); intent.setPackage(getAlternative(context)); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(intent); if (withoutTransition) { context.overridePendingTransition(0, 0); } } } }
Improve code style
Source/library/src/main/java/im/delight/android/webview/AdvancedWebView.java
Improve code style
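For orientation, a minimal host Activity for the AdvancedWebView record above might look as follows. This is an illustrative sketch, not part of the commit: the Listener callback signatures are inferred from the call sites in the new_contents field, the package comes from the new_file path, and the activity name, URL and programmatic layout are assumptions.

// Illustrative sketch only -- not part of the record above.
// Assumes Listener is the nested AdvancedWebView.Listener interface referenced in the record.
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.Bundle;
import im.delight.android.webview.AdvancedWebView;

public class BrowserActivity extends Activity implements AdvancedWebView.Listener {

    private AdvancedWebView mWebView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mWebView = new AdvancedWebView(this);
        setContentView(mWebView);
        mWebView.setListener(this, this);            // receives page events and file-chooser results
        mWebView.loadUrl("https://www.example.org/"); // hypothetical URL
    }

    // lifecycle forwarding, matching the onResume/onPause/onDestroy hooks in the record
    @Override protected void onResume() { super.onResume(); mWebView.onResume(); }
    @Override protected void onPause() { mWebView.onPause(); super.onPause(); }
    @Override protected void onDestroy() { mWebView.onDestroy(); super.onDestroy(); }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
        super.onActivityResult(requestCode, resultCode, intent);
        mWebView.onActivityResult(requestCode, resultCode, intent); // delivers file-upload results
    }

    @Override
    public void onBackPressed() {
        if (mWebView.onBackPressed()) {   // true only when the WebView has no more history
            super.onBackPressed();
        }
    }

    // Listener callbacks (signatures inferred from the call sites in the record)
    @Override public void onPageStarted(String url, Bitmap favicon) { }
    @Override public void onPageFinished(String url) { }
    @Override public void onPageError(int errorCode, String description, String failingUrl) { }
    @Override public void onDownloadRequested(String url, String suggestedFilename, String mimeType,
            long contentLength, String contentDisposition, String userAgent) { }
    @Override public void onExternalPageRequest(String url) { }
}

Note the back-press contract in the record: onBackPressed() returns true only when the WebView cannot go back any further, which is why the sketch falls through to super.onBackPressed() in exactly that case.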
Java
mit
8deb93f5a7bc0af91087fc0d94ef6da033e5162a
0
r1chardj0n3s/pycode-minecraft
/* * Copyright (c) 2017 Richard Jones <richard@mechanicalcat.net> * All Rights Reserved * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * */ package net.mechanicalcat.pycode.script; import net.mechanicalcat.pycode.PythonEngine; import net.minecraft.command.ICommandSender; import net.minecraft.entity.Entity; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.item.EnumDyeColor; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.EnumFacing; import net.minecraft.util.EnumParticleTypes; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.minecraft.world.WorldServer; import net.minecraftforge.fml.common.FMLLog; import org.python.core.Py; import org.python.core.PyFunction; import org.python.core.PyObject; import javax.annotation.Nullable; import javax.script.*; import java.io.PrintWriter; import java.io.StringWriter; import java.util.HashMap; import java.util.LinkedList; import java.util.List; public class PythonCode { private String code = ""; private boolean codeChanged = false; private SimpleScriptContext context; private Bindings bindings; private World world = null; private BlockPos pos; private ICommandSender runner; public static String CODE_NBT_TAG = "code"; public MyEntityPlayers players; public PythonCode() { this.context = new SimpleScriptContext(); this.bindings = new SimpleBindings(); this.context.setBindings(this.bindings, ScriptContext.ENGINE_SCOPE); } public String getCode() {return code;} public boolean hasCode() {return !code.isEmpty();} public void check(String code) throws ScriptException { PythonEngine.compile(code); } public void setCodeString(String code) { this.code = code; this.codeChanged = true; } public void writeToNBT(NBTTagCompound compound) { compound.setString(CODE_NBT_TAG, this.code); } public void readFromNBT(NBTTagCompound compound) { this.setCodeString(compound.getString(CODE_NBT_TAG)); } // CODE BINDINGS public void put(String key,Object val) { this.bindings.put(key, val); } public boolean hasKey(String key) { return this.bindings.containsKey(key); } static public void failz0r(World world, BlockPos pos, String fmt, Object... 
args) { if (world.isRemote) return; ((WorldServer)world).spawnParticle(EnumParticleTypes.SPELL, pos.getX() + .5, pos.getY() + 1, pos.getZ() + .5, 20, 0, 0, 0, .5, new int[0]); FMLLog.severe(fmt, args); } // TODO refactor this to be more generic public void invoke(String method, MyEntity entity) { PyObject obj = (PyObject) this.bindings.get(method); if (obj == null) { failz0r(world, pos, "Unknown function '%s'", method); return; } PyFunction func = (PyFunction)obj; // handle instances of optional player argument PyObject co_varnames = func.__code__.__getattr__("co_varnames"); if (entity instanceof MyEntityPlayer && !co_varnames.__contains__(Py.java2py("player"))) { // don't pass the player in if it's not expected try { func.__call__(); } catch (RuntimeException e) { failz0r(world, pos, "Error running code: %s", e.toString()); } return; } // carry on! try { func.__call__(Py.java2py(entity)); } catch (RuntimeException e) { failz0r(world, pos, "Error running code: %s", e.toString()); } } public void invoke(String method, @Nullable MyBase target) { PyObject obj = (PyObject) this.bindings.get(method); if (obj == null) { failz0r(world, pos, "Unknown function '%s'", method); return; } PyFunction func = (PyFunction)obj; // handle instances of optional player argument PyObject co_varnames = func.__code__.__getattr__("co_varnames"); if (co_varnames.__contains__(Py.java2py("target"))) { try { func.__call__(Py.java2py(target)); } catch (RuntimeException e) { failz0r(world, pos, "Error running code: %s", e.toString()); } } else { // don't pass the target in if it's not expected try { func.__call__(); } catch (RuntimeException e) { failz0r(world, pos, "Error running code: %s", e.toString()); } } } public void invoke(String method) { PyObject obj = (PyObject) this.bindings.get(method); if (obj == null) { failz0r(world, pos, "Unknown function '%s'", method); return; } PyFunction func = (PyFunction)obj; try { func.__call__(); } catch (RuntimeException e) { failz0r(world, pos, "Error running code: %s", e.toString()); } } public static final String bookAsString(ItemStack book) { NBTTagCompound bookData = book.getTagCompound(); NBTTagList pages; try { // pages are all of type TAG_String == 8 pages = bookData.getTagList("pages", 8); } catch (NullPointerException e) { // this should not happen! 
return null; } // collapse the pages into one string StringBuilder sbStr = new StringBuilder(); for(int i = 0;i<pages.tagCount();i++) { String s = pages.getStringTagAt(i); if (i > 0) sbStr.append("\n"); sbStr.append(s); } return sbStr.toString(); } public boolean setCodeFromBook(World world, EntityPlayer player, ICommandSender runner, BlockPos pos, ItemStack heldItem) { String code = bookAsString(heldItem); if (code == null) { failz0r(world, pos, "Could not get pages from the book!?"); return false; } this.setCodeString(code); // set context using the player so they get feedback on success/fail this.setContext(world, player, pos); // now set the default runner to be the code entity this.setRunner(runner); return true; } public void setRunner(ICommandSender runner) { this.runner = runner; this.bindings.put("__runner__", runner); } public void setContext(World world, ICommandSender runner, BlockPos pos) { if (this.world == world && this.runner == runner && this.pos == pos) { this.ensureCompiled(); return; } this.world = world; this.pos = pos; this.runner = runner; this.players = new MyEntityPlayers(world); this.bindings.put("pos", new MyBlockPos(pos)); this.bindings.put("runner", PyRegistry.myWrapper(world, runner)); // I am reasonably certain that I can't just shove the methods below directly // into the script engine namespace because I can't pass a Runnable as a // value to be stored in the engine namespace. this.bindings.put("__utils__", this); // So.. now I copy all those methods to set up the "utils" try { String s = ""; for (String n : utils) { s += String.format("%s = __utils__.%s\n", n, n); } PythonEngine.eval(s, this.context); } catch (ScriptException e) { failz0r(world, pos, "Error setting up utils: %s", e.getMessage()); return; } // create the MyCommand curries and attach callables to utils / global scope try { String s = ""; for (String n: MyCommands.COMMANDS.keySet()) { // bind the name to just the invoke method, using the dynamic runner value this.bindings.put("__" + n, MyCommands.curry(n, this.world)); s += String.format("%s = lambda *a: __%s.invoke(runner, *a)\n", n, n); } PythonEngine.eval(s, this.context); } catch (ScriptException e) { failz0r(world, pos, "Error setting up commands: %s", e.getMessage()); return; } this.ensureCompiled(); } public static final String stackTraceToString(Throwable t) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); t.printStackTrace(pw); return sw.toString(); } private void ensureCompiled() { if (!this.codeChanged) return; FMLLog.fine("Eval my code: %s", this.code); // now execute the code try { PythonEngine.eval(this.code, this.context); if (!world.isRemote) { ((WorldServer)world).spawnParticle(EnumParticleTypes.CRIT, pos.getX() + .5, pos.getY() + 1, pos.getZ() + .5, 20, 0, 0, 0, .5, new int[0]); } } catch (ScriptException e) { failz0r(world, pos, "Error running code, traceback:\n%s", stackTraceToString(e)); } this.codeChanged = false; } private String[] utils = {"colors", "facings", "players"}; public static HashMap<String, EnumDyeColor> COLORMAP = new HashMap<String, EnumDyeColor>(); public static HashMap<String, EnumFacing> FACINGMAP = new HashMap<String, EnumFacing>(); public static List<String> colors = new LinkedList<>(); public static List<String> facings = new LinkedList<>(); public static void init() { COLORMAP.put("white", EnumDyeColor.WHITE); COLORMAP.put("orange", EnumDyeColor.ORANGE); COLORMAP.put("magenta", EnumDyeColor.MAGENTA); COLORMAP.put("lightBlue", EnumDyeColor.LIGHT_BLUE); 
COLORMAP.put("yellow", EnumDyeColor.YELLOW); COLORMAP.put("lime", EnumDyeColor.LIME); COLORMAP.put("pink", EnumDyeColor.PINK); COLORMAP.put("gray", EnumDyeColor.GRAY); COLORMAP.put("silver", EnumDyeColor.SILVER); COLORMAP.put("cyan", EnumDyeColor.CYAN); COLORMAP.put("purple", EnumDyeColor.PURPLE); COLORMAP.put("blue", EnumDyeColor.BLUE); COLORMAP.put("brown", EnumDyeColor.BROWN); COLORMAP.put("green", EnumDyeColor.GREEN); COLORMAP.put("red", EnumDyeColor.RED ); COLORMAP.put("black", EnumDyeColor.BLACK); FACINGMAP.put("down", EnumFacing.DOWN); FACINGMAP.put("up", EnumFacing.UP); FACINGMAP.put("north", EnumFacing.NORTH); FACINGMAP.put("south", EnumFacing.SOUTH); FACINGMAP.put("west", EnumFacing.WEST); FACINGMAP.put("east", EnumFacing.EAST); for (String name : COLORMAP.keySet()) { colors.add(name); } for (String name : FACINGMAP.keySet()) { facings.add(name); } } }
src/main/java/net/mechanicalcat/pycode/script/PythonCode.java
/* * Copyright (c) 2017 Richard Jones <richard@mechanicalcat.net> * All Rights Reserved * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * */ package net.mechanicalcat.pycode.script; import net.mechanicalcat.pycode.PythonEngine; import net.minecraft.command.ICommandSender; import net.minecraft.entity.Entity; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.item.EnumDyeColor; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.EnumFacing; import net.minecraft.util.EnumParticleTypes; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.minecraft.world.WorldServer; import net.minecraftforge.fml.common.FMLLog; import org.python.core.Py; import org.python.core.PyFunction; import org.python.core.PyObject; import javax.annotation.Nullable; import javax.script.*; import java.io.PrintWriter; import java.io.StringWriter; import java.util.HashMap; import java.util.LinkedList; import java.util.List; public class PythonCode { private String code = ""; private boolean codeChanged = false; private SimpleScriptContext context; private Bindings bindings; private World world = null; private BlockPos pos; private ICommandSender runner; public static String CODE_NBT_TAG = "code"; public MyEntityPlayers players; public PythonCode() { this.context = new SimpleScriptContext(); this.bindings = new SimpleBindings(); this.context.setBindings(this.bindings, ScriptContext.ENGINE_SCOPE); } public String getCode() {return code;} public boolean hasCode() {return !code.isEmpty();} public void check(String code) throws ScriptException { PythonEngine.compile(code); } public void setCodeString(String code) { this.code = code; this.codeChanged = true; } public void writeToNBT(NBTTagCompound compound) { compound.setString(CODE_NBT_TAG, this.code); } public void readFromNBT(NBTTagCompound compound) { this.setCodeString(compound.getString(CODE_NBT_TAG)); } // CODE BINDINGS public void put(String key,Object val) { this.bindings.put(key, val); } public boolean hasKey(String key) { return this.bindings.containsKey(key); } static public void failz0r(World world, BlockPos pos, String fmt, Object... 
args) { if (world.isRemote) return; ((WorldServer)world).spawnParticle(EnumParticleTypes.SPELL, pos.getX() + .5, pos.getY() + 1, pos.getZ() + .5, 20, 0, 0, 0, .5, new int[0]); FMLLog.severe(fmt, args); } // TODO refactor this to be more generic public void invoke(String method, MyEntity entity) { PyObject obj = (PyObject) this.bindings.get(method); if (obj == null) { failz0r(world, pos, "Unknown function '%s'", method); return; } PyFunction func = (PyFunction)obj; // handle instances of optional player argument PyObject co_varnames = func.__code__.__getattr__("co_varnames"); if (entity instanceof MyEntityPlayer && !co_varnames.__contains__(Py.java2py("player"))) { // don't pass the player in if it's not expected try { func.__call__(); } catch (NullPointerException e) { failz0r(world, pos, "Error running code: ", e.getMessage()); } return; } // carry on! try { func.__call__(Py.java2py(entity)); } catch (NullPointerException e) { failz0r(world, pos, "Error running code: %s", e.getMessage()); } } public void invoke(String method, @Nullable MyBase target) { PyObject obj = (PyObject) this.bindings.get(method); if (obj == null) { failz0r(world, pos, "Unknown function '%s'", method); return; } PyFunction func = (PyFunction)obj; // handle instances of optional player argument PyObject co_varnames = func.__code__.__getattr__("co_varnames"); if (co_varnames.__contains__(Py.java2py("target"))) { try { func.__call__(Py.java2py(target)); } catch (NullPointerException e) { failz0r(world, pos, "Error running code: %s", e.getMessage()); } } else { // don't pass the target in if it's not expected try { func.__call__(); } catch (NullPointerException e) { failz0r(world, pos, "Error running code: ", e.getMessage()); } } } public void invoke(String method) { PyObject obj = (PyObject) this.bindings.get(method); if (obj == null) { failz0r(world, pos, "Unknown function '%s'", method); return; } PyFunction func = (PyFunction)obj; try { func.__call__(); } catch (NullPointerException e) { failz0r(world, pos, "Error running code: ", e.getMessage()); } } public static final String bookAsString(ItemStack book) { NBTTagCompound bookData = book.getTagCompound(); NBTTagList pages; try { // pages are all of type TAG_String == 8 pages = bookData.getTagList("pages", 8); } catch (NullPointerException e) { // this should not happen! 
return null; } // collapse the pages into one string StringBuilder sbStr = new StringBuilder(); for(int i = 0;i<pages.tagCount();i++) { String s = pages.getStringTagAt(i); if (i > 0) sbStr.append("\n"); sbStr.append(s); } return sbStr.toString(); } public boolean setCodeFromBook(World world, EntityPlayer player, ICommandSender runner, BlockPos pos, ItemStack heldItem) { String code = bookAsString(heldItem); if (code == null) { failz0r(world, pos, "Could not get pages from the book!?"); return false; } this.setCodeString(code); // set context using the player so they get feedback on success/fail this.setContext(world, player, pos); // now set the default runner to be the code entity this.setRunner(runner); return true; } public void setRunner(ICommandSender runner) { this.runner = runner; this.bindings.put("__runner__", runner); } public void setContext(World world, ICommandSender runner, BlockPos pos) { if (this.world == world && this.runner == runner && this.pos == pos) { this.ensureCompiled(); return; } this.world = world; this.pos = pos; this.runner = runner; this.players = new MyEntityPlayers(world); this.bindings.put("pos", new MyBlockPos(pos)); this.bindings.put("runner", PyRegistry.myWrapper(world, runner)); // I am reasonably certain that I can't just shove the methods below directly // into the script engine namespace because I can't pass a Runnable as a // value to be stored in the engine namespace. this.bindings.put("__utils__", this); // So.. now I copy all those methods to set up the "utils" try { String s = ""; for (String n : utils) { s += String.format("%s = __utils__.%s\n", n, n); } PythonEngine.eval(s, this.context); } catch (ScriptException e) { failz0r(world, pos, "Error setting up utils: %s", e.getMessage()); return; } // create the MyCommand curries and attach callables to utils / global scope try { String s = ""; for (String n: MyCommands.COMMANDS.keySet()) { // bind the name to just the invoke method, using the dynamic runner value this.bindings.put("__" + n, MyCommands.curry(n, this.world)); s += String.format("%s = lambda *a: __%s.invoke(runner, *a)\n", n, n); } PythonEngine.eval(s, this.context); } catch (ScriptException e) { failz0r(world, pos, "Error setting up commands: %s", e.getMessage()); return; } this.ensureCompiled(); } public static final String stackTraceToString(Throwable t) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); t.printStackTrace(pw); return sw.toString(); } private void ensureCompiled() { if (!this.codeChanged) return; FMLLog.fine("Eval my code: %s", this.code); // now execute the code try { PythonEngine.eval(this.code, this.context); if (!world.isRemote) { ((WorldServer)world).spawnParticle(EnumParticleTypes.CRIT, pos.getX() + .5, pos.getY() + 1, pos.getZ() + .5, 20, 0, 0, 0, .5, new int[0]); } } catch (ScriptException e) { failz0r(world, pos, "Error running code, traceback:\n%s", stackTraceToString(e)); } this.codeChanged = false; } private String[] utils = {"colors", "facings", "players"}; public static HashMap<String, EnumDyeColor> COLORMAP = new HashMap<String, EnumDyeColor>(); public static HashMap<String, EnumFacing> FACINGMAP = new HashMap<String, EnumFacing>(); public static List<String> colors = new LinkedList<>(); public static List<String> facings = new LinkedList<>(); public static void init() { COLORMAP.put("white", EnumDyeColor.WHITE); COLORMAP.put("orange", EnumDyeColor.ORANGE); COLORMAP.put("magenta", EnumDyeColor.MAGENTA); COLORMAP.put("lightBlue", EnumDyeColor.LIGHT_BLUE); 
COLORMAP.put("yellow", EnumDyeColor.YELLOW); COLORMAP.put("lime", EnumDyeColor.LIME); COLORMAP.put("pink", EnumDyeColor.PINK); COLORMAP.put("gray", EnumDyeColor.GRAY); COLORMAP.put("silver", EnumDyeColor.SILVER); COLORMAP.put("cyan", EnumDyeColor.CYAN); COLORMAP.put("purple", EnumDyeColor.PURPLE); COLORMAP.put("blue", EnumDyeColor.BLUE); COLORMAP.put("brown", EnumDyeColor.BROWN); COLORMAP.put("green", EnumDyeColor.GREEN); COLORMAP.put("red", EnumDyeColor.RED ); COLORMAP.put("black", EnumDyeColor.BLACK); FACINGMAP.put("down", EnumFacing.DOWN); FACINGMAP.put("up", EnumFacing.UP); FACINGMAP.put("north", EnumFacing.NORTH); FACINGMAP.put("south", EnumFacing.SOUTH); FACINGMAP.put("west", EnumFacing.WEST); FACINGMAP.put("east", EnumFacing.EAST); for (String name : COLORMAP.keySet()) { colors.add(name); } for (String name : FACINGMAP.keySet()) { facings.add(name); } } }
fix server crash on Python code errors
src/main/java/net/mechanicalcat/pycode/script/PythonCode.java
fix server crash on Python code errors
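The substantive change between old_contents and new_contents above is the exception type: Jython reports Python-level failures as PyException, which extends RuntimeException, so catching only NullPointerException let most script errors escape and crash the server. Below is a self-contained sketch of that behaviour using plain Jython; the record itself goes through its own PythonEngine wrapper and Minecraft types, which are not reproduced here, and the function name and script are invented for illustration.

// Standalone sketch (plain Jython) of the error-handling pattern the commit adopts.
import org.python.core.Py;
import org.python.core.PyFunction;
import org.python.util.PythonInterpreter;

public class CatchScriptErrors {
    public static void main(String[] args) {
        PythonInterpreter interp = new PythonInterpreter();
        // a deliberately failing user function, analogous to code loaded from a book
        interp.exec("def greet(target):\n    return 1 / 0\n");
        PyFunction greet = (PyFunction) interp.get("greet");
        try {
            greet.__call__(Py.java2py("world"));
        } catch (RuntimeException e) {
            // PyException extends RuntimeException, so the ZeroDivisionError lands here
            // instead of propagating up and taking the calling thread down
            System.err.println("Error running code: " + e);
        }
    }
}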
Java
mit
d98a02def3d8fce6812c23989a4ea0aa00434642
0
wlfyit/TTSHub
package haus.pup; import haus.pup.model.Phrase; import haus.pup.model.Provider; import haus.pup.model.Voice; import haus.pup.repository.ProviderRepository; import haus.pup.repository.ProviderRepositoryStub; import haus.pup.tts.TTSProvider; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import java.util.List; import java.util.Map; @Path("providers") public class ProviderResource { private ProviderRepository providerRepo = new ProviderRepositoryStub(); @GET @Produces({MediaType.APPLICATION_JSON,MediaType.APPLICATION_XML}) public List<Provider> getAllProviders() { return providerRepo.getProviders(); } @GET @Produces({MediaType.APPLICATION_JSON,MediaType.APPLICATION_XML}) @Path("{providerId}/voices") public List<Voice> getVoices(@PathParam("providerId") String providerId) { return providerRepo.getVoices(providerId); } @GET @Produces({MediaType.APPLICATION_JSON,MediaType.APPLICATION_XML}) @Path("{providerId}/voices/{lang}") public List<Voice> getVoicesByLang(@PathParam("providerId") String providerId, @PathParam("lang") String lang) { return providerRepo.getVoicesByLang(providerId, lang); } }
src/main/java/haus/pup/ProviderResource.java
package haus.pup; import haus.pup.model.Phrase; import haus.pup.model.Provider; import haus.pup.model.Voice; import haus.pup.repository.ProviderRepository; import haus.pup.repository.ProviderRepositoryStub; import haus.pup.tts.TTSProvider; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import java.util.List; import java.util.Map; @Path("providers") public class ProviderResource { private ProviderRepository providerRepo = new ProviderRepositoryStub(); @GET @Produces({MediaType.APPLICATION_JSON,MediaType.APPLICATION_XML}) public List<Provider> getAllProviders() { return providerRepo.getProviders(); } @GET @Produces({MediaType.APPLICATION_JSON,MediaType.APPLICATION_XML}) @Path("{providerId}/voices") public List<Voice> getPhrase(@PathParam("providerId") String providerId) { return providerRepo.getVoices(providerId); } }
created GetVoicesByLang
src/main/java/haus/pup/ProviderResource.java
created GetVoicesByLang
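The new getVoicesByLang method in the record maps to GET providers/{providerId}/voices/{lang}. A hedged client-side sketch using the standard JAX-RS 2.0 client API follows; the base URI, provider id and language code are invented for illustration and are not part of the record.

// Illustrative client call against the new endpoint; deployment root and ids are hypothetical.
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;

public class VoicesClient {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        String json = client
                .target("http://localhost:8080/api")        // hypothetical deployment root
                .path("providers/{providerId}/voices/{lang}")
                .resolveTemplate("providerId", "espeak")     // hypothetical provider id
                .resolveTemplate("lang", "en")               // hypothetical language code
                .request(MediaType.APPLICATION_JSON)
                .get(String.class);                          // raw JSON; mapping to Voice omitted
        System.out.println(json);
        client.close();
    }
}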
Java
mit
6a3dd4f62be896412b8e2546a226488bcf2d9ca6
0
FAU-Inf2/rpgpack-android
package de.fau.cs.mad.gamekobold.templatestore; import java.util.List; import de.fau.cs.mad.gamekobold.R; import android.content.Context; import android.graphics.Bitmap; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.ImageView; import android.widget.RatingBar; import android.widget.TextView; public class TemplateStoreArrayAdapter extends ArrayAdapter<StoreTemplate> { static class ViewHolder { TextView worldname; TextView date_author; TextView name; RatingBar bar; ImageView img; Bitmap bm; } Context context; List<StoreTemplate> templates; public TemplateStoreArrayAdapter(Context context, List<StoreTemplate> templates) { super(context, R.layout.template_store_rowlayout, templates); this.templates = templates; this.context = context; } public View getView(int position, View convertView, ViewGroup parent) { LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE); ViewHolder holder; StoreTemplate curr = templates.get(position); if(convertView == null) { convertView = inflater.inflate(R.layout.template_store_rowlayout, parent, false); holder = new ViewHolder(); holder.worldname = (TextView) convertView.findViewById(R.id.tv_store_worldname); holder.date_author = (TextView) convertView.findViewById(R.id.tv_store_date_author); holder.name = (TextView) convertView.findViewById(R.id.tv_store_name); holder.bar = (RatingBar) convertView.findViewById(R.id.ratingBarStore); holder.img = (ImageView) convertView.findViewById(R.id.templateStoreImg); if(curr.hasImage()) { curr.setBm(curr.getImage_data()); } convertView.setTag(holder); } else { holder = (ViewHolder) convertView.getTag(); } holder.worldname.setText(curr.getWorldname()); holder.date_author.setText(curr.getDate()+" - " + curr.getAuthor()); holder.name.setText(curr.getName()); if(holder.bar != null) { holder.bar.setRating(curr.getRating()); } if((position % 2) == 0) { convertView.setBackgroundColor( context.getResources().getColor(R.color.background_green) ); } if(curr.hasImage()) { //byte[] decodedString = Base64.decode(curr.getImage_data(), Base64.DEFAULT); //Bitmap decodedByte = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length); if(curr.getBm() == null) { curr.setBm(curr.getImage_data()); } holder.img.setImageBitmap(curr.getBm()); } return convertView; } public void add(StoreTemplate tpl) { this.templates.add(tpl); notifyDataSetChanged(); } }
src/de/fau/cs/mad/gamekobold/templatestore/TemplateStoreArrayAdapter.java
package de.fau.cs.mad.gamekobold.templatestore; import java.util.List; import de.fau.cs.mad.gamekobold.R; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.util.Base64; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.ImageView; import android.widget.RatingBar; import android.widget.TextView; public class TemplateStoreArrayAdapter extends ArrayAdapter<StoreTemplate> { static class ViewHolder { TextView worldname; TextView date_author; TextView name; RatingBar bar; ImageView img; Bitmap bm; } Context context; List<StoreTemplate> templates; public TemplateStoreArrayAdapter(Context context, List<StoreTemplate> templates) { super(context, R.layout.template_store_rowlayout, templates); this.templates = templates; this.context = context; } public View getView(int position, View convertView, ViewGroup parent) { LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE); ViewHolder holder; View rowView = null; StoreTemplate curr = templates.get(position); if(convertView == null) { convertView = inflater.inflate(R.layout.template_store_rowlayout, parent, false); holder = new ViewHolder(); holder.worldname = (TextView) convertView.findViewById(R.id.tv_store_worldname); holder.date_author = (TextView) convertView.findViewById(R.id.tv_store_date_author); holder.name = (TextView) convertView.findViewById(R.id.tv_store_name); holder.bar = (RatingBar) convertView.findViewById(R.id.ratingBarStore); holder.img = (ImageView) convertView.findViewById(R.id.templateStoreImg); if(curr.hasImage()) { curr.setBm(curr.getImage_data()); } convertView.setTag(holder); } else { holder = (ViewHolder) convertView.getTag(); } holder.worldname.setText(curr.getWorldname()); holder.date_author.setText(curr.getDate()+" - " + curr.getAuthor()); holder.name.setText(curr.getName()); if(holder.bar != null) { holder.bar.setRating(curr.getRating()); } if((position % 2) == 0) { convertView.setBackgroundColor( context.getResources().getColor(R.color.background_green) ); } if(curr.hasImage()) { Log.e("store", "curr has image"); //byte[] decodedString = Base64.decode(curr.getImage_data(), Base64.DEFAULT); //Bitmap decodedByte = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length); if(curr.getBm() == null) { Log.e("store", "getBm == null : ja "); curr.setBm(curr.getImage_data()); } else { Log.e("store", "getBm == null : nein"); } holder.img.setImageBitmap(curr.getBm()); } return convertView; } public void add(StoreTemplate tpl) { this.templates.add(tpl); notifyDataSetChanged(); } }
removed unused stuff in adapter
src/de/fau/cs/mad/gamekobold/templatestore/TemplateStoreArrayAdapter.java
removed unused stuff in adapter
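The adapter change recorded above keeps the ListView ViewHolder/convertView recycling while dropping unused imports, an unused local and the debug logging. Below is a minimal, hypothetical sketch of that recycling pattern, not the gamekobold class itself; SimpleRowAdapter, R.layout.row and R.id.row_text are invented names, and the sketch only illustrates the part of getView() the commit leaves intact.

import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;

import java.util.List;

public class SimpleRowAdapter extends ArrayAdapter<String> {

    // Caches the row's child views in the row's tag so findViewById()
    // runs only once per inflated row, not on every scroll.
    static class ViewHolder {
        TextView text;
    }

    public SimpleRowAdapter(Context context, List<String> items) {
        super(context, R.layout.row, items);   // R.layout.row is a hypothetical layout
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        ViewHolder holder;
        if (convertView == null) {
            // New row: inflate once and remember its views.
            convertView = LayoutInflater.from(getContext())
                    .inflate(R.layout.row, parent, false);
            holder = new ViewHolder();
            holder.text = (TextView) convertView.findViewById(R.id.row_text);
            convertView.setTag(holder);
        } else {
            // Recycled row: reuse the cached views.
            holder = (ViewHolder) convertView.getTag();
        }
        holder.text.setText(getItem(position));
        return convertView;
    }
}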
Java
mpl-2.0
bfb6f442dac752beea69f9dba8c54e3c2a28eebc
0
digidotcom/XBeeJavaLibrary,brucetsao/XBeeJavaLibrary,GUBotDev/XBeeJavaLibrary
/** * Copyright (c) 2014 Digi International Inc., * All rights not expressly granted are reserved. * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this file, * You can obtain one at http://mozilla.org/MPL/2.0/. * * Digi International Inc. 11001 Bren Road East, Minnetonka, MN 55343 * ======================================================================= */ package com.digi.xbee.api.connection.serial; import gnu.io.CommPortIdentifier; import gnu.io.CommPortOwnershipListener; import gnu.io.NoSuchPortException; import gnu.io.PortInUseException; import gnu.io.RXTXPort; import gnu.io.SerialPortEvent; import gnu.io.SerialPortEventListener; import gnu.io.UnsupportedCommOperationException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.TooManyListenersException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.digi.xbee.api.exceptions.ConnectionException; import com.digi.xbee.api.exceptions.InterfaceInUseException; import com.digi.xbee.api.exceptions.InvalidConfigurationException; import com.digi.xbee.api.exceptions.InvalidInterfaceException; import com.digi.xbee.api.exceptions.PermissionDeniedException; /** * This class represents a serial port using the RxTx library to communicate * with it. */ public class SerialPortRxTx extends AbstractSerialPort implements SerialPortEventListener, CommPortOwnershipListener { // Variables. private final Object lock = new Object(); private RXTXPort serialPort; private InputStream inputStream; private OutputStream outputStream; private Thread breakThread; private boolean breakEnabled = false; private CommPortIdentifier portIdentifier = null; private Logger logger; /** * Class constructor. Instances a new {@code SerialPortRxTx} object using * the given parameters. * * @param port Serial port name to use. * @param parameters Serial port parameters. * * @throws NullPointerException if {@code port == null} or * if {@code parameters == null}. * * @see #SerialPortRxTx(String, int) * @see #SerialPortRxTx(String, int, int) * @see #SerialPortRxTx(String, SerialPortParameters, int) * @see SerialPortParameters */ public SerialPortRxTx(String port, SerialPortParameters parameters) { this(port, parameters, DEFAULT_PORT_TIMEOUT); } /** * Class constructor. Instances a new {@code SerialPortRxTx} object using * the given parameters. * * @param port Serial port name to use. * @param parameters Serial port parameters. * @param receiveTimeout Serial port receive timeout in milliseconds. * * @throws IllegalArgumentException if {@code receiveTimeout < 0}. * @throws NullPointerException if {@code port == null} or * if {@code parameters == null}. * * @see #SerialPortRxTx(String, int) * @see #SerialPortRxTx(String, int, int) * @see #SerialPortRxTx(String, SerialPortParameters) * @see SerialPortParameters */ public SerialPortRxTx(String port, SerialPortParameters parameters, int receiveTimeout) { super(port, parameters, receiveTimeout); this.logger = LoggerFactory.getLogger(SerialPortRxTx.class); } /** * Class constructor. Instances a new {@code SerialPortRxTx} object using * the given parameters. * * @param port Serial port name to use. * @param baudRate Serial port baud rate, the rest of parameters will be * set by default. * * @throws NullPointerException if {@code port == null}. 
* * @see #DEFAULT_DATA_BITS * @see #DEFAULT_FLOW_CONTROL * @see #DEFAULT_PARITY * @see #DEFAULT_STOP_BITS * @see #DEFAULT_PORT_TIMEOUT * @see #SerialPortRxTx(String, int, int) * @see #SerialPortRxTx(String, SerialPortParameters) * @see #SerialPortRxTx(String, SerialPortParameters, int) * @see SerialPortParameters */ public SerialPortRxTx(String port, int baudRate) { this(port, baudRate, DEFAULT_PORT_TIMEOUT); } /** * Class constructor. Instances a new {@code SerialPortRxTx} object using * the given parameters. * * @param port Serial port name to use. * @param baudRate Serial port baud rate, the rest of parameters will be * set by default. * @param receiveTimeout Serial port receive timeout in milliseconds. * * @throws IllegalArgumentException if {@code receiveTimeout < 0}. * @throws NullPointerException if {@code port == null}. * * @see #DEFAULT_DATA_BITS * @see #DEFAULT_FLOW_CONTROL * @see #DEFAULT_PARITY * @see #DEFAULT_STOP_BITS * @see #SerialPortRxTx(String, int) * @see #SerialPortRxTx(String, SerialPortParameters) * @see #SerialPortRxTx(String, SerialPortParameters, int) * @see SerialPortParameters */ public SerialPortRxTx(String port, int baudRate, int receiveTimeout) { super(port, baudRate, receiveTimeout); this.logger = LoggerFactory.getLogger(SerialPortRxTx.class); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.IConnectionInterface#open() */ @Override public void open() throws InterfaceInUseException, InvalidInterfaceException, InvalidConfigurationException, PermissionDeniedException { // Check that the given serial port exists. try { portIdentifier = CommPortIdentifier.getPortIdentifier(port); } catch (NoSuchPortException e) { throw new InvalidInterfaceException("No such port: " + port, e); } try { // Get the serial port. serialPort = (RXTXPort)portIdentifier.open(PORT_ALIAS + " " + port, receiveTimeout); // Set port as connected. connectionOpen = true; // Configure the port. if (parameters == null) parameters = new SerialPortParameters(baudRate, DEFAULT_DATA_BITS, DEFAULT_STOP_BITS, DEFAULT_PARITY, DEFAULT_FLOW_CONTROL); serialPort.setSerialPortParams(baudRate, parameters.dataBits, parameters.stopBits, parameters.parity); serialPort.setFlowControlMode(parameters.flowControl); serialPort.enableReceiveTimeout(receiveTimeout); // Set the port ownership. portIdentifier.addPortOwnershipListener(this); // Initialize input and output streams before setting the listener. inputStream = serialPort.getInputStream(); outputStream = serialPort.getOutputStream(); // Activate data received event. serialPort.notifyOnDataAvailable(true); // Register serial port event listener to be notified when data is available. 
serialPort.addEventListener(this); } catch (PortInUseException e) { throw new InterfaceInUseException("Port " + port + " is already in use by other application(s)", e); } catch (UnsupportedCommOperationException e) { throw new InvalidConfigurationException(e.getMessage(), e); } catch (TooManyListenersException e) { throw new InvalidConfigurationException(e.getMessage(), e); } } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.IConnectionInterface#close() */ @Override public void close() { try { if (inputStream != null) { inputStream.close(); inputStream = null; } if (outputStream != null) { outputStream.close(); outputStream = null; } } catch (IOException e) { logger.error(e.getMessage(), e); } synchronized (lock) { if (serialPort != null) { try { serialPort.notifyOnDataAvailable(false); serialPort.removeEventListener(); portIdentifier.removePortOwnershipListener(this); serialPort.close(); serialPort = null; connectionOpen = false; } catch (Exception e) { } } } } /* * (non-Javadoc) * @see gnu.io.SerialPortEventListener#serialEvent(gnu.io.SerialPortEvent) */ @Override public void serialEvent(SerialPortEvent event) { // Listen only to data available event. switch (event.getEventType()) { case SerialPortEvent.DATA_AVAILABLE: // Check if serial device has been disconnected or not. try { getInputStream().available(); } catch (Exception e) { // Serial device has been disconnected. close(); synchronized (this) { this.notify(); } break; } // Notify data is available by waking up the read thread. try { if (getInputStream().available() > 0) { synchronized (this) { this.notify(); } } } catch (Exception e) { logger.error(e.getMessage(), e); } break; } } /* * (non-Javadoc) * @see java.lang.Object#toString() */ @Override public String toString() { return super.toString(); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setBreak(boolean) */ @Override public void setBreak(boolean enabled) { breakEnabled = enabled; if(breakEnabled){ if (breakThread == null) { breakThread = new Thread() { public void run() { while (breakEnabled && serialPort != null) serialPort.sendBreak(100); }; }; breakThread.start(); } } else { if (breakThread != null) breakThread.interrupt(); breakThread = null; serialPort.sendBreak(0); } } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.IConnectionInterface#getInputStream() */ @Override public InputStream getInputStream() { return inputStream; } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.IConnectionInterface#getOutputStream() */ @Override public OutputStream getOutputStream() { return outputStream; } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setReadTimeout(int) */ @Override public void setReadTimeout(int timeout) { serialPort.disableReceiveTimeout(); serialPort.enableReceiveTimeout(timeout); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#getReadTimeout() */ @Override public int getReadTimeout() { return serialPort.getReceiveTimeout(); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setDTR(boolean) */ @Override public void setDTR(boolean state) { serialPort.setDTR(state); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setRTS(boolean) */ @Override public void setRTS(boolean state) { serialPort.setRTS(state); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setPortParameters(int, int, int, int, int) */ @Override public void setPortParameters(int 
baudRate, int dataBits, int stopBits, int parity, int flowControl) throws InvalidConfigurationException, ConnectionException { parameters = new SerialPortParameters(baudRate, dataBits, stopBits, parity, flowControl); if (serialPort != null) { try { serialPort.setSerialPortParams(baudRate, dataBits, stopBits, parity); serialPort.setFlowControlMode(flowControl); } catch (UnsupportedCommOperationException e) { throw new InvalidConfigurationException(e.getMessage(), e); } } } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#sendBreak(int) */ @Override public void sendBreak(int duration) { if (serialPort != null) serialPort.sendBreak(duration); } /* * (non-Javadoc) * @see gnu.io.CommPortOwnershipListener#ownershipChange(int) */ @Override public void ownershipChange(int nType) { switch (nType) { case CommPortOwnershipListener.PORT_OWNERSHIP_REQUESTED: onSerialOwnershipRequested(null); break; } } /** * Releases the port on any ownership request in the same application * instance. * * @param data The port requester. */ private void onSerialOwnershipRequested(Object data) { try { throw new Exception(); } catch (Exception e) { StackTraceElement[] elems = e.getStackTrace(); String requester = elems[elems.length - 4].getClassName(); synchronized (this) { this.notify(); } close(); String myPackage = this.getClass().getPackage().getName(); if (requester.startsWith(myPackage)) requester = "another AT connection"; logger.warn("Connection for port {} canceled due to ownership request from {}.", port, requester); } } /** * Retrieves the list of available serial ports in the system. * * @return List of available serial ports. * * @see #listSerialPortsInfo() */ public static String[] listSerialPorts() { ArrayList<String> serialPorts = new ArrayList<String>(); @SuppressWarnings("unchecked") Enumeration<CommPortIdentifier> comPorts = CommPortIdentifier.getPortIdentifiers(); if (comPorts == null) return serialPorts.toArray(new String[serialPorts.size()]); while (comPorts.hasMoreElements()) { CommPortIdentifier identifier = (CommPortIdentifier)comPorts.nextElement(); if (identifier == null) continue; String strName = identifier.getName(); serialPorts.add(strName); } return serialPorts.toArray(new String[serialPorts.size()]); } /** * Retrieves the list of available serial ports with their information. * * @return List of available serial ports with their information. * * @see #listSerialPorts() * @see SerialPortInfo */ public static ArrayList<SerialPortInfo> listSerialPortsInfo() { ArrayList<SerialPortInfo> ports = new ArrayList<SerialPortInfo>(); @SuppressWarnings("unchecked") Enumeration<CommPortIdentifier> comPorts = CommPortIdentifier.getPortIdentifiers(); if (comPorts == null) return ports; while (comPorts.hasMoreElements()) { CommPortIdentifier identifier = (CommPortIdentifier)comPorts.nextElement(); if (identifier == null) continue; ports.add(new SerialPortInfo(identifier.getName())); } return ports; } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#isCTS() */ @Override public boolean isCTS() { return serialPort.isCTS(); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#isDSR() */ @Override public boolean isDSR() { return serialPort.isDSR(); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#isCD() */ @Override public boolean isCD() { return serialPort.isCD(); } }
library/src/main/java/com/digi/xbee/api/connection/serial/SerialPortRxTx.java
/** * Copyright (c) 2014 Digi International Inc., * All rights not expressly granted are reserved. * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this file, * You can obtain one at http://mozilla.org/MPL/2.0/. * * Digi International Inc. 11001 Bren Road East, Minnetonka, MN 55343 * ======================================================================= */ package com.digi.xbee.api.connection.serial; import gnu.io.CommPortIdentifier; import gnu.io.CommPortOwnershipListener; import gnu.io.NoSuchPortException; import gnu.io.PortInUseException; import gnu.io.RXTXPort; import gnu.io.SerialPortEvent; import gnu.io.SerialPortEventListener; import gnu.io.UnsupportedCommOperationException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.TooManyListenersException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.digi.xbee.api.exceptions.ConnectionException; import com.digi.xbee.api.exceptions.InterfaceInUseException; import com.digi.xbee.api.exceptions.InvalidConfigurationException; import com.digi.xbee.api.exceptions.InvalidInterfaceException; import com.digi.xbee.api.exceptions.PermissionDeniedException; /** * This class represents a serial port using the RxTx library to communicate * with it. */ public class SerialPortRxTx extends AbstractSerialPort implements SerialPortEventListener, CommPortOwnershipListener { // Variables. private final Object lock = new Object(); private RXTXPort serialPort; private InputStream inputStream; private OutputStream outputStream; private Thread breakThread; private boolean breakEnabled = false; private CommPortIdentifier portIdentifier = null; private Logger logger; /** * Class constructor. Instances a new {@code SerialPortRxTx} object using * the given parameters. * * @param port Serial port name to use. * @param parameters Serial port parameters. * * @throws NullPointerException if {@code port == null} or * if {@code parameters == null}. * * @see #SerialPortRxTx(String, int) * @see #SerialPortRxTx(String, int, int) * @see #SerialPortRxTx(String, SerialPortParameters, int) * @see SerialPortParameters */ public SerialPortRxTx(String port, SerialPortParameters parameters) { this(port, parameters, DEFAULT_PORT_TIMEOUT); } /** * Class constructor. Instances a new {@code SerialPortRxTx} object using * the given parameters. * * @param port Serial port name to use. * @param parameters Serial port parameters. * @param receiveTimeout Serial port receive timeout in milliseconds. * * @throws IllegalArgumentException if {@code receiveTimeout < 0}. * @throws NullPointerException if {@code port == null} or * if {@code parameters == null}. * * @see #SerialPortRxTx(String, int) * @see #SerialPortRxTx(String, int, int) * @see #SerialPortRxTx(String, SerialPortParameters) * @see SerialPortParameters */ public SerialPortRxTx(String port, SerialPortParameters parameters, int receiveTimeout) { super(port, parameters, receiveTimeout); this.logger = LoggerFactory.getLogger(SerialPortRxTx.class); } /** * Class constructor. Instances a new {@code SerialPortRxTx} object using * the given parameters. * * @param port Serial port name to use. * @param baudRate Serial port baud rate, the rest of parameters will be * set by default. * * @throws NullPointerException if {@code port == null}. 
* * @see #DEFAULT_DATA_BITS * @see #DEFAULT_FLOW_CONTROL * @see #DEFAULT_PARITY * @see #DEFAULT_STOP_BITS * @see #DEFAULT_PORT_TIMEOUT * @see #SerialPortRxTx(String, int, int) * @see #SerialPortRxTx(String, SerialPortParameters) * @see #SerialPortRxTx(String, SerialPortParameters, int) * @see SerialPortParameters */ public SerialPortRxTx(String port, int baudRate) { this(port, baudRate, DEFAULT_PORT_TIMEOUT); } /** * Class constructor. Instances a new {@code SerialPortRxTx} object using * the given parameters. * * @param port Serial port name to use. * @param baudRate Serial port baud rate, the rest of parameters will be * set by default. * @param receiveTimeout Serial port receive timeout in milliseconds. * * @throws IllegalArgumentException if {@code receiveTimeout < 0}. * @throws NullPointerException if {@code port == null}. * * @see #DEFAULT_DATA_BITS * @see #DEFAULT_FLOW_CONTROL * @see #DEFAULT_PARITY * @see #DEFAULT_STOP_BITS * @see #SerialPortRxTx(String, int) * @see #SerialPortRxTx(String, SerialPortParameters) * @see #SerialPortRxTx(String, SerialPortParameters, int) * @see SerialPortParameters */ public SerialPortRxTx(String port, int baudRate, int receiveTimeout) { super(port, baudRate, receiveTimeout); this.logger = LoggerFactory.getLogger(SerialPortRxTx.class); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.IConnectionInterface#open() */ @Override public void open() throws InterfaceInUseException, InvalidInterfaceException, InvalidConfigurationException, PermissionDeniedException { // Check that the given serial port exists. try { portIdentifier = CommPortIdentifier.getPortIdentifier(port); } catch (NoSuchPortException e) { throw new InvalidInterfaceException("No such port: " + port, e); } try { // Get the serial port. serialPort = (RXTXPort)portIdentifier.open(PORT_ALIAS + " " + port, receiveTimeout); // Set port as connected. connectionOpen = true; // Configure the port. if (parameters == null) parameters = new SerialPortParameters(baudRate, DEFAULT_DATA_BITS, DEFAULT_STOP_BITS, DEFAULT_PARITY, DEFAULT_FLOW_CONTROL); serialPort.setSerialPortParams(baudRate, parameters.dataBits, parameters.stopBits, parameters.parity); serialPort.setFlowControlMode(parameters.flowControl); serialPort.enableReceiveTimeout(receiveTimeout); // Set the port ownership. portIdentifier.addPortOwnershipListener(this); // Initialize input and output streams before setting the listener. inputStream = serialPort.getInputStream(); outputStream = serialPort.getOutputStream(); // Activate data received event. serialPort.notifyOnDataAvailable(true); // Register serial port event listener to be notified when data is available. 
serialPort.addEventListener(this); } catch (PortInUseException e) { throw new InterfaceInUseException("Port " + port + " is already in use by other application(s)", e); } catch (UnsupportedCommOperationException e) { throw new InvalidConfigurationException(e.getMessage(), e); } catch (TooManyListenersException e) { throw new InvalidConfigurationException(e.getMessage(), e); } } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.IConnectionInterface#close() */ @Override public void close() { try { if (inputStream != null) { inputStream.close(); inputStream = null; } if (outputStream != null) { outputStream.close(); outputStream = null; } } catch (IOException e) { logger.error(e.getMessage(), e); } if (serialPort != null) { try { serialPort.notifyOnDataAvailable(false); serialPort.removeEventListener(); portIdentifier.removePortOwnershipListener(this); synchronized (lock) { serialPort.close(); serialPort = null; connectionOpen = false; } } catch (Exception e) { } } } /* * (non-Javadoc) * @see gnu.io.SerialPortEventListener#serialEvent(gnu.io.SerialPortEvent) */ @Override public void serialEvent(SerialPortEvent event) { // Listen only to data available event. switch (event.getEventType()) { case SerialPortEvent.DATA_AVAILABLE: // Check if serial device has been disconnected or not. try { getInputStream().available(); } catch (Exception e) { // Serial device has been disconnected. close(); synchronized (this) { this.notify(); } break; } // Notify data is available by waking up the read thread. try { if (getInputStream().available() > 0) { synchronized (this) { this.notify(); } } } catch (Exception e) { logger.error(e.getMessage(), e); } break; } } /* * (non-Javadoc) * @see java.lang.Object#toString() */ @Override public String toString() { return super.toString(); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setBreak(boolean) */ @Override public void setBreak(boolean enabled) { breakEnabled = enabled; if(breakEnabled){ if (breakThread == null) { breakThread = new Thread() { public void run() { while (breakEnabled && serialPort != null) serialPort.sendBreak(100); }; }; breakThread.start(); } } else { if (breakThread != null) breakThread.interrupt(); breakThread = null; serialPort.sendBreak(0); } } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.IConnectionInterface#getInputStream() */ @Override public InputStream getInputStream() { return inputStream; } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.IConnectionInterface#getOutputStream() */ @Override public OutputStream getOutputStream() { return outputStream; } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setReadTimeout(int) */ @Override public void setReadTimeout(int timeout) { serialPort.disableReceiveTimeout(); serialPort.enableReceiveTimeout(timeout); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#getReadTimeout() */ @Override public int getReadTimeout() { return serialPort.getReceiveTimeout(); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setDTR(boolean) */ @Override public void setDTR(boolean state) { serialPort.setDTR(state); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setRTS(boolean) */ @Override public void setRTS(boolean state) { serialPort.setRTS(state); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#setPortParameters(int, int, int, int, int) */ @Override public void setPortParameters(int 
baudRate, int dataBits, int stopBits, int parity, int flowControl) throws InvalidConfigurationException, ConnectionException { parameters = new SerialPortParameters(baudRate, dataBits, stopBits, parity, flowControl); if (serialPort != null) { try { serialPort.setSerialPortParams(baudRate, dataBits, stopBits, parity); serialPort.setFlowControlMode(flowControl); } catch (UnsupportedCommOperationException e) { throw new InvalidConfigurationException(e.getMessage(), e); } } } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#sendBreak(int) */ @Override public void sendBreak(int duration) { if (serialPort != null) serialPort.sendBreak(duration); } /* * (non-Javadoc) * @see gnu.io.CommPortOwnershipListener#ownershipChange(int) */ @Override public void ownershipChange(int nType) { switch (nType) { case CommPortOwnershipListener.PORT_OWNERSHIP_REQUESTED: onSerialOwnershipRequested(null); break; } } /** * Releases the port on any ownership request in the same application * instance. * * @param data The port requester. */ private void onSerialOwnershipRequested(Object data) { try { throw new Exception(); } catch (Exception e) { StackTraceElement[] elems = e.getStackTrace(); String requester = elems[elems.length - 4].getClassName(); synchronized (this) { this.notify(); } close(); String myPackage = this.getClass().getPackage().getName(); if (requester.startsWith(myPackage)) requester = "another AT connection"; logger.warn("Connection for port {} canceled due to ownership request from {}.", port, requester); } } /** * Retrieves the list of available serial ports in the system. * * @return List of available serial ports. * * @see #listSerialPortsInfo() */ public static String[] listSerialPorts() { ArrayList<String> serialPorts = new ArrayList<String>(); @SuppressWarnings("unchecked") Enumeration<CommPortIdentifier> comPorts = CommPortIdentifier.getPortIdentifiers(); if (comPorts == null) return serialPorts.toArray(new String[serialPorts.size()]); while (comPorts.hasMoreElements()) { CommPortIdentifier identifier = (CommPortIdentifier)comPorts.nextElement(); if (identifier == null) continue; String strName = identifier.getName(); serialPorts.add(strName); } return serialPorts.toArray(new String[serialPorts.size()]); } /** * Retrieves the list of available serial ports with their information. * * @return List of available serial ports with their information. * * @see #listSerialPorts() * @see SerialPortInfo */ public static ArrayList<SerialPortInfo> listSerialPortsInfo() { ArrayList<SerialPortInfo> ports = new ArrayList<SerialPortInfo>(); @SuppressWarnings("unchecked") Enumeration<CommPortIdentifier> comPorts = CommPortIdentifier.getPortIdentifiers(); if (comPorts == null) return ports; while (comPorts.hasMoreElements()) { CommPortIdentifier identifier = (CommPortIdentifier)comPorts.nextElement(); if (identifier == null) continue; ports.add(new SerialPortInfo(identifier.getName())); } return ports; } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#isCTS() */ @Override public boolean isCTS() { return serialPort.isCTS(); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#isDSR() */ @Override public boolean isDSR() { return serialPort.isDSR(); } /* * (non-Javadoc) * @see com.digi.xbee.api.connection.serial.AbstractSerialPort#isCD() */ @Override public boolean isCD() { return serialPort.isCD(); } }
Fixed an issue related to a lock when closing the serial port. Signed-off-by: Ruben Moral <b34e6e46f990ecfddff1160f59e825f0a4c1057a@digi.com>
library/src/main/java/com/digi/xbee/api/connection/serial/SerialPortRxTx.java
Fixed an issue related to a lock when closing the serial port.
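The diff above moves the null check and the listener teardown in close() inside the synchronized (lock) block, instead of guarding only the serialPort.close() call. The sketch below is not the RXTX or XBee library code; it is a minimal illustration with a hypothetical Port interface and GuardedCloser class of why checking, using and clearing a shared reference inside one synchronized block avoids the race the old ordering allowed.

public class GuardedCloser {

    /** Hypothetical resource with a blocking close(). */
    interface Port {
        void close();
    }

    private final Object lock = new Object();
    private Port port;   // shared, mutable resource

    public GuardedCloser(Port port) {
        this.port = port;
    }

    // Old shape (race-prone): the null check and teardown happened outside the
    // lock, so two threads could both pass the check and one would then act on
    // a field the other had already cleared.
    //
    //   if (port != null) {
    //       synchronized (lock) { port.close(); port = null; }
    //   }

    // New shape: check, close and clear are atomic with respect to other callers.
    public void close() {
        synchronized (lock) {
            if (port != null) {
                port.close();
                port = null;
            }
        }
    }

    public static void main(String[] args) {
        GuardedCloser closer = new GuardedCloser(() -> System.out.println("port closed"));
        closer.close();
        closer.close();   // second call is a no-op, no NullPointerException
    }
}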
Java
agpl-3.0
070f3fa66facdbe617f895c8cbc46042aafffd80
0
ivanovlev/Gadgetbridge,Freeyourgadget/Gadgetbridge,Freeyourgadget/Gadgetbridge,rosenpin/Gadgetbridge,roidelapluie/Gadgetbridge,rosenpin/Gadgetbridge,ivanovlev/Gadgetbridge,roidelapluie/Gadgetbridge,ivanovlev/Gadgetbridge,Freeyourgadget/Gadgetbridge,Freeyourgadget/Gadgetbridge,roidelapluie/Gadgetbridge,rosenpin/Gadgetbridge
package nodomain.freeyourgadget.gadgetbridge.model; import android.content.ContentUris; import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.provider.CalendarContract; import android.provider.CalendarContract.Instances; import java.util.ArrayList; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.List; public class CalendarEvents { // needed for pebble: time, duration, layout, reminders, actions // layout: type, title, subtitle, body (max 512), tinyIcon, smallIcon, largeIcon //further: primaryColor, secondaryColor, backgroundColor, headings, paragraphs, lastUpdated // taken from: https://developer.getpebble.com/guides/timeline/pin-structure/ // needed for miband: // time private static final String[] EVENT_INSTANCE_PROJECTION = new String[] { Instances._ID, Instances.BEGIN, Instances.END, Instances.EVENT_ID, Instances.TITLE, Instances.DESCRIPTION, Instances.EVENT_LOCATION, Instances.CALENDAR_DISPLAY_NAME }; private static final int lookahead_days = 7; private List<CalendarEvent> calendarEventList = new ArrayList<CalendarEvent>(); public List<CalendarEvent> getCalendarEventList(Context mContext) { fetchSystemEvents(mContext); return calendarEventList; } private boolean fetchSystemEvents(Context mContext) { Calendar cal = GregorianCalendar.getInstance(); Long dtStart = cal.getTime().getTime(); cal.add(Calendar.DATE, lookahead_days); Long dtEnd = cal.getTime().getTime(); Uri.Builder eventsUriBuilder = CalendarContract.Instances.CONTENT_URI.buildUpon(); ContentUris.appendId(eventsUriBuilder, dtStart); ContentUris.appendId(eventsUriBuilder, dtEnd); Uri eventsUri = eventsUriBuilder.build(); Cursor evtCursor = null; evtCursor = mContext.getContentResolver().query(eventsUri, EVENT_INSTANCE_PROJECTION, null, null, CalendarContract.Instances.BEGIN + " ASC"); if (evtCursor.moveToFirst()) { do { CalendarEvent calEvent = new CalendarEvent( evtCursor.getLong(1), evtCursor.getLong(2), evtCursor.getLong(3), evtCursor.getString(4), evtCursor.getString(5), evtCursor.getString(6), evtCursor.getString(7) ); calendarEventList.add(calEvent); } while(evtCursor.moveToNext()); return true; } return false; } public class CalendarEvent { private long begin; private long end; private long id; private String title; private String description; private String location; private String calName; public CalendarEvent(long begin, long end, long id, String title, String description, String location, String calName) { this.begin = begin; this.end = end; this.id = id; this.title = title; this.description = description; this.location = location; this.calName = calName; } public long getBegin() { return begin; } public int getBeginSeconds() { return (int)(begin/1000); } public long getEnd() { return end; } public long getDuration() { return end - begin; } public int getDurationSeconds() { return (int)((getDuration())/1000); } public short getDurationMinutes() { return (short)(getDurationSeconds()/60); } public long getId() { return id; } public String getTitle() { return title; } public String getDescription() { return description; } public String getLocation() { return location; } public String getCalName() { return calName; } } }
app/src/main/java/nodomain/freeyourgadget/gadgetbridge/model/CalendarEvents.java
package nodomain.freeyourgadget.gadgetbridge.model; import android.content.ContentUris; import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.provider.CalendarContract; import android.provider.CalendarContract.Instances; import java.util.ArrayList; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.List; public class CalendarEvents { // needed for pebble: time, duration, layout, reminders, actions // layout: type, title, subtitle, body (max 512), tinyIcon, smallIcon, largeIcon //further: primaryColor, secondaryColor, backgroundColor, headings, paragraphs, lastUpdated // taken from: https://developer.getpebble.com/guides/timeline/pin-structure/ // needed for miband: // time private static final String[] EVENT_INSTANCE_PROJECTION = new String[] { Instances._ID, Instances.BEGIN, Instances.END, Instances.EVENT_ID, Instances.TITLE, Instances.DESCRIPTION, Instances.EVENT_LOCATION, Instances.CALENDAR_DISPLAY_NAME }; private static final int lookahead_days = 7; private List<CalendarEvent> calendarEventList = new ArrayList<CalendarEvent>(); public List<CalendarEvent> getCalendarEventList(Context mContext) { fetchSystemEvents(mContext); return calendarEventList; } private boolean fetchSystemEvents(Context mContext) { Calendar cal = GregorianCalendar.getInstance(); Long dtStart = cal.getTime().getTime(); cal.add(Calendar.DATE, lookahead_days); Long dtEnd = cal.getTime().getTime(); Uri.Builder eventsUriBuilder = CalendarContract.Instances.CONTENT_URI.buildUpon(); ContentUris.appendId(eventsUriBuilder, dtStart); ContentUris.appendId(eventsUriBuilder, dtEnd); Uri eventsUri = eventsUriBuilder.build(); Cursor evtCursor = null; evtCursor = mContext.getContentResolver().query(eventsUri, EVENT_INSTANCE_PROJECTION, null, null, CalendarContract.Instances.BEGIN + " ASC"); if (evtCursor.moveToFirst()) { do { CalendarEvent calEvent = new CalendarEvent( evtCursor.getLong(1), evtCursor.getLong(2), evtCursor.getLong(3), evtCursor.getString(4), evtCursor.getString(5), evtCursor.getString(6), evtCursor.getString(7) ); calendarEventList.add(calEvent); } while(evtCursor.moveToNext()); return true; } return false; } public class CalendarEvent { private long begin; private long end; private long id; private String title; private String description; private String location; private String calName; public CalendarEvent(long begin, long end, long id, String title, String description, String location, String calName) { this.begin = begin; this.end = end; this.id = id; this.title = title; this.description = description; this.location = location; this.calName = calName; } public long getBegin() { return begin; } public long getEnd() { return end; } public long getDuration() { return end - begin; } public long getId() { return id; } public String getTitle() { return title; } public String getDescription() { return description; } public String getLocation() { return location; } public String getCalName() { return calName; } } }
Add further getters, converting the data to other units
app/src/main/java/nodomain/freeyourgadget/gadgetbridge/model/CalendarEvents.java
Add further getters, converting the data to other units
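The getters added above are plain unit conversions on millisecond timestamps: epoch milliseconds to seconds, and a duration in milliseconds to whole seconds and minutes. A standalone sketch of the same arithmetic follows, using a made-up EventTimes class rather than the Gadgetbridge CalendarEvent, so the truncation behaviour of the integer division is easy to try out.

public final class EventTimes {

    private final long beginMillis;
    private final long endMillis;

    public EventTimes(long beginMillis, long endMillis) {
        this.beginMillis = beginMillis;
        this.endMillis = endMillis;
    }

    /** Start time as Unix seconds (integer division drops the fraction). */
    public int getBeginSeconds() {
        return (int) (beginMillis / 1000);
    }

    /** Duration in whole seconds. */
    public int getDurationSeconds() {
        return (int) ((endMillis - beginMillis) / 1000);
    }

    /** Duration in whole minutes, returned as a short like the getter above. */
    public short getDurationMinutes() {
        return (short) (getDurationSeconds() / 60);
    }

    public static void main(String[] args) {
        EventTimes e = new EventTimes(1_000_000L, 1_090_000L);   // 90 seconds apart
        System.out.println(e.getDurationSeconds() + " s, " + e.getDurationMinutes() + " min");
    }
}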
Java
agpl-3.0
582f2017a4fceb52cd6dfc8aec0497df6752d57c
0
mnip91/proactive-component-monitoring,jrochas/scale-proactive,lpellegr/programming,fviale/programming,lpellegr/programming,mnip91/programming-multiactivities,paraita/programming,acontes/programming,ow2-proactive/programming,PaulKh/scale-proactive,ow2-proactive/programming,lpellegr/programming,ow2-proactive/programming,fviale/programming,acontes/programming,mnip91/programming-multiactivities,mnip91/proactive-component-monitoring,lpellegr/programming,paraita/programming,PaulKh/scale-proactive,acontes/programming,acontes/programming,acontes/programming,jrochas/scale-proactive,paraita/programming,jrochas/scale-proactive,acontes/scheduling,jrochas/scale-proactive,mnip91/proactive-component-monitoring,fviale/programming,acontes/programming,acontes/scheduling,paraita/programming,PaulKh/scale-proactive,mnip91/proactive-component-monitoring,fviale/programming,PaulKh/scale-proactive,mnip91/programming-multiactivities,PaulKh/scale-proactive,lpellegr/programming,jrochas/scale-proactive,lpellegr/programming,ow2-proactive/programming,mnip91/programming-multiactivities,mnip91/programming-multiactivities,paraita/programming,acontes/programming,acontes/scheduling,acontes/scheduling,fviale/programming,ow2-proactive/programming,acontes/scheduling,mnip91/proactive-component-monitoring,fviale/programming,acontes/scheduling,mnip91/proactive-component-monitoring,ow2-proactive/programming,PaulKh/scale-proactive,paraita/programming,jrochas/scale-proactive,PaulKh/scale-proactive,jrochas/scale-proactive,mnip91/programming-multiactivities,acontes/scheduling
/* * ################################################################ * * ProActive: The Java(TM) library for Parallel, Distributed, * Concurrent computing with Security and Mobility * * Copyright (C) 1997-2005 INRIA/University of Nice-Sophia Antipolis * Contact: proactive@objectweb.org * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA * * Initial developer(s): The ProActive Team * http://www.inria.fr/oasis/ProActive/contacts.html * Contributor(s): * * ################################################################ */ package org.objectweb.proactive.core.descriptor.xml; /** * Defines many constants useful across ProActive * * @author ProActive Team * @version 1.0, 2001/10/23 * @since ProActive 0.9 * */ public interface ProActiveDescriptorConstants { public static final String PROACTIVE_DESCRIPTOR_TAG = "ProActiveDescriptor"; public static final String MAIN_DEFINITION_TAG = "mainDefinition"; public static final String VARIABLES_TAG = "variables"; public static final String VARIABLES_DESCRIPTOR_TAG ="DescriptorVariable"; public static final String VARIABLES_PROGRAM_TAG ="ProgramVariable"; public static final String VARIABLES_DESCRIPTOR_DEFAULT_TAG ="DescriptorDefaultVariable"; public static final String VARIABLES_PROGRAM_DEFAULT_TAG ="ProgramDefaultVariable"; public static final String VARIABLES_JAVAPROPERTY_TAG ="JavaPropertyVariable"; public static final String VARIABLES_INCLUDE_XML_FILE_TAG ="IncludeXMLFile"; public static final String VARIABLES_INCLUDE_PROPERTY_FILE_TAG ="IncludePropertyFile"; public static final String DEPLOYMENT_TAG = "deployment"; public static final String INFRASTRUCTURE_TAG = "infrastructure"; public static final String COMPONENT_DEFINITION_TAG = "componentDefinition"; public static final String ARG_TAG = "arg"; public static final String MAP_TO_VIRTUAL_NODE_TAG = "mapToVirtualNode"; public static final String VIRTUAL_NODES_DEFINITION_TAG = "virtualNodesDefinition"; public static final String VIRTUAL_NODES_ACQUISITION_TAG = "virtualNodesAcquisition"; public static final String VIRTUAL_NODE_TAG = "virtualNode"; public static final String REGISTER_TAG = "register"; public static final String MAPPING_TAG = "mapping"; public static final String MAP_TAG = "map"; public static final String JVMSET_TAG = "jvmSet"; public static final String VMNAME_TAG = "vmName"; public static final String CURRENTJVM_TAG = "currentJVM"; public static final String LOOKUP_TAG = "lookup"; public static final String JVMS_TAG = "jvms"; public static final String JVM_TAG = "jvm"; public static final String ACQUISITION_TAG = "acquisition"; public static final String CREATION_PROCESS_TAG = "creation"; public static final String PROCESS_TAG = "process"; public static final String PROCESS_DEFINITION_TAG = "processDefinition"; public static final String SERVICE_DEFINITION_TAG = "serviceDefinition"; public static final String JVM_PROCESS_TAG = "jvmProcess"; 
public static final String RSH_PROCESS_TAG = "rshProcess"; public static final String PROCESS_LIST_TAG = "processList"; public static final String PROCESS_LIST_BYHOST_TAG = "processListbyHost"; public static final String MAPRSH_PROCESS_TAG = "maprshProcess"; public static final String SSH_PROCESS_TAG = "sshProcess"; public static final String RLOGIN_PROCESS_TAG = "rloginProcess"; public static final String BSUB_PROCESS_TAG = "bsubProcess"; public static final String GLOBUS_PROCESS_TAG = "globusProcess"; public static final String PRUN_PROCESS_TAG = "prunProcess"; public static final String PBS_PROCESS_TAG = "pbsProcess"; public static final String OAR_PROCESS_TAG = "oarProcess"; public static final String GLITE_PROCESS_TAG = "gLiteProcess"; public static final String DEPENDENT_PROCESS_SEQUENCE_TAG = "dependentProcessSequence"; public static final String SEQUENTIAL_PROCESS_TAG = "independentProcessSequence"; public static final String MPI_PROCESS_TAG = "mpiProcess"; public static final String MPI_PROCESS_OPTIONS_TAG = "mpiOptions"; public static final String MPI_LOCAL_PATH_TAG = "localRelativePath"; public static final String MPI_REMOTE_PATH_TAG = "remoteAbsolutePath"; public static final String NG_PROCESS_TAG = "ngProcess"; public static final String OARGRID_PROCESS_TAG = "oarGridProcess"; public static final String HIERARCHICAL_PROCESS_TAG = "hierarchicalProcess"; public static final String GRID_ENGINE_PROCESS_TAG = "gridEngineProcess"; public static final String GLITE_PROCESS_OPTIONS_TAG = "gLiteOptions"; public static final String GRID_ENGINE_OPTIONS_TAG = "gridEngineOption"; public static final String PROCESSES_TAG = "processes"; public static final String SERVICES_TAG = "services"; public static final String EXTENDED_JVM_TAG = "extendedJvm"; public static final String PROCESS_REFERENCE_TAG = "processReference"; public static final String SERVICE_REFERENCE_TAG = "serviceReference"; public static final String HIERARCHICIAL_REFERENCE_TAG = "hierarchicalReference"; public static final String COMMAND_PATH_TAG = "commandPath"; public static final String ENVIRONMENT_TAG = "environment"; public static final String HOST_LIST_TAG = "hostlist"; public static final String BSUB_OPTIONS_TAG = "bsubOption"; public static final String RES_REQ_TAG = "resourceRequirement"; public static final String SCRIPT_PATH_TAG = "scriptPath"; public static final String GLOBUS_OPTIONS_TAG = "globusOption"; public static final String COUNT_TAG = "count"; public static final String GLOBUS_MAXTIME_TAG = "maxTime"; public static final String PRUN_OPTIONS_TAG = "prunOption"; public static final String PROCESSOR_TAG = "processor"; public static final String HOSTS_NUMBER_TAG = "hostsNumber"; public static final String PROCESSOR_PER_NODE_TAG = "processorPerNode"; public static final String BOOKING_DURATION_TAG = "bookingDuration"; public static final String QUEUE_NAME_TAG = "queueName"; public static final String PARALLEL_ENVIRONMENT_TAG = "parallelEnvironment"; public static final String OUTPUT_FILE = "outputFile"; public static final String ERROR_FILE = "errorFile"; public static final String PBS_OPTIONS_TAG = "pbsOption"; public static final String OAR_OPTIONS_TAG = "oarOption"; public static final String OARGRID_OPTIONS_TAG = "oarGridOption"; public static final String OAR_RESOURCE_TAG = "resources"; public static final String OARGRID_WALLTIME_TAG = "walltime"; public static final String NG_OPTIONS_TAG = "ngOption"; public static final String EXECUTABLE_TAG = "executable"; //public static final String 
OAR_PROPERTY_TAG="properties"; public static final String VARIABLE_TAG = "variable"; public static final String CLASSPATH_TAG = "classpath"; public static final String BOOT_CLASSPATH_TAG = "bootclasspath"; public static final String JAVA_PATH_TAG = "javaPath"; public static final String POLICY_FILE_TAG = "policyFile"; public static final String LOG4J_FILE_TAG = "log4jpropertiesFile"; public static final String PROACTIVE_PROPS_FILE_TAG = "ProActiveUserPropertiesFile"; public static final String CLASSNAME_TAG = "classname"; public static final String PARAMETERS_TAG = "parameters"; public static final String ABS_PATH_TAG = "absolutePath"; public static final String REL_PATH_TAG = "relativePath"; public static final String GLITE_PATH_TAG = "JDLFilePath"; public static final String GLITE_REMOTE_PATH_TAG = "JDLRemoteFilePath"; public static final String GLITE_ARGUMENTS_TAG = "arguments"; public static final String GLITE_INPUTSANDBOX_TAG = "inputSandbox"; public static final String GLITE_OUTPUTSANDBOX_TAG = "outputSandbox"; public static final String GLITE_ENVIRONMENT_TAG = "environment"; public static final String GLITE_REQUIREMENTS_TAG = "requirements"; public static final String GLITE_RANK_TAG = "rank"; public static final String GLITE_CONFIG_TAG = "configFile"; public static final String GLITE_INPUTDATA_TAG = "inputData"; public static final String JVMPARAMETERS_TAG = "jvmParameters"; public static final String JVMPARAMETER_TAG = "parameter"; public static final String SECURITY_TAG = "security"; public static final String RMI_LOOKUP_TAG = "RMIRegistryLookup"; public static final String P2P_SERVICE_TAG = "P2PService"; public static final String PEERS_SET_TAG = "peerSet"; public static final String PEER_TAG = "peer"; public static final String FT_CONFIG_TAG = "faultTolerance"; public static final String FT_CKPTSERVER_TAG = "checkpointServer"; public static final String FT_RECPROCESS_TAG = "recoveryProcess"; public static final String FT_LOCSERVER_TAG = "locationServer"; public static final String FT_RESSERVER_TAG = "resourceServer"; public static final String FT_GLOBALSERVER_TAG = "globalServer"; public static final String FT_TTCVALUE_TAG = "ttc"; public static final String FT_PROTO_TAG = "protocol"; public static final String UNICORE_PROCESS_TAG = "unicoreProcess"; public static final String UNICORE_OPTIONS_TAG = "unicoreOption"; public static final String UNICORE_DIR_PATH_TAG = "unicoreDirPath"; public static final String UNICORE_KEYFILE_PATH_TAG = "keyFilePath"; public static final String UNICORE_USITE_TAG = "usite"; public static final String UNICORE_VSITE_TAG = "vsite"; public static final String FILE_TRANSFER_DEFINITIONS_TAG = "FileTransferDefinitions"; public static final String FILE_TRANSFER_TAG = "FileTransfer"; public static final String FILE_TRANSFER_FILE_TAG = "file"; public static final String FILE_TRANSFER_DIR_TAG = "dir"; public static final String FILE_TRANSFER_DEPLOY_TAG = "FileTransferDeploy"; public static final String FILE_TRANSFER_RETRIEVE_TAG = "FileTransferRetrieve"; public static final String FILE_TRANSFER_COPY_PROTOCOL_TAG = "copyProtocol"; public static final String FILE_TRANSFER_SRC_INFO_TAG = "sourceInfo"; public static final String FILE_TRANSFER_DST_INFO_TAG = "destinationInfo"; public static final String FILE_TRANSFER_IMPLICT_KEYWORD = "implicit"; }
src/org/objectweb/proactive/core/descriptor/xml/ProActiveDescriptorConstants.java
/* * ################################################################ * * ProActive: The Java(TM) library for Parallel, Distributed, * Concurrent computing with Security and Mobility * * Copyright (C) 1997-2005 INRIA/University of Nice-Sophia Antipolis * Contact: proactive@objectweb.org * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA * * Initial developer(s): The ProActive Team * http://www.inria.fr/oasis/ProActive/contacts.html * Contributor(s): * * ################################################################ */ package org.objectweb.proactive.core.descriptor.xml; /** * Defines many constants useful across ProActive * * @author ProActive Team * @version 1.0, 2001/10/23 * @since ProActive 0.9 * */ public interface ProActiveDescriptorConstants { public static final String PROACTIVE_DESCRIPTOR_TAG = "ProActiveDescriptor"; public static final String MAIN_DEFINITION_TAG = "mainDefinition"; public static final String VARIABLES_TAG = "variables"; public static final String VARIABLES_DESCRIPTOR_TAG ="DescriptorVariable"; public static final String VARIABLES_PROGRAM_TAG ="ProgramVariable"; public static final String VARIABLES_DESCRIPTOR_DEFAULT_TAG ="DescriptorDefaultVariable"; public static final String VARIABLES_PROGRAM_DEFAULT_TAG ="ProgramDefaultVariable"; public static final String VARIABLES_JAVAPROPERTY_TAG ="JavaPropertyVariable"; public static final String VARIABLES_INCLUDE_XML_FILE_TAG ="IncludeXMLFile"; public static final String VARIABLES_INCLUDE_PROPERTY_FILE_TAG ="IncludePropertyFile"; public static final String DEPLOYMENT_TAG = "deployment"; public static final String INFRASTRUCTURE_TAG = "infrastructure"; public static final String COMPONENT_DEFINITION_TAG = "componentDefinition"; public static final String ARG_TAG = "arg"; public static final String MAP_TO_VIRTUAL_NODE_TAG = "mapToVirtualNode"; public static final String VIRTUAL_NODES_DEFINITION_TAG = "virtualNodesDefinition"; public static final String VIRTUAL_NODES_ACQUISITION_TAG = "virtualNodesAcquisition"; public static final String VIRTUAL_NODE_TAG = "virtualNode"; public static final String REGISTER_TAG = "register"; public static final String MAPPING_TAG = "mapping"; public static final String MAP_TAG = "map"; public static final String JVMSET_TAG = "jvmSet"; public static final String VMNAME_TAG = "vmName"; public static final String CURRENTJVM_TAG = "currentJVM"; public static final String LOOKUP_TAG = "lookup"; public static final String JVMS_TAG = "jvms"; public static final String JVM_TAG = "jvm"; public static final String ACQUISITION_TAG = "acquisition"; public static final String CREATION_PROCESS_TAG = "creation"; public static final String PROCESS_TAG = "process"; public static final String PROCESS_DEFINITION_TAG = "processDefinition"; public static final String SERVICE_DEFINITION_TAG = "serviceDefinition"; public static final String JVM_PROCESS_TAG = "jvmProcess"; 
public static final String RSH_PROCESS_TAG = "rshProcess"; public static final String PROCESS_LIST_TAG = "processList"; public static final String PROCESS_LIST_BYHOST_TAG = "processListbyHost"; public static final String MAPRSH_PROCESS_TAG = "maprshProcess"; public static final String SSH_PROCESS_TAG = "sshProcess"; public static final String RLOGIN_PROCESS_TAG = "rloginProcess"; public static final String BSUB_PROCESS_TAG = "bsubProcess"; public static final String GLOBUS_PROCESS_TAG = "globusProcess"; public static final String PRUN_PROCESS_TAG = "prunProcess"; public static final String PBS_PROCESS_TAG = "pbsProcess"; public static final String OAR_PROCESS_TAG = "oarProcess"; public static final String GLITE_PROCESS_TAG = "gLiteProcess"; public static final String NG_PROCESS_TAG = "ngProcess"; public static final String OARGRID_PROCESS_TAG = "oarGridProcess"; public static final String HIERARCHICAL_PROCESS_TAG = "hierarchicalProcess"; public static final String GRID_ENGINE_PROCESS_TAG = "gridEngineProcess"; public static final String GLITE_PROCESS_OPTIONS_TAG = "gLiteOptions"; public static final String GRID_ENGINE_OPTIONS_TAG = "gridEngineOption"; public static final String PROCESSES_TAG = "processes"; public static final String SERVICES_TAG = "services"; public static final String EXTENDED_JVM_TAG = "extendedJvm"; public static final String PROCESS_REFERENCE_TAG = "processReference"; public static final String SERVICE_REFERENCE_TAG = "serviceReference"; public static final String HIERARCHICIAL_REFERENCE_TAG = "hierarchicalReference"; public static final String COMMAND_PATH_TAG = "commandPath"; public static final String ENVIRONMENT_TAG = "environment"; public static final String HOST_LIST_TAG = "hostlist"; public static final String BSUB_OPTIONS_TAG = "bsubOption"; public static final String RES_REQ_TAG = "resourceRequirement"; public static final String SCRIPT_PATH_TAG = "scriptPath"; public static final String GLOBUS_OPTIONS_TAG = "globusOption"; public static final String COUNT_TAG = "count"; public static final String GLOBUS_MAXTIME_TAG = "maxTime"; public static final String PRUN_OPTIONS_TAG = "prunOption"; public static final String PROCESSOR_TAG = "processor"; public static final String HOSTS_NUMBER_TAG = "hostsNumber"; public static final String PROCESSOR_PER_NODE_TAG = "processorPerNode"; public static final String BOOKING_DURATION_TAG = "bookingDuration"; public static final String QUEUE_NAME_TAG = "queueName"; public static final String PARALLEL_ENVIRONMENT_TAG = "parallelEnvironment"; public static final String OUTPUT_FILE = "outputFile"; public static final String ERROR_FILE = "errorFile"; public static final String PBS_OPTIONS_TAG = "pbsOption"; public static final String OAR_OPTIONS_TAG = "oarOption"; public static final String OARGRID_OPTIONS_TAG = "oarGridOption"; public static final String OAR_RESOURCE_TAG = "resources"; public static final String OARGRID_WALLTIME_TAG = "walltime"; public static final String NG_OPTIONS_TAG = "ngOption"; public static final String EXECUTABLE_TAG = "executable"; //public static final String OAR_PROPERTY_TAG="properties"; public static final String VARIABLE_TAG = "variable"; public static final String CLASSPATH_TAG = "classpath"; public static final String BOOT_CLASSPATH_TAG = "bootclasspath"; public static final String JAVA_PATH_TAG = "javaPath"; public static final String POLICY_FILE_TAG = "policyFile"; public static final String LOG4J_FILE_TAG = "log4jpropertiesFile"; public static final String PROACTIVE_PROPS_FILE_TAG = 
"ProActiveUserPropertiesFile"; public static final String CLASSNAME_TAG = "classname"; public static final String PARAMETERS_TAG = "parameters"; public static final String ABS_PATH_TAG = "absolutePath"; public static final String REL_PATH_TAG = "relativePath"; public static final String GLITE_PATH_TAG = "JDLFilePath"; public static final String GLITE_REMOTE_PATH_TAG = "JDLRemoteFilePath"; public static final String GLITE_ARGUMENTS_TAG = "arguments"; public static final String GLITE_INPUTSANDBOX_TAG = "inputSandbox"; public static final String GLITE_OUTPUTSANDBOX_TAG = "outputSandbox"; public static final String GLITE_ENVIRONMENT_TAG = "environment"; public static final String GLITE_REQUIREMENTS_TAG = "requirements"; public static final String GLITE_RANK_TAG = "rank"; public static final String GLITE_CONFIG_TAG = "configFile"; public static final String GLITE_INPUTDATA_TAG = "inputData"; public static final String JVMPARAMETERS_TAG = "jvmParameters"; public static final String JVMPARAMETER_TAG = "parameter"; public static final String SECURITY_TAG = "security"; public static final String RMI_LOOKUP_TAG = "RMIRegistryLookup"; public static final String P2P_SERVICE_TAG = "P2PService"; public static final String PEERS_SET_TAG = "peerSet"; public static final String PEER_TAG = "peer"; public static final String FT_CONFIG_TAG = "faultTolerance"; public static final String FT_CKPTSERVER_TAG = "checkpointServer"; public static final String FT_RECPROCESS_TAG = "recoveryProcess"; public static final String FT_LOCSERVER_TAG = "locationServer"; public static final String FT_RESSERVER_TAG = "resourceServer"; public static final String FT_GLOBALSERVER_TAG = "globalServer"; public static final String FT_TTCVALUE_TAG = "ttc"; public static final String FT_PROTO_TAG = "protocol"; public static final String UNICORE_PROCESS_TAG = "unicoreProcess"; public static final String UNICORE_OPTIONS_TAG = "unicoreOption"; public static final String UNICORE_DIR_PATH_TAG = "unicoreDirPath"; public static final String UNICORE_KEYFILE_PATH_TAG = "keyFilePath"; public static final String UNICORE_USITE_TAG = "usite"; public static final String UNICORE_VSITE_TAG = "vsite"; public static final String FILE_TRANSFER_DEFINITIONS_TAG = "FileTransferDefinitions"; public static final String FILE_TRANSFER_TAG = "FileTransfer"; public static final String FILE_TRANSFER_FILE_TAG = "file"; public static final String FILE_TRANSFER_DIR_TAG = "dir"; public static final String FILE_TRANSFER_DEPLOY_TAG = "FileTransferDeploy"; public static final String FILE_TRANSFER_RETRIEVE_TAG = "FileTransferRetrieve"; public static final String FILE_TRANSFER_COPY_PROTOCOL_TAG = "copyProtocol"; public static final String FILE_TRANSFER_SRC_INFO_TAG = "sourceInfo"; public static final String FILE_TRANSFER_DST_INFO_TAG = "destinationInfo"; public static final String FILE_TRANSFER_IMPLICT_KEYWORD = "implicit"; }
Add specific tags for dependent, sequential and mpi process git-svn-id: 9146c88ff6d39b48099bf954d15d68f687b3fa69@3049 28e8926c-6b08-0410-baaa-805c5e19b8d6
src/org/objectweb/proactive/core/descriptor/xml/ProActiveDescriptorConstants.java
Add specific tags for dependent, sequential and mpi process
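The commit above only introduces new XML tag-name constants (mpiProcess, dependentProcessSequence, independentProcessSequence and the MPI path/options tags). As a small, hypothetical illustration of why such names live in a constants interface, the sketch below reuses three of those literal values in a TagDispatch helper that a descriptor parser could call while walking elements; the class and method names are invented.

public final class TagDispatch {

    // Literal values taken from the constants added in the commit above.
    static final String MPI_PROCESS_TAG = "mpiProcess";
    static final String DEPENDENT_PROCESS_SEQUENCE_TAG = "dependentProcessSequence";
    static final String SEQUENTIAL_PROCESS_TAG = "independentProcessSequence";

    /** True if the element name is one of the process tags introduced by the commit. */
    static boolean isNewProcessTag(String elementName) {
        return MPI_PROCESS_TAG.equals(elementName)
                || DEPENDENT_PROCESS_SEQUENCE_TAG.equals(elementName)
                || SEQUENTIAL_PROCESS_TAG.equals(elementName);
    }

    public static void main(String[] args) {
        System.out.println(isNewProcessTag("mpiProcess"));                 // true
        System.out.println(isNewProcessTag("independentProcessSequence")); // true
        System.out.println(isNewProcessTag("jvmProcess"));                 // false
    }
}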
Java
agpl-3.0
c10c1f6447087f31b64924de2d9fcf1e0a4228ef
0
quikkian-ua-devops/will-financials,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/kfs,bhutchinson/kfs,quikkian-ua-devops/kfs,smith750/kfs,ua-eas/kfs,ua-eas/kfs-devops-automation-fork,ua-eas/kfs,kuali/kfs,ua-eas/kfs,ua-eas/kfs-devops-automation-fork,kuali/kfs,kuali/kfs,quikkian-ua-devops/will-financials,smith750/kfs,kkronenb/kfs,ua-eas/kfs-devops-automation-fork,kkronenb/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,ua-eas/kfs,bhutchinson/kfs,smith750/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/kfs,ua-eas/kfs-devops-automation-fork,UniversityOfHawaii/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/will-financials,ua-eas/kfs,kkronenb/kfs,UniversityOfHawaii/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/will-financials,smith750/kfs,kuali/kfs,quikkian-ua-devops/kfs,kuali/kfs,bhutchinson/kfs,kkronenb/kfs,bhutchinson/kfs
/* * Copyright 2013 The Kuali Foundation. * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.module.tem.util; import java.util.Comparator; import org.apache.commons.lang.StringUtils; import org.kuali.kfs.module.tem.businessobject.TemDistributionAccountingLine; import org.kuali.rice.krad.util.ObjectUtils; /** * */ public class TemDistributionAccountingLineComparator implements Comparator<TemDistributionAccountingLine> { @Override public int compare(TemDistributionAccountingLine dee, TemDistributionAccountingLine dum) { if (ObjectUtils.isNull(dee.getObjectCode()) || StringUtils.isBlank(dee.getObjectCode().getFinancialObjectCodeName())) { if (ObjectUtils.isNull(dum.getObjectCode()) || StringUtils.isBlank(dum.getObjectCode().getFinancialObjectCodeName())) { return 0; // they're both effectively null, so they're equal } return 1; // dee's still empty, it should go to the top } if (ObjectUtils.isNull(dum.getObjectCode()) || StringUtils.isBlank(dum.getObjectCode().getFinancialObjectCodeName())) { return -1; // dum's empty; it should go to the top } return dee.getObjectCode().getFinancialObjectCodeName().compareTo(dum.getObjectCode().getFinancialObjectCodeName()); } }
work/src/org/kuali/kfs/module/tem/util/TemDistributionAccountingLineComparator.java
/* * Copyright 2013 The Kuali Foundation. * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.module.tem.util; import java.util.Comparator; import org.kuali.kfs.module.tem.businessobject.TemDistributionAccountingLine; /** * */ public class TemDistributionAccountingLineComparator implements Comparator<TemDistributionAccountingLine> { @Override public int compare(TemDistributionAccountingLine dee, TemDistributionAccountingLine dum) { return dee.getObjectCode().getFinancialObjectCodeName().compareTo(dum.getObjectCode().getFinancialObjectCodeName()); } }
KFSMI-11083: some null checks for the comparator
work/src/org/kuali/kfs/module/tem/util/TemDistributionAccountingLineComparator.java
KFSMI-11083: some null checks for the comparator
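The commit above guards a Comparator against null object codes and blank code names before delegating to String comparison. The sketch below shows the same null-guard pattern in isolation, with a hypothetical AccountingLine stand-in rather than the KFS business object, plus an alternative formulation with Comparator.nullsLast; it is not claimed to reproduce the exact ordering chosen in the KFS change.

import java.util.Comparator;

// Hypothetical stand-in types; only the null-guard pattern mirrors the commit above.
public class NullSafeComparatorSketch {

    static final class AccountingLine {
        private final String objectCodeName; // may be null or blank
        AccountingLine(String objectCodeName) { this.objectCodeName = objectCodeName; }
        String getObjectCodeName() { return objectCodeName; }
    }

    // Explicit guard style, similar in shape to the comparator in the commit.
    static int compareExplicit(AccountingLine a, AccountingLine b) {
        boolean aEmpty = a.getObjectCodeName() == null || a.getObjectCodeName().trim().isEmpty();
        boolean bEmpty = b.getObjectCodeName() == null || b.getObjectCodeName().trim().isEmpty();
        if (aEmpty && bEmpty) {
            return 0;      // both effectively empty: treat as equal
        }
        if (aEmpty) {
            return 1;      // empty names sort after populated ones in this sketch
        }
        if (bEmpty) {
            return -1;
        }
        return a.getObjectCodeName().compareTo(b.getObjectCodeName());
    }

    // The same idea expressed with the Comparator combinators (handles nulls only, not blanks).
    static final Comparator<AccountingLine> BY_NAME_NULLS_LAST =
            Comparator.comparing(AccountingLine::getObjectCodeName,
                    Comparator.nullsLast(Comparator.<String>naturalOrder()));

    public static void main(String[] args) {
        AccountingLine named = new AccountingLine("Travel");
        AccountingLine unnamed = new AccountingLine(null);
        System.out.println(compareExplicit(named, unnamed));             // negative value
        System.out.println(BY_NAME_NULLS_LAST.compare(named, unnamed));  // negative value
    }
}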
Java
lgpl-2.1
363775b08270e858151eb0bf7b14ddbee7987f0b
0
jfree/jfreechart-fse,oskopek/jfreechart-fse,oskopek/jfreechart-fse,jfree/jfreechart-fse
/* =========================================================== * JFreeChart : a free chart library for the Java(tm) platform * =========================================================== * * (C) Copyright 2000-2014, by Object Refinery Limited and Contributors. * * Project Info: http://www.jfree.org/jfreechart/index.html * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, * USA. * * [Oracle and Java are registered trademarks of Oracle and/or its affiliates. * Other names may be trademarks of their respective owners.] * * --------- * Plot.java * --------- * (C) Copyright 2000-2014, by Object Refinery Limited and Contributors. * * Original Author: David Gilbert (for Object Refinery Limited); * Contributor(s): Sylvain Vieujot; * Jeremy Bowman; * Andreas Schneider; * Gideon Krause; * Nicolas Brodu; * Michal Krause; * Richard West, Advanced Micro Devices, Inc.; * Peter Kolb - patches 2603321, 2809117; * * Changes * ------- * 21-Jun-2001 : Removed redundant JFreeChart parameter from constructors (DG); * 18-Sep-2001 : Updated header info and fixed DOS encoding problem (DG); * 19-Oct-2001 : Moved series paint and stroke methods from JFreeChart * class (DG); * 23-Oct-2001 : Created renderer for LinePlot class (DG); * 07-Nov-2001 : Changed type names for ChartChangeEvent (DG); * Tidied up some Javadoc comments (DG); * 13-Nov-2001 : Changes to allow for null axes on plots such as PiePlot (DG); * Added plot/axis compatibility checks (DG); * 12-Dec-2001 : Changed constructors to protected, and removed unnecessary * 'throws' clauses (DG); * 13-Dec-2001 : Added tooltips (DG); * 22-Jan-2002 : Added handleClick() method, as part of implementation for * crosshairs (DG); * Moved tooltips reference into ChartInfo class (DG); * 23-Jan-2002 : Added test for null axes in chartChanged() method, thanks * to Barry Evans for the bug report (number 506979 on * SourceForge) (DG); * Added a zoom() method (DG); * 05-Feb-2002 : Updated setBackgroundPaint(), setOutlineStroke() and * setOutlinePaint() to better handle null values, as suggested * by Sylvain Vieujot (DG); * 06-Feb-2002 : Added background image, plus alpha transparency for background * and foreground (DG); * 06-Mar-2002 : Added AxisConstants interface (DG); * 26-Mar-2002 : Changed zoom method from empty to abstract (DG); * 23-Apr-2002 : Moved dataset from JFreeChart class (DG); * 11-May-2002 : Added ShapeFactory interface for getShape() methods, * contributed by Jeremy Bowman (DG); * 28-May-2002 : Fixed bug in setSeriesPaint(int, Paint) for subplots (AS); * 25-Jun-2002 : Removed redundant imports (DG); * 30-Jul-2002 : Added 'no data' message for charts with null or empty * datasets (DG); * 21-Aug-2002 : Added code to extend series array if necessary (refer to * SourceForge bug id 594547 for details) (DG); * 17-Sep-2002 : Fixed bug in getSeriesOutlineStroke() method, reported by * Andreas Schroeder 
(DG); * 23-Sep-2002 : Added getLegendItems() abstract method (DG); * 24-Sep-2002 : Removed firstSeriesIndex, subplots now use their own paint * settings, there is a new mechanism for the legend to collect * the legend items (DG); * 27-Sep-2002 : Added dataset group (DG); * 14-Oct-2002 : Moved listener storage into EventListenerList. Changed some * abstract methods to empty implementations (DG); * 28-Oct-2002 : Added a getBackgroundImage() method (DG); * 21-Nov-2002 : Added a plot index for identifying subplots in combined and * overlaid charts (DG); * 22-Nov-2002 : Changed all attributes from 'protected' to 'private'. Added * dataAreaRatio attribute from David M O'Donnell's code (DG); * 09-Jan-2003 : Integrated fix for plot border contributed by Gideon * Krause (DG); * 17-Jan-2003 : Moved to com.jrefinery.chart.plot (DG); * 23-Jan-2003 : Removed one constructor (DG); * 26-Mar-2003 : Implemented Serializable (DG); * 14-Jul-2003 : Moved the dataset and secondaryDataset attributes to the * CategoryPlot and XYPlot classes (DG); * 21-Jul-2003 : Moved DrawingSupplier from CategoryPlot and XYPlot up to this * class (DG); * 20-Aug-2003 : Implemented Cloneable (DG); * 11-Sep-2003 : Listeners and clone (NB); * 29-Oct-2003 : Added workaround for font alignment in PDF output (DG); * 03-Dec-2003 : Modified draw method to accept anchor (DG); * 12-Mar-2004 : Fixed clipping bug in drawNoDataMessage() method (DG); * 07-Apr-2004 : Modified string bounds calculation (DG); * 04-Nov-2004 : Added default shapes for legend items (DG); * 25-Nov-2004 : Some changes to the clone() method implementation (DG); * 23-Feb-2005 : Implemented new LegendItemSource interface (and also * PublicCloneable) (DG); * 21-Apr-2005 : Replaced Insets with RectangleInsets (DG); * 05-May-2005 : Removed unused draw() method (DG); * 06-Jun-2005 : Fixed bugs in equals() method (DG); * 01-Sep-2005 : Moved dataAreaRatio from here to ContourPlot (DG); * ------------- JFREECHART 1.0.x --------------------------------------------- * 30-Jun-2006 : Added background image alpha - see bug report 1514904 (DG); * 05-Sep-2006 : Implemented the MarkerChangeListener interface (DG); * 11-Jan-2007 : Added some argument checks, event notifications, and many * API doc updates (DG); * 03-Apr-2007 : Made drawBackgroundImage() public (DG); * 07-Jun-2007 : Added new fillBackground() method to handle GradientPaint * taking into account orientation (DG); * 25-Mar-2008 : Added fireChangeEvent() method - see patch 1914411 (DG); * 15-Aug-2008 : Added setDrawingSupplier() method with notify flag (DG); * 13-Jan-2009 : Added notify flag (DG); * 19-Mar-2009 : Added entity support - see patch 2603321 by Peter Kolb (DG); * 24-Jun-2009 : Implemented AnnotationChangeListener (see patch 2809117 by * PK) (DG); * 13-Jul-2009 : Plot background image should be clipped if necessary (DG); * 10-Mar-2014 : Remove LegendItemCollection (DG); * */ package org.jfree.chart.plot; import java.awt.AlphaComposite; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Composite; import java.awt.Font; import java.awt.Graphics2D; import java.awt.Image; import java.awt.Paint; import java.awt.Shape; import java.awt.Stroke; import java.awt.geom.Ellipse2D; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import javax.swing.event.EventListenerList; import org.jfree.chart.JFreeChart; import 
org.jfree.chart.LegendItem; import org.jfree.chart.LegendItemSource; import org.jfree.chart.annotations.Annotation; import org.jfree.chart.axis.AxisLocation; import org.jfree.chart.drawable.BorderPainter; import org.jfree.chart.drawable.ColorPainter; import org.jfree.chart.drawable.Drawable; import org.jfree.chart.entity.EntityCollection; import org.jfree.chart.entity.PlotEntity; import org.jfree.chart.event.AnnotationChangeEvent; import org.jfree.chart.event.AnnotationChangeListener; import org.jfree.chart.event.AxisChangeEvent; import org.jfree.chart.event.AxisChangeListener; import org.jfree.chart.event.ChartChangeEventType; import org.jfree.chart.event.MarkerChangeEvent; import org.jfree.chart.event.MarkerChangeListener; import org.jfree.chart.event.PlotChangeEvent; import org.jfree.chart.event.PlotChangeListener; import org.jfree.chart.text.G2TextMeasurer; import org.jfree.chart.text.TextBlock; import org.jfree.chart.text.TextBlockAnchor; import org.jfree.chart.text.TextUtilities; import org.jfree.chart.ui.Align; import org.jfree.chart.ui.RectangleEdge; import org.jfree.chart.ui.RectangleInsets; import org.jfree.chart.util.ObjectUtils; import org.jfree.chart.util.PaintUtils; import org.jfree.chart.util.PublicCloneable; import org.jfree.chart.util.SerialUtils; import org.jfree.data.general.DatasetChangeEvent; import org.jfree.data.general.DatasetChangeListener; import org.jfree.data.general.LabelChangeEvent; import org.jfree.data.general.LabelChangeListener; import org.jfree.data.general.SelectionChangeEvent; import org.jfree.data.general.SelectionChangeListener; /** * The base class for all plots in JFreeChart. The {@link JFreeChart} class * delegates the drawing of axes and data to the plot. This base class * provides facilities common to most plot types. */ public abstract class Plot implements AxisChangeListener, DatasetChangeListener, SelectionChangeListener, LabelChangeListener, AnnotationChangeListener, MarkerChangeListener, LegendItemSource, PublicCloneable, Cloneable, Serializable { /** For serialization. */ private static final long serialVersionUID = -8831571430103671324L; /** Useful constant representing zero. */ public static final Number ZERO = 0; /** The default insets. */ public static final RectangleInsets DEFAULT_INSETS = new RectangleInsets(4.0, 8.0, 4.0, 8.0); /** The default outline stroke. */ public static final Stroke DEFAULT_OUTLINE_STROKE = new BasicStroke(0.5f, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND); /** The default outline color. */ public static final Color DEFAULT_OUTLINE_COLOR = Color.GRAY; /** The default foreground alpha transparency. */ public static final float DEFAULT_FOREGROUND_ALPHA = 1.0f; /** The default background alpha transparency. */ public static final float DEFAULT_BACKGROUND_ALPHA = 1.0f; /** The minimum width at which the plot should be drawn. */ public static final int MINIMUM_WIDTH_TO_DRAW = 10; /** The minimum height at which the plot should be drawn. */ public static final int MINIMUM_HEIGHT_TO_DRAW = 10; /** A default box shape for legend items. */ public static final Shape DEFAULT_LEGEND_ITEM_BOX = new Rectangle2D.Double(-4.0, -4.0, 8.0, 8.0); /** A default circle shape for legend items. */ public static final Shape DEFAULT_LEGEND_ITEM_CIRCLE = new Ellipse2D.Double(-4.0, -4.0, 8.0, 8.0); /** The parent plot (<code>null</code> if this is the root plot). */ private Plot parent; /** The message to display if no data is available. */ private String noDataMessage; /** The font used to display the 'no data' message. 
*/ private Font noDataMessageFont; /** The paint used to draw the 'no data' message. */ private transient Paint noDataMessagePaint; /** Amount of blank space around the plot area. */ private RectangleInsets insets; private Drawable borderPainter; /** An optional painter used to fill the plot background. */ private Drawable backgroundPainter; /** An optional image for the plot background. */ private transient Image backgroundImage; // not currently serialized /** The alignment for the background image. */ private int backgroundImageAlignment = Align.FIT; /** The alpha value used to draw the background image. */ private float backgroundImageAlpha = 0.5f; /** The alpha-transparency for the plot. */ private float foregroundAlpha; /** The alpha transparency for the background paint. */ private float backgroundAlpha; /** The drawing supplier. */ private DrawingSupplier drawingSupplier; /** Storage for registered change listeners. */ private transient EventListenerList listenerList; /** * A flag that controls whether or not the plot will notify listeners * of changes (defaults to true, but sometimes it is useful to disable * this). * * @since 1.0.13 */ private boolean notify; /** * Creates a new plot. */ protected Plot() { this.parent = null; this.insets = DEFAULT_INSETS; this.backgroundPainter = new ColorPainter(Color.WHITE); this.backgroundAlpha = DEFAULT_BACKGROUND_ALPHA; this.backgroundImage = null; this.borderPainter = new BorderPainter(Color.GRAY, DEFAULT_OUTLINE_STROKE); this.foregroundAlpha = DEFAULT_FOREGROUND_ALPHA; this.noDataMessage = null; this.noDataMessageFont = new Font("SansSerif", Font.PLAIN, 12); this.noDataMessagePaint = Color.BLACK; this.drawingSupplier = new DefaultDrawingSupplier(); this.notify = true; this.listenerList = new EventListenerList(); } /** * Returns the string that is displayed when the dataset is empty or * <code>null</code>. * * @return The 'no data' message (<code>null</code> possible). * * @see #setNoDataMessage(String) * @see #getNoDataMessageFont() * @see #getNoDataMessagePaint() */ public String getNoDataMessage() { return this.noDataMessage; } /** * Sets the message that is displayed when the dataset is empty or * <code>null</code>, and sends a {@link PlotChangeEvent} to all registered * listeners. * * @param message the message (<code>null</code> permitted). * * @see #getNoDataMessage() */ public void setNoDataMessage(String message) { this.noDataMessage = message; fireChangeEvent(); } /** * Returns the font used to display the 'no data' message. * * @return The font (never <code>null</code>). * * @see #setNoDataMessageFont(Font) * @see #getNoDataMessage() */ public Font getNoDataMessageFont() { return this.noDataMessageFont; } /** * Sets the font used to display the 'no data' message and sends a * {@link PlotChangeEvent} to all registered listeners. * * @param font the font (<code>null</code> not permitted). * * @see #getNoDataMessageFont() */ public void setNoDataMessageFont(Font font) { if (font == null) { throw new IllegalArgumentException("Null 'font' argument."); } this.noDataMessageFont = font; fireChangeEvent(); } /** * Returns the paint used to display the 'no data' message. * * @return The paint (never <code>null</code>). * * @see #setNoDataMessagePaint(Paint) * @see #getNoDataMessage() */ public Paint getNoDataMessagePaint() { return this.noDataMessagePaint; } /** * Sets the paint used to display the 'no data' message and sends a * {@link PlotChangeEvent} to all registered listeners. 
* * @param paint the paint (<code>null</code> not permitted). * * @see #getNoDataMessagePaint() */ public void setNoDataMessagePaint(Paint paint) { if (paint == null) { throw new IllegalArgumentException("Null 'paint' argument."); } this.noDataMessagePaint = paint; fireChangeEvent(); } /** * Returns a short string describing the plot type. * <P> * Note: this gets used in the chart property editing user interface, * but there needs to be a better mechanism for identifying the plot type. * * @return A short string describing the plot type (never * <code>null</code>). */ public abstract String getPlotType(); /** * Returns the parent plot (or <code>null</code> if this plot is not part * of a combined plot). * * @return The parent plot. * * @see #setParent(Plot) * @see #getRootPlot() */ public Plot getParent() { return this.parent; } /** * Sets the parent plot. This method is intended for internal use, you * shouldn't need to call it directly. * * @param parent the parent plot (<code>null</code> permitted). * * @see #getParent() */ public void setParent(Plot parent) { this.parent = parent; } /** * Returns the root plot. * * @return The root plot. * * @see #getParent() */ public Plot getRootPlot() { Plot p = getParent(); if (p == null) { return this; } return p.getRootPlot(); } /** * Returns <code>true</code> if this plot is part of a combined plot * structure (that is, {@link #getParent()} returns a non-<code>null</code> * value), and <code>false</code> otherwise. * * @return <code>true</code> if this plot is part of a combined plot * structure. * * @see #getParent() */ public boolean isSubplot() { return (getParent() != null); } /** * Returns the insets for the plot area. * * @return The insets (never <code>null</code>). * * @see #setInsets(RectangleInsets) */ public RectangleInsets getInsets() { return this.insets; } /** * Sets the insets for the plot and sends a {@link PlotChangeEvent} to * all registered listeners. * * @param insets the new insets (<code>null</code> not permitted). * * @see #getInsets() * @see #setInsets(RectangleInsets, boolean) */ public void setInsets(RectangleInsets insets) { setInsets(insets, true); } /** * Returns the background painter. The default value is * <code>new ColorPainter(Color.WHITE)</code>. * * @return The background painter (possibly <code>null</code>). */ public Drawable getBackgroundPainter() { return this.backgroundPainter; } /** * Sets the background painter and sends a change event to all registered * listeners. * * @param painter the new painter (<code>null</code> permitted). */ public void setBackgroundPainter(Drawable painter) { this.backgroundPainter = painter; fireChangeEvent(); } public void setBackgroundColor(Color color) { if (color != null) { setBackgroundPainter(new ColorPainter(color)); } else { setBackgroundPainter(null); } } /** * Sets the insets for the plot and, if requested, and sends a * {@link PlotChangeEvent} to all registered listeners. * * @param insets the new insets (<code>null</code> not permitted). * @param notify a flag that controls whether the registered listeners are * notified. * * @see #getInsets() * @see #setInsets(RectangleInsets) */ public void setInsets(RectangleInsets insets, boolean notify) { if (insets == null) { throw new IllegalArgumentException("Null 'insets' argument."); } if (!this.insets.equals(insets)) { this.insets = insets; if (notify) { fireChangeEvent(); } } } /** * Returns the alpha transparency of the plot area background. * * @return The alpha transparency. 
* * @see #setBackgroundAlpha(float) */ public float getBackgroundAlpha() { return this.backgroundAlpha; } /** * Sets the alpha transparency of the plot area background, and notifies * registered listeners that the plot has been modified. * * @param alpha the new alpha value (in the range 0.0f to 1.0f). * * @see #getBackgroundAlpha() */ public void setBackgroundAlpha(float alpha) { if (this.backgroundAlpha != alpha) { this.backgroundAlpha = alpha; fireChangeEvent(); } } /** * Returns the drawing supplier for the plot. * * @return The drawing supplier (possibly <code>null</code>). * * @see #setDrawingSupplier(DrawingSupplier) */ public DrawingSupplier getDrawingSupplier() { DrawingSupplier result = null; Plot p = getParent(); if (p != null) { result = p.getDrawingSupplier(); } else { result = this.drawingSupplier; } return result; } /** * Sets the drawing supplier for the plot and sends a * {@link PlotChangeEvent} to all registered listeners. The drawing * supplier is responsible for supplying a limitless (possibly repeating) * sequence of <code>Paint</code>, <code>Stroke</code> and * <code>Shape</code> objects that the plot's renderer(s) can use to * populate its (their) tables. * * @param supplier the new supplier. * * @see #getDrawingSupplier() */ public void setDrawingSupplier(DrawingSupplier supplier) { this.drawingSupplier = supplier; fireChangeEvent(); } /** * Sets the drawing supplier for the plot and, if requested, sends a * {@link PlotChangeEvent} to all registered listeners. The drawing * supplier is responsible for supplying a limitless (possibly repeating) * sequence of <code>Paint</code>, <code>Stroke</code> and * <code>Shape</code> objects that the plot's renderer(s) can use to * populate its (their) tables. * * @param supplier the new supplier. * @param notify notify listeners? * * @see #getDrawingSupplier() * * @since 1.0.11 */ public void setDrawingSupplier(DrawingSupplier supplier, boolean notify) { this.drawingSupplier = supplier; if (notify) { fireChangeEvent(); } } /** * Returns the background image that is used to fill the plot's background * area. * * @return The image (possibly <code>null</code>). * * @see #setBackgroundImage(Image) */ public Image getBackgroundImage() { return this.backgroundImage; } /** * Sets the background image for the plot and sends a * {@link PlotChangeEvent} to all registered listeners. * * @param image the image (<code>null</code> permitted). * * @see #getBackgroundImage() */ public void setBackgroundImage(Image image) { this.backgroundImage = image; fireChangeEvent(); } /** * Returns the background image alignment. Alignment constants are defined * in the <code>org.jfree.ui.Align</code> class in the JCommon class * library. * * @return The alignment. * * @see #setBackgroundImageAlignment(int) */ public int getBackgroundImageAlignment() { return this.backgroundImageAlignment; } /** * Sets the alignment for the background image and sends a * {@link PlotChangeEvent} to all registered listeners. Alignment options * are defined by the {@link org.jfree.ui.Align} class in the JCommon * class library. * * @param alignment the alignment. * * @see #getBackgroundImageAlignment() */ public void setBackgroundImageAlignment(int alignment) { if (this.backgroundImageAlignment != alignment) { this.backgroundImageAlignment = alignment; fireChangeEvent(); } } /** * Returns the alpha transparency used to draw the background image. This * is a value in the range 0.0f to 1.0f, where 0.0f is fully transparent * and 1.0f is fully opaque. 
* * @return The alpha transparency. * * @see #setBackgroundImageAlpha(float) */ public float getBackgroundImageAlpha() { return this.backgroundImageAlpha; } /** * Sets the alpha transparency used when drawing the background image. * * @param alpha the alpha transparency (in the range 0.0f to 1.0f, where * 0.0f is fully transparent, and 1.0f is fully opaque). * * @throws IllegalArgumentException if <code>alpha</code> is not within * the specified range. * * @see #getBackgroundImageAlpha() */ public void setBackgroundImageAlpha(float alpha) { if (alpha < 0.0f || alpha > 1.0f) { throw new IllegalArgumentException( "The 'alpha' value must be in the range 0.0f to 1.0f."); } if (this.backgroundImageAlpha != alpha) { this.backgroundImageAlpha = alpha; fireChangeEvent(); } } public Drawable getBorderPainter() { return this.borderPainter; } public void setBorderPainter(Drawable painter) { this.borderPainter = painter; fireChangeEvent(); } /** * Returns the alpha-transparency for the plot foreground. * * @return The alpha-transparency. * * @see #setForegroundAlpha(float) */ public float getForegroundAlpha() { return this.foregroundAlpha; } /** * Sets the alpha-transparency for the plot and sends a * {@link PlotChangeEvent} to all registered listeners. * * @param alpha the new alpha transparency. * * @see #getForegroundAlpha() */ public void setForegroundAlpha(float alpha) { if (this.foregroundAlpha != alpha) { this.foregroundAlpha = alpha; fireChangeEvent(); } } /** * Returns the legend items for the plot. By default, this method returns * <code>null</code>. Subclasses should override to return a list of * legend items for the plot. * * @return The legend items for the plot (possibly empty, but never * <code>null</code>). */ @Override public List<LegendItem> getLegendItems() { return new ArrayList<LegendItem>(); } /** * Returns a flag that controls whether or not change events are sent to * registered listeners. * * @return A boolean. * * @see #setNotify(boolean) * * @since 1.0.13 */ public boolean isNotify() { return this.notify; } /** * Sets a flag that controls whether or not listeners receive * {@link PlotChangeEvent} notifications. * * @param notify a boolean. * * @see #isNotify() * * @since 1.0.13 */ public void setNotify(boolean notify) { this.notify = notify; // if the flag is being set to true, there may be queued up changes... if (notify) { notifyListeners(new PlotChangeEvent(this)); } } /** * Registers an object for notification of changes to the plot. * * @param listener the object to be registered. * * @see #removeChangeListener(PlotChangeListener) */ public void addChangeListener(PlotChangeListener listener) { this.listenerList.add(PlotChangeListener.class, listener); } /** * Unregisters an object for notification of changes to the plot. * * @param listener the object to be unregistered. * * @see #addChangeListener(PlotChangeListener) */ public void removeChangeListener(PlotChangeListener listener) { this.listenerList.remove(PlotChangeListener.class, listener); } /** * Notifies all registered listeners that the plot has been modified. * * @param event information about the change event. 
*/ public void notifyListeners(PlotChangeEvent event) { // if the 'notify' flag has been switched to false, we don't notify // the listeners if (!this.notify) { return; } Object[] listeners = this.listenerList.getListenerList(); for (int i = listeners.length - 2; i >= 0; i -= 2) { if (listeners[i] == PlotChangeListener.class) { ((PlotChangeListener) listeners[i + 1]).plotChanged(event); } } } /** * Sends a {@link PlotChangeEvent} to all registered listeners. * * @since 1.0.10 */ protected void fireChangeEvent() { notifyListeners(new PlotChangeEvent(this)); } /** * Draws the plot within the specified area. The anchor is a point on the * chart that is specified externally (for instance, it may be the last * point of the last mouse click performed by the user) - plots can use or * ignore this value as they see fit. * <br><br> * Subclasses need to provide an implementation of this method, obviously. * * @param g2 the graphics device. * @param area the plot area. * @param anchor the anchor point (<code>null</code> permitted). * @param parentState the parent state (if any). * @param info carries back plot rendering info. */ public abstract void draw(Graphics2D g2, Rectangle2D area, Point2D anchor, PlotState parentState, PlotRenderingInfo info); /** * Draws the plot background (the background color and/or image). * <P> * This method will be called during the chart drawing process and is * declared public so that it can be accessed by the renderers used by * certain subclasses. You shouldn't need to call this method directly. * * @param g2 the graphics device. * @param area the area within which the plot should be drawn. */ public void drawBackground(Graphics2D g2, Rectangle2D area) { // some subclasses override this method completely, so don't put // anything here that *must* be done if (this.backgroundPainter != null) { this.backgroundPainter.draw(g2, area); } drawBackgroundImage(g2, area); } /** * Draws the background image (if there is one) aligned within the * specified area. * * @param g2 the graphics device. * @param area the area. * * @see #getBackgroundImage() * @see #getBackgroundImageAlignment() * @see #getBackgroundImageAlpha() */ public void drawBackgroundImage(Graphics2D g2, Rectangle2D area) { if (this.backgroundImage == null) { return; // nothing to do } Composite savedComposite = g2.getComposite(); g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, this.backgroundImageAlpha)); Rectangle2D dest = new Rectangle2D.Double(0.0, 0.0, this.backgroundImage.getWidth(null), this.backgroundImage.getHeight(null)); Align.align(dest, area, this.backgroundImageAlignment); Shape savedClip = g2.getClip(); g2.clip(area); g2.drawImage(this.backgroundImage, (int) dest.getX(), (int) dest.getY(), (int) dest.getWidth() + 1, (int) dest.getHeight() + 1, null); g2.setClip(savedClip); g2.setComposite(savedComposite); } /** * Draws the plot outline. This method will be called during the chart * drawing process and is declared public so that it can be accessed by the * renderers used by certain subclasses. You shouldn't need to call this * method directly. * * @param g2 the graphics device. * @param area the area within which the plot should be drawn. */ public void drawOutline(Graphics2D g2, Rectangle2D area) { // FIXME : rename this drawBorder if (this.borderPainter != null) { this.borderPainter.draw(g2, area); } } /** * Draws a message to state that there is no data to plot. * * @param g2 the graphics device. * @param area the area within which the plot should be drawn. 
*/ protected void drawNoDataMessage(Graphics2D g2, Rectangle2D area) { Shape savedClip = g2.getClip(); g2.clip(area); String message = this.noDataMessage; if (message != null) { g2.setFont(this.noDataMessageFont); g2.setPaint(this.noDataMessagePaint); TextBlock block = TextUtilities.createTextBlock( this.noDataMessage, this.noDataMessageFont, this.noDataMessagePaint, 0.9f * (float) area.getWidth(), new G2TextMeasurer(g2)); block.draw(g2, (float) area.getCenterX(), (float) area.getCenterY(), TextBlockAnchor.CENTER); } g2.setClip(savedClip); } /** * Creates a plot entity that contains a reference to the plot and the * data area as shape. * * @param dataArea the data area used as hot spot for the entity. * @param plotState the plot rendering info containing a reference to the * EntityCollection. * @param toolTip the tool tip (defined in the respective Plot * subclass) (<code>null</code> permitted). * @param urlText the url (defined in the respective Plot subclass) * (<code>null</code> permitted). * * @since 1.0.13 */ protected void createAndAddEntity(Rectangle2D dataArea, PlotRenderingInfo plotState, String toolTip, String urlText) { if (plotState != null && plotState.getOwner() != null) { EntityCollection e = plotState.getOwner().getEntityCollection(); if (e != null) { e.add(new PlotEntity(dataArea, this, toolTip, urlText)); } } } /** * Handles a 'click' on the plot. Since the plot does not maintain any * information about where it has been drawn, the plot rendering info is * supplied as an argument so that the plot dimensions can be determined. * * @param x the x coordinate (in Java2D space). * @param y the y coordinate (in Java2D space). * @param info an object containing information about the dimensions of * the plot. */ public void handleClick(int x, int y, PlotRenderingInfo info) { // provides a 'no action' default } /** * Performs a zoom on the plot. Subclasses should override if zooming is * appropriate for the type of plot. * * @param percent the zoom percentage. */ public void zoom(double percent) { // do nothing by default. } /** * Receives notification of a change to an {@link Annotation} added to * this plot. * * @param event information about the event (not used here). * * @since 1.0.14 */ @Override public void annotationChanged(AnnotationChangeEvent event) { fireChangeEvent(); } /** * Receives notification of a change to one of the plot's axes. * * @param event information about the event (not used here). */ @Override public void axisChanged(AxisChangeEvent event) { fireChangeEvent(); } /** * Receives notification of a change to the plot's dataset. * <P> * The plot reacts by passing on a plot change event to all registered * listeners. * * @param event information about the event (not used here). */ @Override public void datasetChanged(DatasetChangeEvent event) { PlotChangeEvent newEvent = new PlotChangeEvent(this); newEvent.setType(ChartChangeEventType.DATASET_UPDATED); notifyListeners(newEvent); } /** * Receives notification of a change to the selection state of the plot's data * <P> * The plot reacts by passing on a plot change event to all registered * listeners. * * @param event information about the event (not used here). 
*/ @Override public void selectionChanged(SelectionChangeEvent event) { //could be typed but would require typing Plot and its decendents with a DatasetCursor PlotChangeEvent newEvent = new PlotChangeEvent(this); newEvent.setType(ChartChangeEventType.GENERAL); notifyListeners(newEvent); } /** * Receives notification of a change to the label information of the plot's data * <P> * The plot reacts by passing on a plot change event to all registered * listeners. * * @param event information about the event (not used here). */ @Override public void labelChanged(LabelChangeEvent event) { //could be typed but would require typing Plot and its decendents with a DatasetCursor PlotChangeEvent newEvent = new PlotChangeEvent(this); newEvent.setType(ChartChangeEventType.GENERAL); notifyListeners(newEvent); } /** * Receives notification of a change to a marker that is assigned to the * plot. * * @param event the event. * * @since 1.0.3 */ @Override public void markerChanged(MarkerChangeEvent event) { fireChangeEvent(); } /** * Adjusts the supplied x-value. * * @param x the x-value. * @param w1 width 1. * @param w2 width 2. * @param edge the edge (left or right). * * @return The adjusted x-value. */ protected double getRectX(double x, double w1, double w2, RectangleEdge edge) { double result = x; if (edge == RectangleEdge.LEFT) { result = result + w1; } else if (edge == RectangleEdge.RIGHT) { result = result + w2; } return result; } /** * Adjusts the supplied y-value. * * @param y the x-value. * @param h1 height 1. * @param h2 height 2. * @param edge the edge (top or bottom). * * @return The adjusted y-value. */ protected double getRectY(double y, double h1, double h2, RectangleEdge edge) { double result = y; if (edge == RectangleEdge.TOP) { result = result + h1; } else if (edge == RectangleEdge.BOTTOM) { result = result + h2; } return result; } /** * Tests this plot for equality with another object. * * @param obj the object (<code>null</code> permitted). * * @return <code>true</code> or <code>false</code>. */ @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof Plot)) { return false; } Plot that = (Plot) obj; if (!ObjectUtils.equal(this.noDataMessage, that.noDataMessage)) { return false; } if (!ObjectUtils.equal( this.noDataMessageFont, that.noDataMessageFont )) { return false; } if (!PaintUtils.equal(this.noDataMessagePaint, that.noDataMessagePaint)) { return false; } if (!ObjectUtils.equal(this.insets, that.insets)) { return false; } if (!ObjectUtils.equal(this.borderPainter, that.borderPainter)) { return false; } if (!ObjectUtils.equal(this.backgroundPainter, that.backgroundPainter)) { return false; } if (!ObjectUtils.equal(this.backgroundImage, that.backgroundImage)) { return false; } if (this.backgroundImageAlignment != that.backgroundImageAlignment) { return false; } if (this.backgroundImageAlpha != that.backgroundImageAlpha) { return false; } if (this.foregroundAlpha != that.foregroundAlpha) { return false; } if (this.backgroundAlpha != that.backgroundAlpha) { return false; } if (!this.drawingSupplier.equals(that.drawingSupplier)) { return false; } if (this.notify != that.notify) { return false; } return true; } /** * Creates a clone of the plot. * * @return A clone. * * @throws CloneNotSupportedException if some component of the plot does not * support cloning. 
*/ @Override public Object clone() throws CloneNotSupportedException { Plot clone = (Plot) super.clone(); // private Plot parent <-- don't clone the parent plot, but take care // childs in combined plots instead clone.drawingSupplier = ObjectUtils.clone(this.drawingSupplier); clone.listenerList = new EventListenerList(); return clone; } /** * Provides serialization support. * * @param stream the output stream. * * @throws IOException if there is an I/O error. */ private void writeObject(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); SerialUtils.writePaint(this.noDataMessagePaint, stream); //SerialUtils.writeStroke(this.outlineStroke, stream); //SerialUtils.writePaint(this.outlinePaint, stream); // backgroundImage } /** * Provides serialization support. * * @param stream the input stream. * * @throws IOException if there is an I/O error. * @throws ClassNotFoundException if there is a classpath problem. */ private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); this.noDataMessagePaint = SerialUtils.readPaint(stream); //this.outlineStroke = SerialUtils.readStroke(stream); //this.outlinePaint = SerialUtils.readPaint(stream); // backgroundImage this.listenerList = new EventListenerList(); } /** * Resolves a domain axis location for a given plot orientation. * * @param location the location (<code>null</code> not permitted). * @param orientation the orientation (<code>null</code> not permitted). * * @return The edge (never <code>null</code>). */ public static RectangleEdge resolveDomainAxisLocation( AxisLocation location, PlotOrientation orientation) { if (location == null) { throw new IllegalArgumentException("Null 'location' argument."); } if (orientation == null) { throw new IllegalArgumentException("Null 'orientation' argument."); } RectangleEdge result = null; if (location == AxisLocation.TOP_OR_RIGHT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.RIGHT; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.TOP; } } else if (location == AxisLocation.TOP_OR_LEFT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.LEFT; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.TOP; } } else if (location == AxisLocation.BOTTOM_OR_RIGHT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.RIGHT; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.BOTTOM; } } else if (location == AxisLocation.BOTTOM_OR_LEFT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.LEFT; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.BOTTOM; } } // the above should cover all the options... if (result == null) { throw new IllegalStateException("resolveDomainAxisLocation()"); } return result; } /** * Resolves a range axis location for a given plot orientation. * * @param location the location (<code>null</code> not permitted). * @param orientation the orientation (<code>null</code> not permitted). * * @return The edge (never <code>null</code>). 
*/ public static RectangleEdge resolveRangeAxisLocation( AxisLocation location, PlotOrientation orientation) { if (location == null) { throw new IllegalArgumentException("Null 'location' argument."); } if (orientation == null) { throw new IllegalArgumentException("Null 'orientation' argument."); } RectangleEdge result = null; if (location == AxisLocation.TOP_OR_RIGHT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.TOP; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.RIGHT; } } else if (location == AxisLocation.TOP_OR_LEFT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.TOP; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.LEFT; } } else if (location == AxisLocation.BOTTOM_OR_RIGHT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.BOTTOM; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.RIGHT; } } else if (location == AxisLocation.BOTTOM_OR_LEFT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.BOTTOM; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.LEFT; } } // the above should cover all the options... if (result == null) { throw new IllegalStateException("resolveRangeAxisLocation()"); } return result; } }
src/main/java/org/jfree/chart/plot/Plot.java
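The Plot class above funnels every property change through fireChangeEvent() and gates delivery behind a notify flag, so a batch of setter calls can be applied with a single notification when the flag is switched back on. The sketch below isolates that mechanism with hypothetical names (NotifyingModel, ChangeListener); it is a minimal model of the pattern, not JFreeChart code. Plot itself stores listeners in a javax.swing.event.EventListenerList rather than a plain list, but the gating logic is the same.

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

// Minimal model of the notify-flag pattern; these are invented names, not JFreeChart classes.
public class NotifyingModel {

    public interface ChangeListener {
        void modelChanged(NotifyingModel source);
    }

    private final List<ChangeListener> listeners = new CopyOnWriteArrayList<>();
    private boolean notify = true;
    private String title = "";

    public void addChangeListener(ChangeListener l) { listeners.add(l); }
    public void removeChangeListener(ChangeListener l) { listeners.remove(l); }

    public void setNotify(boolean notify) {
        this.notify = notify;
        // Re-enabling notifications sends one event covering the queued-up changes.
        if (notify) {
            fireChangeEvent();
        }
    }

    public void setTitle(String title) {
        this.title = title;
        fireChangeEvent(); // every setter notifies, unless the flag is off
    }

    public String getTitle() { return title; }

    protected void fireChangeEvent() {
        if (!notify) {
            return; // suppressed while batching changes
        }
        for (ChangeListener l : listeners) {
            l.modelChanged(this);
        }
    }

    public static void main(String[] args) {
        NotifyingModel model = new NotifyingModel();
        model.addChangeListener(m -> System.out.println("changed: " + m.getTitle()));
        model.setNotify(false); // batch several updates silently
        model.setTitle("first");
        model.setTitle("second");
        model.setNotify(true);  // listeners are told once, after the batch
    }
}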
/* =========================================================== * JFreeChart : a free chart library for the Java(tm) platform * =========================================================== * * (C) Copyright 2000-2014, by Object Refinery Limited and Contributors. * * Project Info: http://www.jfree.org/jfreechart/index.html * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, * USA. * * [Oracle and Java are registered trademarks of Oracle and/or its affiliates. * Other names may be trademarks of their respective owners.] * * --------- * Plot.java * --------- * (C) Copyright 2000-2014, by Object Refinery Limited and Contributors. * * Original Author: David Gilbert (for Object Refinery Limited); * Contributor(s): Sylvain Vieujot; * Jeremy Bowman; * Andreas Schneider; * Gideon Krause; * Nicolas Brodu; * Michal Krause; * Richard West, Advanced Micro Devices, Inc.; * Peter Kolb - patches 2603321, 2809117; * * Changes * ------- * 21-Jun-2001 : Removed redundant JFreeChart parameter from constructors (DG); * 18-Sep-2001 : Updated header info and fixed DOS encoding problem (DG); * 19-Oct-2001 : Moved series paint and stroke methods from JFreeChart * class (DG); * 23-Oct-2001 : Created renderer for LinePlot class (DG); * 07-Nov-2001 : Changed type names for ChartChangeEvent (DG); * Tidied up some Javadoc comments (DG); * 13-Nov-2001 : Changes to allow for null axes on plots such as PiePlot (DG); * Added plot/axis compatibility checks (DG); * 12-Dec-2001 : Changed constructors to protected, and removed unnecessary * 'throws' clauses (DG); * 13-Dec-2001 : Added tooltips (DG); * 22-Jan-2002 : Added handleClick() method, as part of implementation for * crosshairs (DG); * Moved tooltips reference into ChartInfo class (DG); * 23-Jan-2002 : Added test for null axes in chartChanged() method, thanks * to Barry Evans for the bug report (number 506979 on * SourceForge) (DG); * Added a zoom() method (DG); * 05-Feb-2002 : Updated setBackgroundPaint(), setOutlineStroke() and * setOutlinePaint() to better handle null values, as suggested * by Sylvain Vieujot (DG); * 06-Feb-2002 : Added background image, plus alpha transparency for background * and foreground (DG); * 06-Mar-2002 : Added AxisConstants interface (DG); * 26-Mar-2002 : Changed zoom method from empty to abstract (DG); * 23-Apr-2002 : Moved dataset from JFreeChart class (DG); * 11-May-2002 : Added ShapeFactory interface for getShape() methods, * contributed by Jeremy Bowman (DG); * 28-May-2002 : Fixed bug in setSeriesPaint(int, Paint) for subplots (AS); * 25-Jun-2002 : Removed redundant imports (DG); * 30-Jul-2002 : Added 'no data' message for charts with null or empty * datasets (DG); * 21-Aug-2002 : Added code to extend series array if necessary (refer to * SourceForge bug id 594547 for details) (DG); * 17-Sep-2002 : Fixed bug in getSeriesOutlineStroke() method, reported by * Andreas Schroeder 
(DG); * 23-Sep-2002 : Added getLegendItems() abstract method (DG); * 24-Sep-2002 : Removed firstSeriesIndex, subplots now use their own paint * settings, there is a new mechanism for the legend to collect * the legend items (DG); * 27-Sep-2002 : Added dataset group (DG); * 14-Oct-2002 : Moved listener storage into EventListenerList. Changed some * abstract methods to empty implementations (DG); * 28-Oct-2002 : Added a getBackgroundImage() method (DG); * 21-Nov-2002 : Added a plot index for identifying subplots in combined and * overlaid charts (DG); * 22-Nov-2002 : Changed all attributes from 'protected' to 'private'. Added * dataAreaRatio attribute from David M O'Donnell's code (DG); * 09-Jan-2003 : Integrated fix for plot border contributed by Gideon * Krause (DG); * 17-Jan-2003 : Moved to com.jrefinery.chart.plot (DG); * 23-Jan-2003 : Removed one constructor (DG); * 26-Mar-2003 : Implemented Serializable (DG); * 14-Jul-2003 : Moved the dataset and secondaryDataset attributes to the * CategoryPlot and XYPlot classes (DG); * 21-Jul-2003 : Moved DrawingSupplier from CategoryPlot and XYPlot up to this * class (DG); * 20-Aug-2003 : Implemented Cloneable (DG); * 11-Sep-2003 : Listeners and clone (NB); * 29-Oct-2003 : Added workaround for font alignment in PDF output (DG); * 03-Dec-2003 : Modified draw method to accept anchor (DG); * 12-Mar-2004 : Fixed clipping bug in drawNoDataMessage() method (DG); * 07-Apr-2004 : Modified string bounds calculation (DG); * 04-Nov-2004 : Added default shapes for legend items (DG); * 25-Nov-2004 : Some changes to the clone() method implementation (DG); * 23-Feb-2005 : Implemented new LegendItemSource interface (and also * PublicCloneable) (DG); * 21-Apr-2005 : Replaced Insets with RectangleInsets (DG); * 05-May-2005 : Removed unused draw() method (DG); * 06-Jun-2005 : Fixed bugs in equals() method (DG); * 01-Sep-2005 : Moved dataAreaRatio from here to ContourPlot (DG); * ------------- JFREECHART 1.0.x --------------------------------------------- * 30-Jun-2006 : Added background image alpha - see bug report 1514904 (DG); * 05-Sep-2006 : Implemented the MarkerChangeListener interface (DG); * 11-Jan-2007 : Added some argument checks, event notifications, and many * API doc updates (DG); * 03-Apr-2007 : Made drawBackgroundImage() public (DG); * 07-Jun-2007 : Added new fillBackground() method to handle GradientPaint * taking into account orientation (DG); * 25-Mar-2008 : Added fireChangeEvent() method - see patch 1914411 (DG); * 15-Aug-2008 : Added setDrawingSupplier() method with notify flag (DG); * 13-Jan-2009 : Added notify flag (DG); * 19-Mar-2009 : Added entity support - see patch 2603321 by Peter Kolb (DG); * 24-Jun-2009 : Implemented AnnotationChangeListener (see patch 2809117 by * PK) (DG); * 13-Jul-2009 : Plot background image should be clipped if necessary (DG); * 10-Mar-2014 : Remove LegendItemCollection (DG); * */ package org.jfree.chart.plot; import java.awt.AlphaComposite; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Composite; import java.awt.Font; import java.awt.Graphics2D; import java.awt.Image; import java.awt.Paint; import java.awt.Shape; import java.awt.Stroke; import java.awt.geom.Ellipse2D; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import javax.swing.event.EventListenerList; import org.jfree.chart.JFreeChart; import 
org.jfree.chart.LegendItem; import org.jfree.chart.LegendItemSource; import org.jfree.chart.annotations.Annotation; import org.jfree.chart.axis.AxisLocation; import org.jfree.chart.drawable.ColorPainter; import org.jfree.chart.drawable.Drawable; import org.jfree.chart.entity.EntityCollection; import org.jfree.chart.entity.PlotEntity; import org.jfree.chart.event.AnnotationChangeEvent; import org.jfree.chart.event.AnnotationChangeListener; import org.jfree.chart.event.AxisChangeEvent; import org.jfree.chart.event.AxisChangeListener; import org.jfree.chart.event.ChartChangeEventType; import org.jfree.chart.event.MarkerChangeEvent; import org.jfree.chart.event.MarkerChangeListener; import org.jfree.chart.event.PlotChangeEvent; import org.jfree.chart.event.PlotChangeListener; import org.jfree.chart.text.G2TextMeasurer; import org.jfree.chart.text.TextBlock; import org.jfree.chart.text.TextBlockAnchor; import org.jfree.chart.text.TextUtilities; import org.jfree.chart.ui.Align; import org.jfree.chart.ui.RectangleEdge; import org.jfree.chart.ui.RectangleInsets; import org.jfree.chart.util.ObjectUtils; import org.jfree.chart.util.PaintUtils; import org.jfree.chart.util.PublicCloneable; import org.jfree.chart.util.SerialUtils; import org.jfree.data.general.DatasetChangeEvent; import org.jfree.data.general.DatasetChangeListener; import org.jfree.data.general.LabelChangeEvent; import org.jfree.data.general.LabelChangeListener; import org.jfree.data.general.SelectionChangeEvent; import org.jfree.data.general.SelectionChangeListener; /** * The base class for all plots in JFreeChart. The {@link JFreeChart} class * delegates the drawing of axes and data to the plot. This base class * provides facilities common to most plot types. */ public abstract class Plot implements AxisChangeListener, DatasetChangeListener, SelectionChangeListener, LabelChangeListener, AnnotationChangeListener, MarkerChangeListener, LegendItemSource, PublicCloneable, Cloneable, Serializable { /** For serialization. */ private static final long serialVersionUID = -8831571430103671324L; /** Useful constant representing zero. */ public static final Number ZERO = 0; /** The default insets. */ public static final RectangleInsets DEFAULT_INSETS = new RectangleInsets(4.0, 8.0, 4.0, 8.0); /** The default outline stroke. */ public static final Stroke DEFAULT_OUTLINE_STROKE = new BasicStroke(0.5f, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND); /** The default outline color. */ public static final Paint DEFAULT_OUTLINE_PAINT = Color.GRAY; /** The default foreground alpha transparency. */ public static final float DEFAULT_FOREGROUND_ALPHA = 1.0f; /** The default background alpha transparency. */ public static final float DEFAULT_BACKGROUND_ALPHA = 1.0f; /** The minimum width at which the plot should be drawn. */ public static final int MINIMUM_WIDTH_TO_DRAW = 10; /** The minimum height at which the plot should be drawn. */ public static final int MINIMUM_HEIGHT_TO_DRAW = 10; /** A default box shape for legend items. */ public static final Shape DEFAULT_LEGEND_ITEM_BOX = new Rectangle2D.Double(-4.0, -4.0, 8.0, 8.0); /** A default circle shape for legend items. */ public static final Shape DEFAULT_LEGEND_ITEM_CIRCLE = new Ellipse2D.Double(-4.0, -4.0, 8.0, 8.0); /** The parent plot (<code>null</code> if this is the root plot). */ private Plot parent; /** The message to display if no data is available. */ private String noDataMessage; /** The font used to display the 'no data' message. 
*/ private Font noDataMessageFont; /** The paint used to draw the 'no data' message. */ private transient Paint noDataMessagePaint; /** Amount of blank space around the plot area. */ private RectangleInsets insets; /** * A flag that controls whether or not the plot outline is drawn. * * @since 1.0.6 */ private boolean outlineVisible; /** The Stroke used to draw an outline around the plot. */ private transient Stroke outlineStroke; /** The Paint used to draw an outline around the plot. */ private transient Paint outlinePaint; /** An optional painter used to fill the plot background. */ private Drawable backgroundPainter; /** An optional image for the plot background. */ private transient Image backgroundImage; // not currently serialized /** The alignment for the background image. */ private int backgroundImageAlignment = Align.FIT; /** The alpha value used to draw the background image. */ private float backgroundImageAlpha = 0.5f; /** The alpha-transparency for the plot. */ private float foregroundAlpha; /** The alpha transparency for the background paint. */ private float backgroundAlpha; /** The drawing supplier. */ private DrawingSupplier drawingSupplier; /** Storage for registered change listeners. */ private transient EventListenerList listenerList; /** * A flag that controls whether or not the plot will notify listeners * of changes (defaults to true, but sometimes it is useful to disable * this). * * @since 1.0.13 */ private boolean notify; /** * Creates a new plot. */ protected Plot() { this.parent = null; this.insets = DEFAULT_INSETS; this.backgroundPainter = new ColorPainter(Color.WHITE); this.backgroundAlpha = DEFAULT_BACKGROUND_ALPHA; this.backgroundImage = null; this.outlineVisible = true; this.outlineStroke = DEFAULT_OUTLINE_STROKE; this.outlinePaint = DEFAULT_OUTLINE_PAINT; this.foregroundAlpha = DEFAULT_FOREGROUND_ALPHA; this.noDataMessage = null; this.noDataMessageFont = new Font("SansSerif", Font.PLAIN, 12); this.noDataMessagePaint = Color.BLACK; this.drawingSupplier = new DefaultDrawingSupplier(); this.notify = true; this.listenerList = new EventListenerList(); } /** * Returns the string that is displayed when the dataset is empty or * <code>null</code>. * * @return The 'no data' message (<code>null</code> possible). * * @see #setNoDataMessage(String) * @see #getNoDataMessageFont() * @see #getNoDataMessagePaint() */ public String getNoDataMessage() { return this.noDataMessage; } /** * Sets the message that is displayed when the dataset is empty or * <code>null</code>, and sends a {@link PlotChangeEvent} to all registered * listeners. * * @param message the message (<code>null</code> permitted). * * @see #getNoDataMessage() */ public void setNoDataMessage(String message) { this.noDataMessage = message; fireChangeEvent(); } /** * Returns the font used to display the 'no data' message. * * @return The font (never <code>null</code>). * * @see #setNoDataMessageFont(Font) * @see #getNoDataMessage() */ public Font getNoDataMessageFont() { return this.noDataMessageFont; } /** * Sets the font used to display the 'no data' message and sends a * {@link PlotChangeEvent} to all registered listeners. * * @param font the font (<code>null</code> not permitted). * * @see #getNoDataMessageFont() */ public void setNoDataMessageFont(Font font) { if (font == null) { throw new IllegalArgumentException("Null 'font' argument."); } this.noDataMessageFont = font; fireChangeEvent(); } /** * Returns the paint used to display the 'no data' message. * * @return The paint (never <code>null</code>). 
* * @see #setNoDataMessagePaint(Paint) * @see #getNoDataMessage() */ public Paint getNoDataMessagePaint() { return this.noDataMessagePaint; } /** * Sets the paint used to display the 'no data' message and sends a * {@link PlotChangeEvent} to all registered listeners. * * @param paint the paint (<code>null</code> not permitted). * * @see #getNoDataMessagePaint() */ public void setNoDataMessagePaint(Paint paint) { if (paint == null) { throw new IllegalArgumentException("Null 'paint' argument."); } this.noDataMessagePaint = paint; fireChangeEvent(); } /** * Returns a short string describing the plot type. * <P> * Note: this gets used in the chart property editing user interface, * but there needs to be a better mechanism for identifying the plot type. * * @return A short string describing the plot type (never * <code>null</code>). */ public abstract String getPlotType(); /** * Returns the parent plot (or <code>null</code> if this plot is not part * of a combined plot). * * @return The parent plot. * * @see #setParent(Plot) * @see #getRootPlot() */ public Plot getParent() { return this.parent; } /** * Sets the parent plot. This method is intended for internal use, you * shouldn't need to call it directly. * * @param parent the parent plot (<code>null</code> permitted). * * @see #getParent() */ public void setParent(Plot parent) { this.parent = parent; } /** * Returns the root plot. * * @return The root plot. * * @see #getParent() */ public Plot getRootPlot() { Plot p = getParent(); if (p == null) { return this; } return p.getRootPlot(); } /** * Returns <code>true</code> if this plot is part of a combined plot * structure (that is, {@link #getParent()} returns a non-<code>null</code> * value), and <code>false</code> otherwise. * * @return <code>true</code> if this plot is part of a combined plot * structure. * * @see #getParent() */ public boolean isSubplot() { return (getParent() != null); } /** * Returns the insets for the plot area. * * @return The insets (never <code>null</code>). * * @see #setInsets(RectangleInsets) */ public RectangleInsets getInsets() { return this.insets; } /** * Sets the insets for the plot and sends a {@link PlotChangeEvent} to * all registered listeners. * * @param insets the new insets (<code>null</code> not permitted). * * @see #getInsets() * @see #setInsets(RectangleInsets, boolean) */ public void setInsets(RectangleInsets insets) { setInsets(insets, true); } /** * Returns the background painter. The default value is * <code>new ColorPainter(Color.WHITE)</code>. * * @return The background painter (possibly <code>null</code>). */ public Drawable getBackgroundPainter() { return this.backgroundPainter; } /** * Sets the background painter and sends a change event to all registered * listeners. * * @param painter the new painter (<code>null</code> permitted). */ public void setBackgroundPainter(Drawable painter) { this.backgroundPainter = painter; fireChangeEvent(); } public void setBackgroundColor(Color color) { if (color != null) { setBackgroundPainter(new ColorPainter(color)); } else { setBackgroundPainter(null); } } /** * Sets the insets for the plot and, if requested, and sends a * {@link PlotChangeEvent} to all registered listeners. * * @param insets the new insets (<code>null</code> not permitted). * @param notify a flag that controls whether the registered listeners are * notified. 
* * @see #getInsets() * @see #setInsets(RectangleInsets) */ public void setInsets(RectangleInsets insets, boolean notify) { if (insets == null) { throw new IllegalArgumentException("Null 'insets' argument."); } if (!this.insets.equals(insets)) { this.insets = insets; if (notify) { fireChangeEvent(); } } } /** * Returns the alpha transparency of the plot area background. * * @return The alpha transparency. * * @see #setBackgroundAlpha(float) */ public float getBackgroundAlpha() { return this.backgroundAlpha; } /** * Sets the alpha transparency of the plot area background, and notifies * registered listeners that the plot has been modified. * * @param alpha the new alpha value (in the range 0.0f to 1.0f). * * @see #getBackgroundAlpha() */ public void setBackgroundAlpha(float alpha) { if (this.backgroundAlpha != alpha) { this.backgroundAlpha = alpha; fireChangeEvent(); } } /** * Returns the drawing supplier for the plot. * * @return The drawing supplier (possibly <code>null</code>). * * @see #setDrawingSupplier(DrawingSupplier) */ public DrawingSupplier getDrawingSupplier() { DrawingSupplier result = null; Plot p = getParent(); if (p != null) { result = p.getDrawingSupplier(); } else { result = this.drawingSupplier; } return result; } /** * Sets the drawing supplier for the plot and sends a * {@link PlotChangeEvent} to all registered listeners. The drawing * supplier is responsible for supplying a limitless (possibly repeating) * sequence of <code>Paint</code>, <code>Stroke</code> and * <code>Shape</code> objects that the plot's renderer(s) can use to * populate its (their) tables. * * @param supplier the new supplier. * * @see #getDrawingSupplier() */ public void setDrawingSupplier(DrawingSupplier supplier) { this.drawingSupplier = supplier; fireChangeEvent(); } /** * Sets the drawing supplier for the plot and, if requested, sends a * {@link PlotChangeEvent} to all registered listeners. The drawing * supplier is responsible for supplying a limitless (possibly repeating) * sequence of <code>Paint</code>, <code>Stroke</code> and * <code>Shape</code> objects that the plot's renderer(s) can use to * populate its (their) tables. * * @param supplier the new supplier. * @param notify notify listeners? * * @see #getDrawingSupplier() * * @since 1.0.11 */ public void setDrawingSupplier(DrawingSupplier supplier, boolean notify) { this.drawingSupplier = supplier; if (notify) { fireChangeEvent(); } } /** * Returns the background image that is used to fill the plot's background * area. * * @return The image (possibly <code>null</code>). * * @see #setBackgroundImage(Image) */ public Image getBackgroundImage() { return this.backgroundImage; } /** * Sets the background image for the plot and sends a * {@link PlotChangeEvent} to all registered listeners. * * @param image the image (<code>null</code> permitted). * * @see #getBackgroundImage() */ public void setBackgroundImage(Image image) { this.backgroundImage = image; fireChangeEvent(); } /** * Returns the background image alignment. Alignment constants are defined * in the <code>org.jfree.ui.Align</code> class in the JCommon class * library. * * @return The alignment. * * @see #setBackgroundImageAlignment(int) */ public int getBackgroundImageAlignment() { return this.backgroundImageAlignment; } /** * Sets the alignment for the background image and sends a * {@link PlotChangeEvent} to all registered listeners. Alignment options * are defined by the {@link org.jfree.ui.Align} class in the JCommon * class library. * * @param alignment the alignment. 
* * @see #getBackgroundImageAlignment() */ public void setBackgroundImageAlignment(int alignment) { if (this.backgroundImageAlignment != alignment) { this.backgroundImageAlignment = alignment; fireChangeEvent(); } } /** * Returns the alpha transparency used to draw the background image. This * is a value in the range 0.0f to 1.0f, where 0.0f is fully transparent * and 1.0f is fully opaque. * * @return The alpha transparency. * * @see #setBackgroundImageAlpha(float) */ public float getBackgroundImageAlpha() { return this.backgroundImageAlpha; } /** * Sets the alpha transparency used when drawing the background image. * * @param alpha the alpha transparency (in the range 0.0f to 1.0f, where * 0.0f is fully transparent, and 1.0f is fully opaque). * * @throws IllegalArgumentException if <code>alpha</code> is not within * the specified range. * * @see #getBackgroundImageAlpha() */ public void setBackgroundImageAlpha(float alpha) { if (alpha < 0.0f || alpha > 1.0f) { throw new IllegalArgumentException( "The 'alpha' value must be in the range 0.0f to 1.0f."); } if (this.backgroundImageAlpha != alpha) { this.backgroundImageAlpha = alpha; fireChangeEvent(); } } /** * Returns the flag that controls whether or not the plot outline is * drawn. The default value is <code>true</code>. Note that for * historical reasons, the plot's outline paint and stroke can take on * <code>null</code> values, in which case the outline will not be drawn * even if this flag is set to <code>true</code>. * * @return The outline visibility flag. * * @since 1.0.6 * * @see #setOutlineVisible(boolean) */ public boolean isOutlineVisible() { return this.outlineVisible; } /** * Sets the flag that controls whether or not the plot's outline is * drawn, and sends a {@link PlotChangeEvent} to all registered listeners. * * @param visible the new flag value. * * @since 1.0.6 * * @see #isOutlineVisible() */ public void setOutlineVisible(boolean visible) { this.outlineVisible = visible; fireChangeEvent(); } /** * Returns the stroke used to outline the plot area. * * @return The stroke (possibly <code>null</code>). * * @see #setOutlineStroke(Stroke) */ public Stroke getOutlineStroke() { return this.outlineStroke; } /** * Sets the stroke used to outline the plot area and sends a * {@link PlotChangeEvent} to all registered listeners. If you set this * attribute to <code>null</code>, no outline will be drawn. * * @param stroke the stroke (<code>null</code> permitted). * * @see #getOutlineStroke() */ public void setOutlineStroke(Stroke stroke) { if (stroke == null) { if (this.outlineStroke != null) { this.outlineStroke = null; fireChangeEvent(); } } else { if (this.outlineStroke != null) { if (this.outlineStroke.equals(stroke)) { return; // nothing to do } } this.outlineStroke = stroke; fireChangeEvent(); } } /** * Returns the color used to draw the outline of the plot area. * * @return The color (possibly <code>null</code>). * * @see #setOutlinePaint(Paint) */ public Paint getOutlinePaint() { return this.outlinePaint; } /** * Sets the paint used to draw the outline of the plot area and sends a * {@link PlotChangeEvent} to all registered listeners. If you set this * attribute to <code>null</code>, no outline will be drawn. * * @param paint the paint (<code>null</code> permitted). 
* * @see #getOutlinePaint() */ public void setOutlinePaint(Paint paint) { if (paint == null) { if (this.outlinePaint != null) { this.outlinePaint = null; fireChangeEvent(); } } else { if (this.outlinePaint != null) { if (this.outlinePaint.equals(paint)) { return; // nothing to do } } this.outlinePaint = paint; fireChangeEvent(); } } /** * Returns the alpha-transparency for the plot foreground. * * @return The alpha-transparency. * * @see #setForegroundAlpha(float) */ public float getForegroundAlpha() { return this.foregroundAlpha; } /** * Sets the alpha-transparency for the plot and sends a * {@link PlotChangeEvent} to all registered listeners. * * @param alpha the new alpha transparency. * * @see #getForegroundAlpha() */ public void setForegroundAlpha(float alpha) { if (this.foregroundAlpha != alpha) { this.foregroundAlpha = alpha; fireChangeEvent(); } } /** * Returns the legend items for the plot. By default, this method returns * <code>null</code>. Subclasses should override to return a list of * legend items for the plot. * * @return The legend items for the plot (possibly empty, but never * <code>null</code>). */ @Override public List<LegendItem> getLegendItems() { return new ArrayList<LegendItem>(); } /** * Returns a flag that controls whether or not change events are sent to * registered listeners. * * @return A boolean. * * @see #setNotify(boolean) * * @since 1.0.13 */ public boolean isNotify() { return this.notify; } /** * Sets a flag that controls whether or not listeners receive * {@link PlotChangeEvent} notifications. * * @param notify a boolean. * * @see #isNotify() * * @since 1.0.13 */ public void setNotify(boolean notify) { this.notify = notify; // if the flag is being set to true, there may be queued up changes... if (notify) { notifyListeners(new PlotChangeEvent(this)); } } /** * Registers an object for notification of changes to the plot. * * @param listener the object to be registered. * * @see #removeChangeListener(PlotChangeListener) */ public void addChangeListener(PlotChangeListener listener) { this.listenerList.add(PlotChangeListener.class, listener); } /** * Unregisters an object for notification of changes to the plot. * * @param listener the object to be unregistered. * * @see #addChangeListener(PlotChangeListener) */ public void removeChangeListener(PlotChangeListener listener) { this.listenerList.remove(PlotChangeListener.class, listener); } /** * Notifies all registered listeners that the plot has been modified. * * @param event information about the change event. */ public void notifyListeners(PlotChangeEvent event) { // if the 'notify' flag has been switched to false, we don't notify // the listeners if (!this.notify) { return; } Object[] listeners = this.listenerList.getListenerList(); for (int i = listeners.length - 2; i >= 0; i -= 2) { if (listeners[i] == PlotChangeListener.class) { ((PlotChangeListener) listeners[i + 1]).plotChanged(event); } } } /** * Sends a {@link PlotChangeEvent} to all registered listeners. * * @since 1.0.10 */ protected void fireChangeEvent() { notifyListeners(new PlotChangeEvent(this)); } /** * Draws the plot within the specified area. The anchor is a point on the * chart that is specified externally (for instance, it may be the last * point of the last mouse click performed by the user) - plots can use or * ignore this value as they see fit. * <br><br> * Subclasses need to provide an implementation of this method, obviously. * * @param g2 the graphics device. * @param area the plot area. 
* @param anchor the anchor point (<code>null</code> permitted). * @param parentState the parent state (if any). * @param info carries back plot rendering info. */ public abstract void draw(Graphics2D g2, Rectangle2D area, Point2D anchor, PlotState parentState, PlotRenderingInfo info); /** * Draws the plot background (the background color and/or image). * <P> * This method will be called during the chart drawing process and is * declared public so that it can be accessed by the renderers used by * certain subclasses. You shouldn't need to call this method directly. * * @param g2 the graphics device. * @param area the area within which the plot should be drawn. */ public void drawBackground(Graphics2D g2, Rectangle2D area) { // some subclasses override this method completely, so don't put // anything here that *must* be done if (this.backgroundPainter != null) { this.backgroundPainter.draw(g2, area); } drawBackgroundImage(g2, area); } /** * Draws the background image (if there is one) aligned within the * specified area. * * @param g2 the graphics device. * @param area the area. * * @see #getBackgroundImage() * @see #getBackgroundImageAlignment() * @see #getBackgroundImageAlpha() */ public void drawBackgroundImage(Graphics2D g2, Rectangle2D area) { if (this.backgroundImage == null) { return; // nothing to do } Composite savedComposite = g2.getComposite(); g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, this.backgroundImageAlpha)); Rectangle2D dest = new Rectangle2D.Double(0.0, 0.0, this.backgroundImage.getWidth(null), this.backgroundImage.getHeight(null)); Align.align(dest, area, this.backgroundImageAlignment); Shape savedClip = g2.getClip(); g2.clip(area); g2.drawImage(this.backgroundImage, (int) dest.getX(), (int) dest.getY(), (int) dest.getWidth() + 1, (int) dest.getHeight() + 1, null); g2.setClip(savedClip); g2.setComposite(savedComposite); } /** * Draws the plot outline. This method will be called during the chart * drawing process and is declared public so that it can be accessed by the * renderers used by certain subclasses. You shouldn't need to call this * method directly. * * @param g2 the graphics device. * @param area the area within which the plot should be drawn. */ public void drawOutline(Graphics2D g2, Rectangle2D area) { if (!this.outlineVisible) { return; } if ((this.outlineStroke != null) && (this.outlinePaint != null)) { g2.setStroke(this.outlineStroke); g2.setPaint(this.outlinePaint); g2.draw(area); } } /** * Draws a message to state that there is no data to plot. * * @param g2 the graphics device. * @param area the area within which the plot should be drawn. */ protected void drawNoDataMessage(Graphics2D g2, Rectangle2D area) { Shape savedClip = g2.getClip(); g2.clip(area); String message = this.noDataMessage; if (message != null) { g2.setFont(this.noDataMessageFont); g2.setPaint(this.noDataMessagePaint); TextBlock block = TextUtilities.createTextBlock( this.noDataMessage, this.noDataMessageFont, this.noDataMessagePaint, 0.9f * (float) area.getWidth(), new G2TextMeasurer(g2)); block.draw(g2, (float) area.getCenterX(), (float) area.getCenterY(), TextBlockAnchor.CENTER); } g2.setClip(savedClip); } /** * Creates a plot entity that contains a reference to the plot and the * data area as shape. * * @param dataArea the data area used as hot spot for the entity. * @param plotState the plot rendering info containing a reference to the * EntityCollection. * @param toolTip the tool tip (defined in the respective Plot * subclass) (<code>null</code> permitted). 
* @param urlText the url (defined in the respective Plot subclass) * (<code>null</code> permitted). * * @since 1.0.13 */ protected void createAndAddEntity(Rectangle2D dataArea, PlotRenderingInfo plotState, String toolTip, String urlText) { if (plotState != null && plotState.getOwner() != null) { EntityCollection e = plotState.getOwner().getEntityCollection(); if (e != null) { e.add(new PlotEntity(dataArea, this, toolTip, urlText)); } } } /** * Handles a 'click' on the plot. Since the plot does not maintain any * information about where it has been drawn, the plot rendering info is * supplied as an argument so that the plot dimensions can be determined. * * @param x the x coordinate (in Java2D space). * @param y the y coordinate (in Java2D space). * @param info an object containing information about the dimensions of * the plot. */ public void handleClick(int x, int y, PlotRenderingInfo info) { // provides a 'no action' default } /** * Performs a zoom on the plot. Subclasses should override if zooming is * appropriate for the type of plot. * * @param percent the zoom percentage. */ public void zoom(double percent) { // do nothing by default. } /** * Receives notification of a change to an {@link Annotation} added to * this plot. * * @param event information about the event (not used here). * * @since 1.0.14 */ @Override public void annotationChanged(AnnotationChangeEvent event) { fireChangeEvent(); } /** * Receives notification of a change to one of the plot's axes. * * @param event information about the event (not used here). */ @Override public void axisChanged(AxisChangeEvent event) { fireChangeEvent(); } /** * Receives notification of a change to the plot's dataset. * <P> * The plot reacts by passing on a plot change event to all registered * listeners. * * @param event information about the event (not used here). */ @Override public void datasetChanged(DatasetChangeEvent event) { PlotChangeEvent newEvent = new PlotChangeEvent(this); newEvent.setType(ChartChangeEventType.DATASET_UPDATED); notifyListeners(newEvent); } /** * Receives notification of a change to the selection state of the plot's data * <P> * The plot reacts by passing on a plot change event to all registered * listeners. * * @param event information about the event (not used here). */ @Override public void selectionChanged(SelectionChangeEvent event) { //could be typed but would require typing Plot and its decendents with a DatasetCursor PlotChangeEvent newEvent = new PlotChangeEvent(this); newEvent.setType(ChartChangeEventType.GENERAL); notifyListeners(newEvent); } /** * Receives notification of a change to the label information of the plot's data * <P> * The plot reacts by passing on a plot change event to all registered * listeners. * * @param event information about the event (not used here). */ @Override public void labelChanged(LabelChangeEvent event) { //could be typed but would require typing Plot and its decendents with a DatasetCursor PlotChangeEvent newEvent = new PlotChangeEvent(this); newEvent.setType(ChartChangeEventType.GENERAL); notifyListeners(newEvent); } /** * Receives notification of a change to a marker that is assigned to the * plot. * * @param event the event. * * @since 1.0.3 */ @Override public void markerChanged(MarkerChangeEvent event) { fireChangeEvent(); } /** * Adjusts the supplied x-value. * * @param x the x-value. * @param w1 width 1. * @param w2 width 2. * @param edge the edge (left or right). * * @return The adjusted x-value. 
*/ protected double getRectX(double x, double w1, double w2, RectangleEdge edge) { double result = x; if (edge == RectangleEdge.LEFT) { result = result + w1; } else if (edge == RectangleEdge.RIGHT) { result = result + w2; } return result; } /** * Adjusts the supplied y-value. * * @param y the x-value. * @param h1 height 1. * @param h2 height 2. * @param edge the edge (top or bottom). * * @return The adjusted y-value. */ protected double getRectY(double y, double h1, double h2, RectangleEdge edge) { double result = y; if (edge == RectangleEdge.TOP) { result = result + h1; } else if (edge == RectangleEdge.BOTTOM) { result = result + h2; } return result; } /** * Tests this plot for equality with another object. * * @param obj the object (<code>null</code> permitted). * * @return <code>true</code> or <code>false</code>. */ @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof Plot)) { return false; } Plot that = (Plot) obj; if (!ObjectUtils.equal(this.noDataMessage, that.noDataMessage)) { return false; } if (!ObjectUtils.equal( this.noDataMessageFont, that.noDataMessageFont )) { return false; } if (!PaintUtils.equal(this.noDataMessagePaint, that.noDataMessagePaint)) { return false; } if (!ObjectUtils.equal(this.insets, that.insets)) { return false; } if (this.outlineVisible != that.outlineVisible) { return false; } if (!ObjectUtils.equal(this.outlineStroke, that.outlineStroke)) { return false; } if (!PaintUtils.equal(this.outlinePaint, that.outlinePaint)) { return false; } if (!ObjectUtils.equal(this.backgroundPainter, that.backgroundPainter)) { return false; } if (!ObjectUtils.equal(this.backgroundImage, that.backgroundImage)) { return false; } if (this.backgroundImageAlignment != that.backgroundImageAlignment) { return false; } if (this.backgroundImageAlpha != that.backgroundImageAlpha) { return false; } if (this.foregroundAlpha != that.foregroundAlpha) { return false; } if (this.backgroundAlpha != that.backgroundAlpha) { return false; } if (!this.drawingSupplier.equals(that.drawingSupplier)) { return false; } if (this.notify != that.notify) { return false; } return true; } /** * Creates a clone of the plot. * * @return A clone. * * @throws CloneNotSupportedException if some component of the plot does not * support cloning. */ @Override public Object clone() throws CloneNotSupportedException { Plot clone = (Plot) super.clone(); // private Plot parent <-- don't clone the parent plot, but take care // childs in combined plots instead clone.drawingSupplier = ObjectUtils.clone(this.drawingSupplier); clone.listenerList = new EventListenerList(); return clone; } /** * Provides serialization support. * * @param stream the output stream. * * @throws IOException if there is an I/O error. */ private void writeObject(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); SerialUtils.writePaint(this.noDataMessagePaint, stream); SerialUtils.writeStroke(this.outlineStroke, stream); SerialUtils.writePaint(this.outlinePaint, stream); // backgroundImage } /** * Provides serialization support. * * @param stream the input stream. * * @throws IOException if there is an I/O error. * @throws ClassNotFoundException if there is a classpath problem. 
*/ private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); this.noDataMessagePaint = SerialUtils.readPaint(stream); this.outlineStroke = SerialUtils.readStroke(stream); this.outlinePaint = SerialUtils.readPaint(stream); // backgroundImage this.listenerList = new EventListenerList(); } /** * Resolves a domain axis location for a given plot orientation. * * @param location the location (<code>null</code> not permitted). * @param orientation the orientation (<code>null</code> not permitted). * * @return The edge (never <code>null</code>). */ public static RectangleEdge resolveDomainAxisLocation( AxisLocation location, PlotOrientation orientation) { if (location == null) { throw new IllegalArgumentException("Null 'location' argument."); } if (orientation == null) { throw new IllegalArgumentException("Null 'orientation' argument."); } RectangleEdge result = null; if (location == AxisLocation.TOP_OR_RIGHT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.RIGHT; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.TOP; } } else if (location == AxisLocation.TOP_OR_LEFT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.LEFT; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.TOP; } } else if (location == AxisLocation.BOTTOM_OR_RIGHT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.RIGHT; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.BOTTOM; } } else if (location == AxisLocation.BOTTOM_OR_LEFT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.LEFT; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.BOTTOM; } } // the above should cover all the options... if (result == null) { throw new IllegalStateException("resolveDomainAxisLocation()"); } return result; } /** * Resolves a range axis location for a given plot orientation. * * @param location the location (<code>null</code> not permitted). * @param orientation the orientation (<code>null</code> not permitted). * * @return The edge (never <code>null</code>). */ public static RectangleEdge resolveRangeAxisLocation( AxisLocation location, PlotOrientation orientation) { if (location == null) { throw new IllegalArgumentException("Null 'location' argument."); } if (orientation == null) { throw new IllegalArgumentException("Null 'orientation' argument."); } RectangleEdge result = null; if (location == AxisLocation.TOP_OR_RIGHT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.TOP; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.RIGHT; } } else if (location == AxisLocation.TOP_OR_LEFT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.TOP; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.LEFT; } } else if (location == AxisLocation.BOTTOM_OR_RIGHT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.BOTTOM; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.RIGHT; } } else if (location == AxisLocation.BOTTOM_OR_LEFT) { if (orientation == PlotOrientation.HORIZONTAL) { result = RectangleEdge.BOTTOM; } else if (orientation == PlotOrientation.VERTICAL) { result = RectangleEdge.LEFT; } } // the above should cover all the options... if (result == null) { throw new IllegalStateException("resolveRangeAxisLocation()"); } return result; } }
Add border painter.
src/main/java/org/jfree/chart/plot/Plot.java
Add border painter.
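The record above ("Add border painter.") introduces a Drawable-based background painter on Plot: a backgroundPainter field defaulting to new ColorPainter(Color.WHITE), getBackgroundPainter()/setBackgroundPainter(Drawable), a setBackgroundColor(Color) convenience method, and drawBackground() delegating to the painter. The sketch below shows how client code might exercise that API. It is an illustration only: it assumes Drawable is a single-method type with draw(Graphics2D, Rectangle2D) (the only call visible in the record), the package of Drawable is assumed, and PiePlot is used merely as a convenient concrete Plot subclass.

import java.awt.Color;
import java.awt.GradientPaint;
import java.awt.Graphics2D;
import java.awt.geom.Rectangle2D;
import org.jfree.chart.plot.PiePlot;        // concrete Plot subclass used for illustration
import org.jfree.chart.drawable.Drawable;   // package assumed for this sketch

public class BackgroundPainterDemo {
    public static void main(String[] args) {
        PiePlot plot = new PiePlot();   // any concrete Plot subclass would do

        // Convenience path: a solid background colour; Plot wraps it in a ColorPainter internally.
        plot.setBackgroundColor(Color.LIGHT_GRAY);

        // General path: any Drawable can paint the plot's background area.
        plot.setBackgroundPainter(new Drawable() {
            @Override
            public void draw(Graphics2D g2, Rectangle2D area) {
                g2.setPaint(new GradientPaint(
                        (float) area.getMinX(), (float) area.getMinY(), Color.WHITE,
                        (float) area.getMaxX(), (float) area.getMaxY(), Color.LIGHT_GRAY));
                g2.fill(area);
            }
        });

        // A null painter disables background filling; drawBackground() then only draws
        // the background image, if one is set.
        // plot.setBackgroundPainter(null);
    }
}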
Java
apache-2.0
3f6662ec0e4c8a38cb86edb4656cdae24a61a03c
0
taxbeans/taxbeans
package com.github.taxbeans.model; import java.math.BigDecimal; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.List; import java.util.Stack; import java.util.UUID; import javax.money.CurrencyUnit; import javax.money.Monetary; import com.github.taxbeans.currency.ExhangeRateUtils; public class AccountEntry implements Comparable<AccountEntry> { private String cryptoAddress; public AccountEntry(UUID uuid, Account account, BigDecimal amount, AccountSide accountSide, Transaction transaction, String description, BigDecimal commodityUnits, String commodityName, CurrencyUnit currency, Stack<CurrencyTranslation> currencyTranslations) { super(); this.uuid = uuid; this.account = account; this.account.addEntry(this); this.amount = amount; this.accountSide = accountSide == null ? AccountSide.BALANCE_EFFECT : accountSide; this.transaction = transaction; this.description = description; this.commodityUnits = commodityUnits; this.commodityName = commodityName; this.currency = currency; //no need to build the stack, do not include in builder when generated next //this.currencyTranslations = currencyTranslations; } public AccountEntry() { super(); uuid = UUID.randomUUID(); } public UUID getUuid() { return uuid; } public static List<Transaction> adaptToMergedTransactionList(List<AccountEntry> transactionSplits, Account debitAccount, Account creditAccount) { List<Transaction> transactionList = new ArrayList<Transaction>(); for (AccountEntry transactionSplit : transactionSplits) { transactionList.add(transactionSplit.adaptToMergedTransaction(debitAccount, creditAccount)); } return transactionList; } private UUID uuid; private Account account; // temporary value used for extracting out the trade fee entry from trades // could have used a separate Trade object private BigDecimal tradeFee; public BigDecimal getTradeFee() { return tradeFee; } public void setTradeFee(BigDecimal tradeFee) { this.tradeFee = tradeFee; } private BigDecimal amount = BigDecimal.ZERO; // Whether this entry is a debit or credit private AccountSide accountSide = AccountSide.BALANCE_EFFECT; /** * the outer transaction */ private Transaction transaction; private String description; private BigDecimal commodityUnits = BigDecimal.ZERO; private String commodityName; private CurrencyUnit currency = Monetary.getCurrency("NZD"); public CurrencyUnit getCurrency() { return currency; } public void setCurrency(CurrencyUnit currency) { this.currency = currency; } private Stack<CurrencyTranslation> currencyTranslations = new Stack<CurrencyTranslation>(); public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } private Transaction adaptToMergedTransaction(Account debitAccount, Account creditAccount) { Transaction tx = this.transaction.cloneThis(); if (debitAccount != null) { AccountEntry debitSplit = new AccountEntry(); debitSplit.setTransaction(tx); debitSplit.setAmount(this.getAmount()); debitSplit.setAccount(debitAccount); tx.getAccountEntries().add(debitSplit); } if (creditAccount != null) { AccountEntry creditSplit = new AccountEntry(); creditSplit.setTransaction(tx); creditSplit.setAmount(this.getAmount().negate()); creditSplit.setAccount(creditAccount); tx.getAccountEntries().add(creditSplit); } return tx; } public int compareTo(AccountEntry entry) { ZonedDateTime date = entry.getTransaction().getDate(); if (this.getTransaction().getDate() == null) { throw new IllegalStateException("An account transaction must have a date"); } return 
this.getTransaction().getDate().compareTo(date); } public Account getAccount() { return account; } public BigDecimal getAmount() { return amount; } public final Transaction getTransaction() { return this.transaction; } //automatically assigns the split to the account object as well public void setAccount(Account account) { if (account == null) { throw new IllegalArgumentException("Account may not be null"); } this.account = account; //automatically assign the split to the account to this.account.addEntry(this); } public AccountSide getAccountSide() { return accountSide; } public void setAccountSide(AccountSide accountSide) { this.accountSide = accountSide; } //+ve increases balance and -ve decreases balance so debit/credit accordingly public void setAmount(BigDecimal amount) { this.amount = amount; } public AccountEntry withAmount(BigDecimal amount) { this.amount = amount; return this; } public final void setTransaction(final Transaction argTransaction) { this.transaction = argTransaction; } public BigDecimal getCommodityUnits() { return commodityUnits; } public void setCommodityUnits(BigDecimal commodityUnits) { this.commodityUnits = commodityUnits; } public String getCommodityName() { return commodityName; } public void setCommodityName(String commodityName) { this.commodityName = commodityName; } public String getCryptoAddress() { return cryptoAddress; } public void setCryptoAddress(String cryptoAddress) { this.cryptoAddress = cryptoAddress; } public boolean isCommodity() { return commodityUnits != null || commodityName != null; } public void translate(ZonedDateTime translationDate, CurrencyUnit from, CurrencyUnit to) { CurrencyTranslation translation = new CurrencyTranslation(); translation.setOriginalCurrency(from); translation.setTranslatedCurrency(to); translation.setOriginalAmount(amount); amount = ExhangeRateUtils.exchange(translationDate, from, to, amount); currency = to; translation.setTranslatedAmount(amount); currencyTranslations.push(translation); } @Override public String toString() { return "AccountEntry [" + amount + " " + currency + " " + accountSide + ", account=" + account + ", date = " + (transaction == null ? null : transaction.getDate()) + ", description = " + this.getDescription() + (commodityName == null ? 
"" : ", commodityName = " + this.getCommodityName() + ", commodityUnits = " + this.getCommodityUnits()) + "]"; } public AccountEntry withCommodityUnits(BigDecimal amount2) { this.setCommodityUnits(amount2); return this; } public AccountEntry withCommodityName(String currencyCode) { this.setCommodityName(currencyCode); return this; } public AccountEntry withDescription(String string) { this.setDescription(string); return this; } public AccountEntry withAccount(Account buyOrderAccount) { this.setAccount(buyOrderAccount); return this; } public static AccountEntryBuilder accountEntry() { return new AccountEntryBuilder(); } public static class AccountEntryBuilder { private UUID uuid; private Account account; private BigDecimal amount; private AccountSide accountSide; private Transaction transaction; private String description; private BigDecimal commodityUnits; private String commodityName; private CurrencyUnit currency; private Stack<CurrencyTranslation> currencyTranslations; public AccountEntryBuilder withUuid(UUID uuid) { this.uuid = uuid; return this; } public AccountEntryBuilder withAccount(Account account) { this.account = account; return this; } public AccountEntryBuilder withAmount(BigDecimal amount) { this.amount = amount; return this; } public AccountEntryBuilder withAccountSide(AccountSide accountSide) { this.accountSide = accountSide; return this; } public AccountEntryBuilder withTransaction(Transaction transaction) { this.transaction = transaction; return this; } public AccountEntryBuilder withDescription(String description) { this.description = description; return this; } public AccountEntryBuilder withCommodityUnits(BigDecimal commodityUnits) { this.commodityUnits = commodityUnits; return this; } public AccountEntryBuilder withCommodityName(String commodityName) { this.commodityName = commodityName; return this; } public AccountEntryBuilder withCurrency(CurrencyUnit currency) { this.currency = currency; return this; } public AccountEntryBuilder withCurrencyTranslations(Stack<CurrencyTranslation> currencyTranslations) { this.currencyTranslations = currencyTranslations; return this; } public AccountEntry build() { return new AccountEntry(uuid, account, amount, accountSide, transaction, description, commodityUnits, commodityName, currency, currencyTranslations); } } }
taxbeans-forms/src/main/java/com/github/taxbeans/model/AccountEntry.java
package com.github.taxbeans.model; import java.math.BigDecimal; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.List; import java.util.Stack; import java.util.UUID; import javax.money.CurrencyUnit; import javax.money.Monetary; import com.github.taxbeans.currency.ExhangeRateUtils; public class AccountEntry implements Comparable<AccountEntry> { private String cryptoAddress; public AccountEntry(UUID uuid, Account account, BigDecimal amount, AccountSide accountSide, Transaction transaction, String description, BigDecimal commodityUnits, String commodityName, CurrencyUnit currency, Stack<CurrencyTranslation> currencyTranslations) { super(); this.uuid = uuid; this.account = account; this.account.addEntry(this); this.amount = amount; this.accountSide = accountSide == null ? AccountSide.BALANCE_EFFECT : accountSide; this.transaction = transaction; this.description = description; this.commodityUnits = commodityUnits; this.commodityName = commodityName; this.currency = currency; //no need to build the stack, do not include in builder when generated next //this.currencyTranslations = currencyTranslations; } public AccountEntry() { super(); uuid = UUID.randomUUID(); } public UUID getUuid() { return uuid; } public static List<Transaction> adaptToMergedTransactionList(List<AccountEntry> transactionSplits, Account debitAccount, Account creditAccount) { List<Transaction> transactionList = new ArrayList<Transaction>(); for (AccountEntry transactionSplit : transactionSplits) { transactionList.add(transactionSplit.adaptToMergedTransaction(debitAccount, creditAccount)); } return transactionList; } private UUID uuid; private Account account; // temporary value used for extracting out the trade fee entry from trades // could have used a separate Trade object private BigDecimal tradeFee; public BigDecimal getTradeFee() { return tradeFee; } public void setTradeFee(BigDecimal tradeFee) { this.tradeFee = tradeFee; } private BigDecimal amount = BigDecimal.ZERO; // Whether this entry is a debit or credit private AccountSide accountSide = AccountSide.BALANCE_EFFECT; /** * the outer transaction */ private Transaction transaction; private String description; private BigDecimal commodityUnits = BigDecimal.ZERO; private String commodityName; private CurrencyUnit currency = Monetary.getCurrency("NZD"); public CurrencyUnit getCurrency() { return currency; } public void setCurrency(CurrencyUnit currency) { this.currency = currency; } private Stack<CurrencyTranslation> currencyTranslations = new Stack<CurrencyTranslation>(); public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } private Transaction adaptToMergedTransaction(Account debitAccount, Account creditAccount) { Transaction tx = this.transaction.cloneThis(); if (debitAccount != null) { AccountEntry debitSplit = new AccountEntry(); debitSplit.setTransaction(tx); debitSplit.setAmount(this.getAmount()); debitSplit.setAccount(debitAccount); tx.getAccountEntries().add(debitSplit); } if (creditAccount != null) { AccountEntry creditSplit = new AccountEntry(); creditSplit.setTransaction(tx); creditSplit.setAmount(this.getAmount().negate()); creditSplit.setAccount(creditAccount); tx.getAccountEntries().add(creditSplit); } return tx; } public int compareTo(AccountEntry entry) { ZonedDateTime date = entry.getTransaction().getDate(); if (this.getTransaction().getDate() == null) { throw new IllegalStateException("An account transaction must have a date"); } return 
this.getTransaction().getDate().compareTo(date); } public Account getAccount() { return account; } public BigDecimal getAmount() { return amount; } public final Transaction getTransaction() { return this.transaction; } //automatically assigns the split to the account object as well public void setAccount(Account account) { if (account == null) { throw new IllegalArgumentException("Account may not be null"); } this.account = account; //automatically assign the split to the account to this.account.addEntry(this); } public AccountSide getAccountSide() { return accountSide; } public void setAccountSide(AccountSide accountSide) { this.accountSide = accountSide; } //+ve increases balance and -ve decreases balance so debit/credit accordingly public void setAmount(BigDecimal amount) { this.amount = amount; } public AccountEntry withAmount(BigDecimal amount) { this.amount = amount; return this; } public final void setTransaction(final Transaction argTransaction) { this.transaction = argTransaction; } public BigDecimal getCommodityUnits() { return commodityUnits; } public void setCommodityUnits(BigDecimal commodityUnits) { this.commodityUnits = commodityUnits; } public String getCommodityName() { return commodityName; } public void setCommodityName(String commodityName) { this.commodityName = commodityName; } public String getCryptoAddress() { return cryptoAddress; } public void setCryptoAddress(String cryptoAddress) { this.cryptoAddress = cryptoAddress; } public boolean isCommodity() { return commodityUnits != null || commodityName != null; } public void translate(ZonedDateTime translationDate, CurrencyUnit from, CurrencyUnit to) { CurrencyTranslation translation = new CurrencyTranslation(); translation.setOriginalCurrency(from); translation.setTranslatedCurrency(to); translation.setOriginalAmount(amount); amount = ExhangeRateUtils.exchange(translationDate, from, to, amount); currency = to; translation.setTranslatedAmount(amount); currencyTranslations.push(translation); } @Override public String toString() { return "AccountEntry [" + amount + " " + currency + " " + accountSide + ", account=" + account + ", date = " + (transaction == null ? 
null : transaction.getDate()) + ", description = " + this.getDescription() + ", commodityName = " + this.getCommodityName() + ", commodityUnits = " + this.getCommodityUnits() + "]"; } public AccountEntry withCommodityUnits(BigDecimal amount2) { this.setCommodityUnits(amount2); return this; } public AccountEntry withCommodityName(String currencyCode) { this.setCommodityName(currencyCode); return this; } public AccountEntry withDescription(String string) { this.setDescription(string); return this; } public AccountEntry withAccount(Account buyOrderAccount) { this.setAccount(buyOrderAccount); return this; } public static AccountEntryBuilder accountEntry() { return new AccountEntryBuilder(); } public static class AccountEntryBuilder { private UUID uuid; private Account account; private BigDecimal amount; private AccountSide accountSide; private Transaction transaction; private String description; private BigDecimal commodityUnits; private String commodityName; private CurrencyUnit currency; private Stack<CurrencyTranslation> currencyTranslations; public AccountEntryBuilder withUuid(UUID uuid) { this.uuid = uuid; return this; } public AccountEntryBuilder withAccount(Account account) { this.account = account; return this; } public AccountEntryBuilder withAmount(BigDecimal amount) { this.amount = amount; return this; } public AccountEntryBuilder withAccountSide(AccountSide accountSide) { this.accountSide = accountSide; return this; } public AccountEntryBuilder withTransaction(Transaction transaction) { this.transaction = transaction; return this; } public AccountEntryBuilder withDescription(String description) { this.description = description; return this; } public AccountEntryBuilder withCommodityUnits(BigDecimal commodityUnits) { this.commodityUnits = commodityUnits; return this; } public AccountEntryBuilder withCommodityName(String commodityName) { this.commodityName = commodityName; return this; } public AccountEntryBuilder withCurrency(CurrencyUnit currency) { this.currency = currency; return this; } public AccountEntryBuilder withCurrencyTranslations(Stack<CurrencyTranslation> currencyTranslations) { this.currencyTranslations = currencyTranslations; return this; } public AccountEntry build() { return new AccountEntry(uuid, account, amount, accountSide, transaction, description, commodityUnits, commodityName, currency, currencyTranslations); } } }
Only print the associated commodity details if they are set
taxbeans-forms/src/main/java/com/github/taxbeans/model/AccountEntry.java
Only print the associated commodity details if they are set
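The commit above changes AccountEntry.toString() so that commodityName and commodityUnits are appended only when a commodity name is actually set. A minimal usage sketch follows; it relies only on the fluent setters visible in the record (withAmount, withDescription, withCommodityName, withCommodityUnits) and assumes a javax.money implementation is on the classpath, since AccountEntry initialises a default NZD CurrencyUnit. The printed output indicated in the comments is indicative rather than exact.

import java.math.BigDecimal;
import com.github.taxbeans.model.AccountEntry;

public class AccountEntryToStringDemo {
    public static void main(String[] args) {
        // Plain monetary entry: no commodity fields appear in toString() any more.
        AccountEntry fee = new AccountEntry()
                .withAmount(new BigDecimal("150.00"))
                .withDescription("Consulting fee");
        System.out.println(fee);   // ... description = Consulting fee]

        // Commodity entry: name and units are still included in the output.
        AccountEntry btc = new AccountEntry()
                .withAmount(new BigDecimal("420.00"))
                .withDescription("BTC purchase")
                .withCommodityName("BTC")
                .withCommodityUnits(new BigDecimal("0.01"));
        System.out.println(btc);   // ... commodityName = BTC, commodityUnits = 0.01]
    }
}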
Java
apache-2.0
f2253f37d1e799bf181c5ec1867f1ef01db3b234
0
UltimateOgee/PixelEater
package pixeleater; public class PixelEater { /* stray non-compiling placeholder text removed so the class compiles */ public static void main(String[] args) { } }
PixelEater/src/pixeleater/PixelEater.java
package pixeleater; public class PixelEater { sdfssf public static void main(String[] args) { } }
Update PixelEater.java
PixelEater/src/pixeleater/PixelEater.java
Update PixelEater.java
Java
apache-2.0
7b07584b9d4e346312525ce4f1ab1b667e2e514d
0
apache/sis,apache/sis,apache/sis
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.gui.map; import java.util.Locale; import java.util.Arrays; import java.util.Objects; import java.awt.geom.AffineTransform; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import javafx.application.Platform; import javafx.geometry.Bounds; import javafx.geometry.Point2D; import javafx.scene.layout.Pane; import javafx.scene.layout.StackPane; import javafx.scene.input.KeyEvent; import javafx.scene.input.MouseEvent; import javafx.scene.input.ScrollEvent; import javafx.scene.input.GestureEvent; import javafx.scene.Cursor; import javafx.event.EventType; import javafx.beans.property.ObjectProperty; import javafx.beans.property.ReadOnlyBooleanProperty; import javafx.beans.property.ReadOnlyBooleanWrapper; import javafx.beans.property.ReadOnlyObjectProperty; import javafx.beans.property.ReadOnlyObjectWrapper; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.beans.value.WritableValue; import javafx.concurrent.Task; import javafx.event.EventHandler; import javafx.scene.control.ContextMenu; import javafx.scene.control.ToggleGroup; import javafx.scene.transform.Affine; import javafx.scene.transform.NonInvertibleTransformException; import org.opengis.geometry.Envelope; import org.opengis.geometry.DirectPosition; import org.opengis.referencing.ReferenceSystem; import org.opengis.referencing.cs.AxisDirection; import org.opengis.referencing.datum.PixelInCell; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.TransformException; import org.apache.sis.referencing.operation.matrix.Matrices; import org.apache.sis.referencing.operation.matrix.MatrixSIS; import org.apache.sis.referencing.operation.transform.MathTransforms; import org.apache.sis.referencing.operation.transform.LinearTransform; import org.apache.sis.referencing.cs.CoordinateSystems; import org.apache.sis.geometry.DirectPosition2D; import org.apache.sis.geometry.Envelope2D; import org.apache.sis.geometry.AbstractEnvelope; import org.apache.sis.geometry.ImmutableEnvelope; import org.apache.sis.coverage.grid.GridGeometry; import org.apache.sis.coverage.grid.GridExtent; import org.apache.sis.gui.referencing.PositionableProjection; import org.apache.sis.gui.referencing.RecentReferenceSystems; import org.apache.sis.util.ArraysExt; import org.apache.sis.util.ArgumentChecks; import org.apache.sis.util.logging.Logging; import org.apache.sis.internal.util.Numerics; import org.apache.sis.internal.system.Modules; import org.apache.sis.internal.system.DelayedExecutor; import org.apache.sis.internal.system.DelayedRunnable; import org.apache.sis.internal.gui.BackgroundThreads; import org.apache.sis.internal.gui.ExceptionReporter; import 
org.apache.sis.internal.gui.GUIUtilities; import org.apache.sis.internal.gui.Resources; import org.apache.sis.internal.referencing.AxisDirections; import org.apache.sis.portrayal.PlanarCanvas; import org.apache.sis.portrayal.RenderException; import org.apache.sis.referencing.IdentifiedObjects; import static org.apache.sis.internal.util.StandardDateFormat.NANOS_PER_MILLISECOND; /** * A canvas for maps to be rendered on screen in a JavaFX application. * The map may be an arbitrary JavaFX node, typically an {@link javafx.scene.image.ImageView} * or {@link javafx.scene.canvas.Canvas}, which must be supplied by subclasses. * This base class provides handlers for keyboard, mouse, track pad or touch screen events * such as pans, zooms and rotations. The keyboard actions are: * * <table class="sis"> * <caption>Keyboard actions</caption> * <tr><th>Key</th> <th>Action</th></tr> * <tr><td>⇨</td> <td>Move view to the right</td></tr> * <tr><td>⇦</td> <td>Move view to the left</td></tr> * <tr><td>⇧</td> <td>Move view to the top</td></tr> * <tr><td>⇩</td> <td>Move view to the bottom</td></tr> * <tr><td>⎇ + ⇨</td> <td>Rotate clockwise</td></tr> * <tr><td>⎇ + ⇦</td> <td>Rotate anticlockwise</td></tr> * <tr><td>Page down</td> <td>Zoom in</td></tr> * <tr><td>Page up</td> <td>Zoom out</td></tr> * <tr><td>Home</td> <td>{@linkplain #reset() Reset}</td></tr> * <tr><td>Ctrl + above</td> <td>Above actions as a smaller translation, zoom or rotation</td></tr> * </table> * * <h2>Subclassing</h2> * Implementations need to add at least one JavaFX node in the {@link #floatingPane} list of children. * Map rendering involves the following steps: * * <ol> * <li>{@link #createRenderer()} is invoked in the JavaFX thread. That method shall take a snapshot * of every information needed for performing the rendering in background.</li> * <li>{@link Renderer#render()} is invoked in a background thread. That method creates or updates * the nodes to show in this {@code MapCanvas} but without interacting with the canvas yet.</li> * <li>{@link Renderer#commit(MapCanvas)} is invoked in the JavaFX thread. The nodes prepared by * {@code render()} can be transferred to {@link #floatingPane} in that method.</li> * </ol> * * @author Martin Desruisseaux (Geomatys) * @version 1.1 * @since 1.1 * @module */ public abstract class MapCanvas extends PlanarCanvas { /** * Size in pixels of a scroll or translation event. This value should be close to the * {@linkplain ScrollEvent#getDeltaY() delta of a scroll event done with mouse wheel}. */ private static final double SCROLL_EVENT_SIZE = 40; /** * The zoom factor to apply on scroll event. A value of 0.1 means that a zoom of 10% * is applied. */ private static final double ZOOM_FACTOR = 0.1; /** * Division factor to apply on translations and zooms when the control key is down. */ private static final double CONTROL_KEY_FACTOR = 10; /** * Number of milliseconds to wait before to repaint after gesture events (zooms, rotations, pans). * This delay allows to collect more events before to run a potentially costly {@link #repaint()}. * It does not apply to the immediate feedback that the user gets from JavaFX affine transforms * (an image with lower quality used until the higher quality image become ready). * * @see #requestRepaint() * @see Delayed */ private static final long REPAINT_DELAY = 100; /** * Number of nanoseconds to wait before to set mouse cursor shape to {@link Cursor#WAIT} during rendering. * If the rendering complete in a shorter time, the mouse cursor will be unchanged. 
* * @see #renderingStartTime */ private static final long WAIT_CURSOR_DELAY = (1000 - REPAINT_DELAY) * NANOS_PER_MILLISECOND; /** * The pane showing the map and any other JavaFX nodes to scale and translate together with the map. * This pane is initially empty; subclasses should add nodes (canvas, images, shapes, texts, <i>etc.</i>) * into the {@link Pane#getChildren()} list. * All children must specify their coordinates in units relative to the pane (absolute layout). * Those coordinates can be computed from real world coordinates by {@link #objectiveToDisplay}. * * <p>This pane contains an {@link Affine} transform which is updated by user gestures such as pans, * zooms or rotations. Visual positions of all children move together in response to user's gesture, * thus giving an appearance of pane floating around. Changes in {@code floatingPane} affine transform * are temporary; they are applied for producing immediate visual feedback while the map is recomputed * in a background thread. Once calculation is completed and the content of this pane has been updated, * the {@code floatingPane} {@link Affine} transform is reset to identity.</p> */ protected final Pane floatingPane; /** * The pane showing the map and other JavaFX nodes to keep at fixed position regardless pans, zooms or rotations * applied on the map. This pane contains at least the {@linkplain #floatingPane} (which itself contains the map), * but more children (shapes, texts, controls, <i>etc.</i>) can be added by subclasses into the * {@link StackPane#getChildren()} list. */ protected final StackPane fixedPane; /** * The data bounds to use for computing the initial value of {@link #objectiveToDisplay}. * We differ this recomputation until all parameters are known. * * @see #setObjectiveBounds(Envelope) * @see #invalidObjectiveToDisplay */ private Envelope objectiveBounds; /** * Incremented when the map needs to be rendered again. * * @see #renderedContentStamp * @see #contentsChanged() */ private int contentChangeCount; /** * Value of {@link #contentChangeCount} last time the data have been rendered. This is used for deciding * if a call to {@link #repaint()} should be done with the next layout operation. We need this check for * avoiding never-ending repaint events caused by calls to {@code ImageView.setImage(Image)} causing * themselves new layout events. It is okay if this value overflows. */ private int renderedContentStamp; /** * Value of {@link System#nanoTime()} when the last rendering started. This is used together with * {@link #WAIT_CURSOR_DELAY} for deciding if mouse cursor should be {@link Cursor#WAIT}. */ private long renderingStartTime; /** * Non-null if a rendering task is in progress. Used for avoiding to send too many {@link #repaint()} * requests; we will wait for current repaint event to finish before to send another painting request. */ private Task<?> renderingInProgress; /** * Whether the size of this canvas changed. */ private boolean sizeChanged; /** * Whether {@link #objectiveToDisplay} needs to be recomputed. * We differ this recomputation until all parameters are known. * * @see #objectiveBounds */ private boolean invalidObjectiveToDisplay; /** * The zooms, pans and rotations applied on {@link #floatingPane} since last time the map has been painted. * This is the identity transform except during the short time between a gesture (zoom, pan, <i>etc.</i>) * and the completion of latest {@link #repaint()} event. 
This is used for giving immediate feedback to user * while waiting for the new rendering to be ready. Since this transform is a member of {@link #floatingPane} * {@linkplain Pane#getTransforms() transform list}, changes in this transform are immediately visible to user. */ private final Affine transform; /** * The {@link #transform} values at the time the {@link #repaint()} method has been invoked. * This is a change applied on {@link #objectiveToDisplay} but not yet visible in the map. * After the map has been updated, this transform is reset to identity. */ private final Affine changeInProgress; /** * The value to assign to {@link #transform} after the {@link #floatingPane} has been updated * with transformed content. */ private final Affine transformOnNewImage; /** * Cursor position at the time pan event started. * This is used for computing the {@linkplain #floatingPane} translation to apply during drag events. * * @see #onDrag(MouseEvent) */ private double xPanStart, yPanStart; /** * {@code true} if a drag even is in progress. * * @see #onDrag(MouseEvent) */ private boolean isDragging; /** * Whether a {@link CursorChange} is already scheduled, in which case there is no need to schedule more. */ private boolean isMouseChangeScheduled; /** * Whether a rendering is in progress. This property is set to {@code true} when {@code MapCanvas} * is about to start a background thread for performing a rendering, and is reset to {@code false} * after the {@code MapCanvas} has been updated with new rendering result. * * @see #renderingProperty() */ private final ReadOnlyBooleanWrapper isRendering; /** * The exception or error that occurred during last rendering operation. * This is reset to {@code null} when a rendering operation completes successfully. * * @see #errorProperty() */ private final ReadOnlyObjectWrapper<Throwable> error; /** * If a contextual menu is currently visible, that menu. Otherwise {@code null}. */ private ContextMenu menuShown; /** * Creates a new canvas for JavaFX application. * * @param locale the locale to use for labels and some messages, or {@code null} for default. */ public MapCanvas(final Locale locale) { super(locale); transform = new Affine(); changeInProgress = new Affine(); transformOnNewImage = new Affine(); final Pane view = new Pane() { @Override protected void layoutChildren() { super.layoutChildren(); if (contentsChanged()) { repaint(); } } }; view.getTransforms().add(transform); view.setOnZoom ((e) -> applyZoomOrRotate(e, e.getZoomFactor(), 0)); view.setOnRotate((e) -> applyZoomOrRotate(e, 1, e.getAngle())); view.setOnScroll(this::onScroll); view.setOnMousePressed(this::onDrag); view.setOnMouseDragged(this::onDrag); view.setOnMouseReleased(this::onDrag); view.setFocusTraversable(true); view.addEventHandler(KeyEvent.KEY_PRESSED, this::onKeyTyped); /* * Do not set a preferred size, otherwise `repaint()` is invoked twice: once with the preferred size * and once with the actual size of the parent window. Actually the `repaint()` method appears to be * invoked twice anyway, but without preferred size the width appears to be 0, in which case nothing * is repainted. */ view.layoutBoundsProperty().addListener((p) -> onSizeChanged()); view.setCursor(Cursor.CROSSHAIR); floatingPane = view; fixedPane = new StackPane(view); GUIUtilities.setClipToBounds(fixedPane); isRendering = new ReadOnlyBooleanWrapper(this, "isRendering"); error = new ReadOnlyObjectWrapper<>(this, "exception"); } /** * Invoked when the size of the {@linkplain #floatingPane} has changed. 
* This method requests a new repaint after a short wait, in order to collect more resize events. */ private void onSizeChanged() { sizeChanged = true; requestRepaint(); } /** * Invoked when the user presses the button, drags the map and releases the button. * This is interpreted as a translation applied in pixel units on the map. */ private void onDrag(final MouseEvent event) { final double x = event.getX(); final double y = event.getY(); final EventType<? extends MouseEvent> type = event.getEventType(); if (type == MouseEvent.MOUSE_PRESSED) { switch (event.getButton()) { case PRIMARY: { hideContextMenu(); floatingPane.setCursor(Cursor.CLOSED_HAND); floatingPane.requestFocus(); isDragging = true; xPanStart = x; yPanStart = y; event.consume(); break; } // Future version may add cases for FORWARD and BACK buttons. } } else if (isDragging) { if (type != MouseEvent.MOUSE_DRAGGED) { if (floatingPane.getCursor() == Cursor.CLOSED_HAND) { floatingPane.setCursor(Cursor.CROSSHAIR); } isDragging = false; } applyTranslation(x - xPanStart, y - yPanStart, type == MouseEvent.MOUSE_RELEASED); event.consume(); } } /** * Restores the cursor to its normal state after rendering completion. * The purpose of this method is to hide the {@link Cursor#WAIT} shape. */ private void restoreCursorAfterPaint() { floatingPane.setCursor(isDragging ? Cursor.CLOSED_HAND : Cursor.CROSSHAIR); } /** * Translates the map in response to user event (keyboard, mouse, track pad, touch screen). * * @param tx horizontal translation in pixel units. * @param ty vertical translation in pixel units. * @param isFinal {@code false} if more translations are expected soon, or * {@code true} if this is the last translation for now. * * @see #applyZoomOrRotate(GestureEvent, double, double) */ private void applyTranslation(final double tx, final double ty, final boolean isFinal) { if (tx != 0 || ty != 0) { transform.appendTranslation(tx, ty); final Point2D p = changeInProgress.deltaTransform(tx, ty); transformOnNewImage.appendTranslation(p.getX(), p.getY()); if (!isFinal) { requestRepaint(); } } if (isFinal && !transform.isIdentity()) { repaint(); } } /** * Invoked when the user rotates the mouse wheel. * This method performs a zoom-in or zoom-out event. */ private void onScroll(final ScrollEvent event) { if (event.getTouchCount() != 0) { // Do not interpret scroll events on touch pad as a zoom. return; } final double delta = event.getDeltaY(); double zoom = Math.abs(delta) / SCROLL_EVENT_SIZE * ZOOM_FACTOR; if (event.isControlDown()) { zoom /= CONTROL_KEY_FACTOR; } zoom++; if (delta < 0) { zoom = 1/zoom; } applyZoomOrRotate(event, zoom, 0); } /** * Zooms or rotates the map in response to user event (keyboard, mouse, track pad, touch screen). * If the given event is non-null, it will be consumed. * * @param event the mouse, track pad or touch screen event, or {@code null} if the event was a keyboard event. * @param zoom the zoom factor to apply, or 1 if none. * @param angle the rotation angle in degrees, or 0 if nine. 
* * @see #applyTranslation(double, double, boolean) */ private void applyZoomOrRotate(final GestureEvent event, final double zoom, final double angle) { if (zoom != 1 || angle != 0) { double x, y; if (event != null) { x = event.getX(); y = event.getY(); } else { final Bounds bounds = floatingPane.getLayoutBounds(); x = bounds.getCenterX(); y = bounds.getCenterY(); try { final Point2D p = transform.inverseTransform(x, y); x = p.getX(); y = p.getY(); } catch (NonInvertibleTransformException e) { /* * `event` is null only when this method is invoked from `onKeyTyped(…)`. * Keep old coordinates. The map may appear shifted, but its location will * be fixed when `repaint()` completes its work. */ unexpectedException("onKeyTyped", e); } } final Point2D p = changeInProgress.transform(x, y); if (zoom != 1) { transform.appendScale(zoom, zoom, x, y); transformOnNewImage.appendScale(zoom, zoom, p.getX(), p.getY()); } if (angle != 0) { transform.appendRotation(angle, x, y); transformOnNewImage.appendRotation(angle, p.getX(), p.getY()); } requestRepaint(); } if (event != null) { event.consume(); } } /** * Invoked when the user presses a key. This handler provides navigation in the direction of arrow keys, * or zoom-in / zoom-out with page-down / page-up keys. If the control key is down, navigation is finer. */ private void onKeyTyped(final KeyEvent event) { double tx = 0, ty = 0, zoom = 1, angle = 0; if (event.isAltDown()) { switch (event.getCode()) { case RIGHT: case KP_RIGHT: angle = +7.5; break; case LEFT: case KP_LEFT: angle = -7.5; break; default: return; } } else { switch (event.getCode()) { case RIGHT: case KP_RIGHT: tx = -SCROLL_EVENT_SIZE; break; case LEFT: case KP_LEFT: tx = +SCROLL_EVENT_SIZE; break; case DOWN: case KP_DOWN: ty = -SCROLL_EVENT_SIZE; break; case UP: case KP_UP: ty = +SCROLL_EVENT_SIZE; break; case PAGE_UP: zoom = 1/(1 + ZOOM_FACTOR); break; case PAGE_DOWN: zoom = (1 + ZOOM_FACTOR); break; case HOME: reset(); break; default: return; } } if (event.isControlDown()) { tx /= CONTROL_KEY_FACTOR; ty /= CONTROL_KEY_FACTOR; angle /= CONTROL_KEY_FACTOR; zoom = (zoom - 1) / CONTROL_KEY_FACTOR + 1; } try { final Point2D p = transform.inverseDeltaTransform(tx, ty); tx = p.getX(); ty = p.getY(); } catch (NonInvertibleTransformException e) { /* * Should never happen. If happen anyway, keep old coordinates. The map may appear * shifted, but its location will be fixed when `repaint()` completes its work. */ unexpectedException("onKeyTyped", e); } applyZoomOrRotate(null, zoom, angle); applyTranslation(tx, ty, false); event.consume(); } /** * Resets the map view to its default zoom level and default position with no rotation. * Contrarily to {@link #clear()}, this method does not remove the map content. */ public void reset() { invalidObjectiveToDisplay = true; requestRepaint(); } /** * If a context menu is currently shown, hide that menu. Otherwise does nothing. */ private void hideContextMenu() { if (menuShown != null) { menuShown.hide(); menuShown = null; } } /** * Shows or hides the contextual menu when the right mouse button is clicked. This handler can determine * the geographic location where the click occurred. This information is used for changing the projection * while preserving approximately the location, scale and rotation of pixels around the mouse cursor. */ @SuppressWarnings({"serial","CloneableImplementsClone"}) // Not intended to be serialized. 
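    // This handler extends DirectPosition2D so that the handler itself carries the last mouse position in
    // display coordinates: handle(…) stores the click location in the inherited (x,y) fields, and
    // createProjectedCRS(…) later transforms that position back to objective coordinates.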
final class MenuHandler extends DirectPosition2D implements EventHandler<MouseEvent>, ChangeListener<ReferenceSystem>, PropertyChangeListener { /** * The contextual menu to show or hide when mouse button is clicked on the canvas. */ private final ContextMenu menu; /** * The property to update if a change of CRS occurs in the enclosing canvas. This property is provided * by {@link RecentReferenceSystems}, which listen to changes. Setting this property to a new value * causes the "Referencing systems" radio menus to change the item where the check mark appear. * * <p>This field is initialized by {@link MapMenu#addReferenceSystems(RecentReferenceSystems)} * and should be considered final after initialization.</p> */ ObjectProperty<ReferenceSystem> selectedCrsProperty; /** * The group of {@link PositionableProjection} items for projections created on-the-fly at mouse position. * Those items are not managed by {@link RecentReferenceSystems} so they need to be handled there. * * <p>This field is initialized by {@link MapMenu#addReferenceSystems(RecentReferenceSystems)} * and should be considered final after initialization.</p> */ ToggleGroup positionables; /** * {@code true} if we are in the process of setting a CRS generated by {@link PositionableProjection}. */ private boolean isPositionableProjection; /** * Creates and registers a new handler for showing a contextual menu in the enclosing canvas. * It is caller responsibility to ensure that this method is invoked only once. */ @SuppressWarnings("ThisEscapedInObjectConstruction") MenuHandler(final ContextMenu menu) { super(getDisplayCRS()); this.menu = menu; fixedPane.setOnMousePressed (this); fixedPane.setOnMouseReleased(this); // As recommended by MouseEvent.isPopupTrigger(). } /** * Invoked when the user clicks on the canvas. * Shows the menu on right mouse click, hide otherwise. */ @Override public void handle(final MouseEvent event) { if (event.isPopupTrigger()) { hideContextMenu(); x = event.getX(); y = event.getY(); menu.show((Pane) event.getSource(), event.getScreenX(), event.getScreenY()); menuShown = menu; event.consume(); } } /** * Invoked when user selected a new coordinate reference system among the choices of predefined CRS. * Those CRS are the ones managed by {@link RecentReferenceSystems}, not the ones created on-the-fly. */ @Override public void changed(final ObservableValue<? extends ReferenceSystem> property, final ReferenceSystem oldValue, final ReferenceSystem newValue) { if (newValue instanceof CoordinateReferenceSystem) { setObjectiveCRS((CoordinateReferenceSystem) newValue, this, property); } } /** * Invoked when user selected a projection centered on mouse position. Those CRS are generated on-the-fly * and are generally not on the list of CRS managed by {@link RecentReferenceSystems}. 
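         * The new CRS is centered on the mouse position saved by the {@code handle(…)} method,
         * converted from display coordinates to the current objective CRS.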
*/ final void createProjectedCRS(final PositionableProjection projection) { try { DirectPosition2D center = new DirectPosition2D(); center = (DirectPosition2D) objectiveToDisplay.inverseTransform(this, center); center.setCoordinateReferenceSystem(getObjectiveCRS()); CoordinateReferenceSystem crs = projection.createProjectedCRS(center); try { isPositionableProjection = true; setObjectiveCRS(crs, this, null); } finally { isPositionableProjection = false; } } catch (Exception e) { errorOccurred(e); final Resources i18n = Resources.forLocale(getLocale()); ExceptionReporter.show(fixedPane, null, i18n.getString(Resources.Keys.CanNotUseRefSys_1, projection), e); } } /** * Invoked when a canvas property changed, typically after new data are shown. * The property of interest is {@value MapCanvas#OBJECTIVE_CRS_PROPERTY}. * This method updates the CRS selected in the contextual menu. */ @Override public void propertyChange(final PropertyChangeEvent event) { if (OBJECTIVE_CRS_PROPERTY.equals(event.getPropertyName())) { final Object value = event.getNewValue(); if (value instanceof CoordinateReferenceSystem) { selectedCrsProperty.set((CoordinateReferenceSystem) value); } if (!isPositionableProjection) { positionables.selectToggle(null); } } } } /** * Invoked when the user changed the CRS from a JavaFX control. If the CRS can not be set to the specified * value, then an error message is shown in the status bar and the property is reset to its previous value. * * @param crs the new Coordinate Reference System in which to transform all data before displaying. * @param anchor the point to keep at fixed display coordinates, or {@code null} for default value. * @param property the property to reset if the operation fails. */ private void setObjectiveCRS(final CoordinateReferenceSystem crs, DirectPosition anchor, final ObservableValue<? extends ReferenceSystem> property) { final CoordinateReferenceSystem previous = getObjectiveCRS(); if (crs != previous) try { /* * If no anchor is specified, the first default is the center of the region currently visible * in the canvas. If that center can not be determined neither, null anchor defaults to the * point of interest (POI) managed by the Canvas parent class. */ if (anchor == null) { final Envelope2D bounds = getDisplayBounds(); if (bounds != null) { anchor = AbstractEnvelope.castOrCopy(bounds).getMedian(); } } setObjectiveCRS(crs, anchor); requestRepaint(); } catch (Exception e) { if (property instanceof WritableValue<?>) { ((WritableValue<ReferenceSystem>) property).setValue(previous); } errorOccurred(e); final Locale locale = getLocale(); final Resources i18n = Resources.forLocale(locale); ExceptionReporter.show(fixedPane, null, i18n.getString(Resources.Keys.CanNotUseRefSys_1, IdentifiedObjects.getDisplayName(crs, locale)), e); } } /** * Sets the data bounds to use for computing the initial value of {@link #objectiveToDisplay}. * Invoking this method also sets the {@link #getObjectiveCRS() objective CRS} of this canvas * to the CRS of given envelope. * * <p>This method should be invoked only when new data have been loaded, or when the caller wants * to discard any zoom or translation and reset the view to the given bounds. This method does not * cause new repaint event; {@link #requestRepaint()} must be invoked by the caller if desired.</p> * * @param visibleArea bounding box in (new) objective CRS of the initial area to show, * or {@code null} if unknown (in which case an identity transform will be set). 
* * @see #setObjectiveCRS(CoordinateReferenceSystem, DirectPosition) */ protected void setObjectiveBounds(final Envelope visibleArea) { ArgumentChecks.ensureDimensionMatches("bounds", BIDIMENSIONAL, visibleArea); objectiveBounds = ImmutableEnvelope.castOrCopy(visibleArea); invalidObjectiveToDisplay = true; } /** * Given axis directions in the objective CRS, returns axis directions in display CRS. * This method will typically reverse the North direction to a South direction because * <var>y</var> axis is oriented toward down. It may also swap axis order. * * <p>The rules implemented in this method are empirical and may be augmented in any future version. * This method may become {@code protected} in a future version if we want to allow user to override * with her own rules.</p> * * @param srcAxes axis directions in objective CRS. * @return axis directions in display CRS. */ private static AxisDirection[] toDisplayDirections(final AxisDirection[] srcAxes) { final AxisDirection[] dstAxes = Arrays.copyOf(srcAxes, 2); if (AxisDirections.absolute(dstAxes[0]) == AxisDirection.NORTH && AxisDirections.absolute(dstAxes[1]) == AxisDirection.EAST) { ArraysExt.swap(dstAxes, 0, 1); } if (AxisDirections.absolute(dstAxes[0]) == AxisDirection.WEST) dstAxes[0] = AxisDirection.EAST; if (AxisDirections.absolute(dstAxes[1]) == AxisDirection.NORTH) dstAxes[1] = AxisDirection.SOUTH; return dstAxes; } /** * Invoked in JavaFX thread for creating a renderer to be executed in a background thread. * Subclasses shall copy in this method all {@code MapCanvas} properties that the background thread * will need for performing the rendering process. * * @return rendering process to be executed in background thread, * or {@code null} if there is nothing to paint. */ protected abstract Renderer createRenderer(); /** * A snapshot of {@link MapCanvas} state to render as a map, together with rendering code. * This class is instantiated and used as below: * * <ol> * <li>{@link MapCanvas} invokes {@link MapCanvas#createRenderer()} in the JavaFX thread. * That method shall take a snapshot of every information needed for performing the rendering * in a background thread.</li> * <li>{@link MapCanvas} invokes {@link #render()} in a background thread. That method creates or * updates the nodes to show in the canvas but without reading or writing any canvas property; * that method should use only the snapshot taken in step 1.</li> * <li>{@link MapCanvas} invokes {@link #commit(MapCanvas)} in the JavaFX thread. The nodes prepared * at step 2 can be transferred to {@link MapCanvas#floatingPane} in that method.</li> * </ol> * * @author Martin Desruisseaux (Geomatys) * @version 1.1 * @since 1.1 * @module */ protected abstract static class Renderer { /** * The canvas size. */ private int width, height; /** * Creates a new renderer. The {@linkplain #getWidth() width} and {@linkplain #getHeight() height} * are initially zero; they will get a non-zero values before {@link #render()} is invoked. */ protected Renderer() { } /** * Sets the width and height to the size of the given view, * then returns {@code true} if the view is non-empty. * * <p>This method is invoked after {@link #createRenderer()} * and before {@link #createWorker(Renderer)}.</p> */ private boolean initialize(final Pane view) { width = Numerics.clamp(Math.round(view.getWidth())); height = Numerics.clamp(Math.round(view.getHeight())); return width > 0 && height > 0; } /** * Returns the width (number of columns) of the view, in pixels. 
* * @return number of pixels to render horizontally. */ public int getWidth() { return width; } /** * Returns the height (number of rows) of the view, in pixels. * * @return number of pixels to render vertically. */ public int getHeight() { return height; } /** * Invoked in a background thread for rendering the map. This method should not access any * {@link MapCanvas} property; if some canvas properties are needed, they should have been * copied at construction time. * * @throws Exception if an error occurred while preparing data or rendering them. */ protected abstract void render() throws Exception; /** * Invoked in JavaFX thread after {@link #render()} completion. This method can update the * {@link #floatingPane} children with the nodes (images, shaped, <i>etc.</i>) created by * {@link #render()}. * * @param canvas the canvas where drawing has been done. * @return {@code true} on success, or {@code false} if the rendering should be redone * (for example because a change has been detected in the data). */ protected abstract boolean commit(MapCanvas canvas); } /** * Returns {@code true} if content changed since the last {@link #repaint()} execution. * This is used for checking if a new call to {@link #repaint()} is necessary. */ final boolean contentsChanged() { return contentChangeCount != renderedContentStamp; } /** * Requests the map to be rendered again, possibly with new data. Invoking this * method does not necessarily causes the repaint process to start immediately. * The request will be queued and executed at an arbitrary (short) time later. */ public final void requestRepaint() { contentChangeCount++; if (renderingInProgress == null) { final Delayed delay = new Delayed(); BackgroundThreads.execute(delay); renderingInProgress = delay; // Set last after we know that the task has been scheduled. } } /** * Invoked when the map content needs to be rendered again. * It may be because the map has new content, or because the viewed region moved or has been zoomed. * * @see #requestRepaint() */ final void repaint() { assert Platform.isFxApplicationThread(); /* * If a rendering is already in progress, do not send a new request now. * Wait for current rendering to finish; a new one will be automatically * requested if content changes are detected after the rendering. */ if (renderingInProgress != null) { if (renderingInProgress instanceof Delayed) { renderingInProgress.cancel(true); renderingInProgress = null; } else { contentChangeCount++; return; } } renderingStartTime = System.nanoTime(); renderedContentStamp = contentChangeCount; /* * If a new canvas size is known, inform the parent `PlanarCanvas` about that. * It may cause a recomputation of the "objective to display" transform. */ try { if (sizeChanged) { sizeChanged = false; final Pane view = floatingPane; Envelope2D bounds = new Envelope2D(null, view.getLayoutX(), view.getLayoutY(), view.getWidth(), view.getHeight()); if (bounds.isEmpty()) return; setDisplayBounds(bounds); } /* * Compute the `objectiveToDisplay` only before the first rendering, because the display * bounds may not be known before (it may be zero at the time `MapCanvas` is initialized). * This code is executed only once for a new map. */ if (invalidObjectiveToDisplay) { final Envelope2D target = getDisplayBounds(); if (target == null) { // Bounds are still unknown. Another repaint event will happen when they will become known. 
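                    // (They become known once the floating pane gets a non-empty layout size,
                    // in which case `repaint()` invokes `setDisplayBounds(…)` above.)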
return; } invalidObjectiveToDisplay = false; final GridExtent extent = new GridExtent(null, new long[] {Math.round(target.getMinX()), Math.round(target.getMinY())}, new long[] {Math.round(target.getMaxX()), Math.round(target.getMaxY())}, false); /* * If `setObjectiveBounds(…)` has been invoked (as it should be), initialize the affine * transform to values which will allow this canvas to contain fully the objective bounds. * Otherwise the transform is initialized to an identity transform (should not happen often). * If a CRS is present, it is used for deciding if we need to swap or flip axes. */ CoordinateReferenceSystem objectiveCRS; final LinearTransform crsToDisplay; if (objectiveBounds != null) { objectiveCRS = objectiveBounds.getCoordinateReferenceSystem(); final MatrixSIS m; if (objectiveCRS != null) { AxisDirection[] srcAxes = CoordinateSystems.getAxisDirections(objectiveCRS.getCoordinateSystem()); m = Matrices.createTransform(objectiveBounds, srcAxes, target, toDisplayDirections(srcAxes)); } else { m = Matrices.createTransform(objectiveBounds, target); } Matrices.forceUniformScale(m, 0, new double[] {target.getCenterX(), target.getCenterY()}); crsToDisplay = MathTransforms.linear(m); if (objectiveCRS == null) { objectiveCRS = extent.toEnvelope(crsToDisplay.inverse()).getCoordinateReferenceSystem(); // CRS computed above should not be null. } } else { objectiveCRS = getDisplayCRS(); crsToDisplay = MathTransforms.identity(BIDIMENSIONAL); } setGridGeometry(new GridGeometry(extent, PixelInCell.CELL_CORNER, crsToDisplay.inverse(), objectiveCRS)); transform.setToIdentity(); } } catch (TransformException | RenderException ex) { restoreCursorAfterPaint(); errorOccurred(ex); return; } /* * If a temporary zoom, rotation or translation has been applied using JavaFX transform API, * replace that temporary transform by a "permanent" adjustment of the `objectiveToDisplay` * transform. It allows SIS to get new data for the new visible area and resolution. */ changeInProgress.setToTransform(transform); transformOnNewImage.setToIdentity(); isRendering.set(true); if (!transform.isIdentity()) { transformDisplayCoordinates(new AffineTransform( transform.getMxx(), transform.getMyx(), transform.getMxy(), transform.getMyy(), transform.getTx(), transform.getTy())); } /* * Invoke `createWorker(…)` only after we finished above configuration, because that method * may take a snapshot of current canvas state in preparation for use in background threads. */ final Renderer context = createRenderer(); if (context != null && context.initialize(floatingPane)) { final Task<?> worker = createWorker(context); assert renderingInProgress == null; BackgroundThreads.execute(worker); renderingInProgress = worker; // Set after we know that the task has been scheduled. if (!isMouseChangeScheduled) { DelayedExecutor.schedule(new CursorChange()); isMouseChangeScheduled = true; } } else { error.set(null); isRendering.set(false); restoreCursorAfterPaint(); } } /** * Creates the background task which will invoke {@link Renderer#render()} in a background thread. * The tasks must invoke {@link #renderingCompleted(Task)} in JavaFX thread after completion, * either successful or not. */ Task<?> createWorker(final Renderer renderer) { return new Task<Void>() { /** Invoked in background thread. */ @Override protected Void call() throws Exception { renderer.render(); return null; } /** Invoked in JavaFX thread on success. 
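             * Commits the new rendering, then requests another repaint if the content changed in the meantime.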
             */ @Override protected void succeeded() { final boolean done = renderer.commit(MapCanvas.this); renderingCompleted(this); if (!done || contentsChanged()) { repaint(); } } /** Invoked in JavaFX thread on failure. */ @Override protected void failed() {renderingCompleted(this);} @Override protected void cancelled() {renderingCompleted(this);} }; } /** * Invoked after the background thread created by {@link #repaint()} has finished updating the map content. * The {@link #changeInProgress} is the JavaFX transform at the time the repaint event was triggered and * which is now integrated in the map. That transform will be removed from {@link #floatingPane} transforms. * It may be identity if no zoom, rotation or pan gesture has been applied since the last rendering. */ final void renderingCompleted(final Task<?> task) { assert Platform.isFxApplicationThread(); // Keep cursor unchanged if contents changed because caller will invoke `repaint()`. if (!contentsChanged() || task.getState() != Task.State.SUCCEEDED) { restoreCursorAfterPaint(); } renderingInProgress = null; final Point2D p = changeInProgress.transform(xPanStart, yPanStart); xPanStart = p.getX(); yPanStart = p.getY(); changeInProgress.setToIdentity(); transform.setToTransform(transformOnNewImage); error.set(task.getException()); isRendering.set(false); } /** * A pseudo-rendering task which waits for some delay before performing the real repaint. * The intent is to collect some more gesture events (pans, zooms, <i>etc.</i>) before consuming CPU time. * This is especially useful when the first gesture event is a tiny change because the user just started * panning or zooming. * * <div class="note"><b>Design note:</b> * using a thread for waiting seems a waste of resources, but a thread (likely this one) is going to be used * for real after the waiting time has elapsed. That thread usually exists anyway in {@link BackgroundThreads} * as an idle thread, and it is unlikely that other parts of this JavaFX application need that thread at the same * time (if it happens, other threads will be created).</div> * * @see #requestRepaint() */ private final class Delayed extends Task<Void> { @Override protected Void call() { try { Thread.sleep(REPAINT_DELAY); } catch (InterruptedException e) { // Task.cancel(true) has been invoked: do nothing and terminate now. } return null; } @Override protected void succeeded() {paintAfterDelay();} @Override protected void failed() {paintAfterDelay();} // Do not override `cancelled()` because a repaint is already in progress. } /** * Invoked after {@link #REPAINT_DELAY} has elapsed for performing the real repaint request. * * @see #requestRepaint() */ private void paintAfterDelay() { renderingInProgress = null; repaint(); } /** * The action to execute if rendering appears to be slow. If the rendering has not completed * after about one second, the mouse cursor shape will be set to the wait cursor. We do not * do this change immediately because the mouse cursor changes become disturbing if applied * continuously for a series of fast renderings. */ private final class CursorChange extends DelayedRunnable { /** * Value of {@link #renderingStartTime} when this delayed task has been created. */ private final long startTime; /** * Creates a new action to execute if rendering takes longer than * {@link #WAIT_CURSOR_DELAY} nanoseconds. */ CursorChange() { super(renderingStartTime + WAIT_CURSOR_DELAY); startTime = renderingStartTime; } /** * Invoked in a daemon thread after the delay elapsed.
* The mouse cursor change must be done in JavaFX thread. */ @Override public void run() { Platform.runLater(() -> setWaitCursor(startTime)); } } /** * Invoked in JavaFX thread {@link #WAIT_CURSOR_DELAY} nanoseconds after a rendering started. * If the same rendering is still under progress, the mouse cursor is set to {@link Cursor#WAIT}. * If a different rendering is in progress, do not set the cursor because the GUI is fast enough * but schedule a new {@link CursorChange} in case the next rendering is slow. */ private void setWaitCursor(final long startTime) { isMouseChangeScheduled = false; if (renderingInProgress != null) { if (startTime == renderingStartTime) { floatingPane.setCursor(Cursor.WAIT); } DelayedExecutor.schedule(new CursorChange()); isMouseChangeScheduled = true; } } /** * Returns a property telling whether a rendering is in progress. This property become {@code true} * when this {@code MapCanvas} is about to start a background thread for performing a rendering, and * is reset to {@code false} after this {@code MapCanvas} has been updated with new rendering result. * * @return a property telling whether a rendering is in progress. */ public final ReadOnlyBooleanProperty renderingProperty() { return isRendering.getReadOnlyProperty(); } /** * Returns a property giving the exception or error that occurred during last rendering operation. * The property value is reset to {@code null} when a rendering operation completed successfully. * * @return a property giving the exception or error that occurred during last rendering operation. */ public final ReadOnlyObjectProperty<Throwable> errorProperty() { return error.getReadOnlyProperty(); } /** * Sets the error property to the given value. This method is provided for subclasses that perform * processing outside the {@link Renderer}. It does not need to be invoked if the error occurred * during the rendering process. * * @param ex the exception that occurred (can not be null). */ protected void errorOccurred(final Throwable ex) { error.set(Objects.requireNonNull(ex)); } /** * Invoked when an unexpected exception occurred but it is okay to continue despite it. */ private static void unexpectedException(final String method, final NonInvertibleTransformException e) { Logging.unexpectedException(Logging.getLogger(Modules.APPLICATION), MapCanvas.class, method, e); } /** * Removes map content and clears all properties of this canvas. * * @see #reset() */ protected void clear() { transform.setToIdentity(); changeInProgress.setToIdentity(); invalidObjectiveToDisplay = true; objectiveBounds = null; error.set(null); isRendering.set(false); requestRepaint(); } }
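/*
 * Usage sketch: a minimal subclass illustrating the createRenderer() → Renderer.render() → Renderer.commit(MapCanvas)
 * lifecycle documented above. Only the MapCanvas and Renderer API shown here comes from this class; the ImageView
 * and the image-producing code are hypothetical placeholders.
 *
 *     class ImageMapCanvas extends MapCanvas {
 *         private final javafx.scene.image.ImageView view = new javafx.scene.image.ImageView();
 *
 *         ImageMapCanvas(Locale locale) {
 *             super(locale);
 *             floatingPane.getChildren().add(view);          // Subclasses must add at least one node.
 *         }
 *
 *         @Override protected Renderer createRenderer() {    // JavaFX thread: snapshot the canvas state.
 *             return new Renderer() {
 *                 private javafx.scene.image.WritableImage image;
 *
 *                 @Override protected void render() {        // Background thread: no access to canvas properties.
 *                     image = new javafx.scene.image.WritableImage(getWidth(), getHeight());
 *                     // … draw the map into `image` using only the snapshot taken in createRenderer() …
 *                 }
 *
 *                 @Override protected boolean commit(MapCanvas canvas) {   // JavaFX thread: publish the result.
 *                     view.setImage(image);
 *                     return true;
 *                 }
 *             };
 *         }
 *     }
 */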
application/sis-javafx/src/main/java/org/apache/sis/gui/map/MapCanvas.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.gui.map; import java.util.Locale; import java.util.Arrays; import java.util.Objects; import java.awt.geom.AffineTransform; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import javafx.application.Platform; import javafx.geometry.Bounds; import javafx.geometry.Point2D; import javafx.scene.layout.Pane; import javafx.scene.layout.StackPane; import javafx.scene.input.KeyEvent; import javafx.scene.input.MouseEvent; import javafx.scene.input.ScrollEvent; import javafx.scene.input.GestureEvent; import javafx.scene.Cursor; import javafx.event.EventType; import javafx.beans.property.ObjectProperty; import javafx.beans.property.ReadOnlyBooleanProperty; import javafx.beans.property.ReadOnlyBooleanWrapper; import javafx.beans.property.ReadOnlyObjectProperty; import javafx.beans.property.ReadOnlyObjectWrapper; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.beans.value.WritableValue; import javafx.concurrent.Task; import javafx.event.EventHandler; import javafx.scene.control.ContextMenu; import javafx.scene.control.ToggleGroup; import javafx.scene.transform.Affine; import javafx.scene.transform.NonInvertibleTransformException; import org.opengis.geometry.Envelope; import org.opengis.geometry.DirectPosition; import org.opengis.referencing.ReferenceSystem; import org.opengis.referencing.cs.AxisDirection; import org.opengis.referencing.datum.PixelInCell; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.TransformException; import org.apache.sis.referencing.operation.matrix.Matrices; import org.apache.sis.referencing.operation.matrix.MatrixSIS; import org.apache.sis.referencing.operation.transform.MathTransforms; import org.apache.sis.referencing.operation.transform.LinearTransform; import org.apache.sis.referencing.cs.CoordinateSystems; import org.apache.sis.geometry.DirectPosition2D; import org.apache.sis.geometry.Envelope2D; import org.apache.sis.geometry.AbstractEnvelope; import org.apache.sis.geometry.ImmutableEnvelope; import org.apache.sis.coverage.grid.GridGeometry; import org.apache.sis.coverage.grid.GridExtent; import org.apache.sis.gui.referencing.PositionableProjection; import org.apache.sis.gui.referencing.RecentReferenceSystems; import org.apache.sis.util.ArraysExt; import org.apache.sis.util.ArgumentChecks; import org.apache.sis.util.logging.Logging; import org.apache.sis.internal.util.Numerics; import org.apache.sis.internal.system.Modules; import org.apache.sis.internal.system.DelayedExecutor; import org.apache.sis.internal.system.DelayedRunnable; import org.apache.sis.internal.gui.BackgroundThreads; import org.apache.sis.internal.gui.ExceptionReporter; import 
org.apache.sis.internal.gui.GUIUtilities; import org.apache.sis.internal.gui.Resources; import org.apache.sis.internal.referencing.AxisDirections; import org.apache.sis.portrayal.PlanarCanvas; import org.apache.sis.portrayal.RenderException; import org.apache.sis.referencing.IdentifiedObjects; import static org.apache.sis.internal.util.StandardDateFormat.NANOS_PER_MILLISECOND; /** * A canvas for maps to be rendered on screen in a JavaFX application. * The map may be an arbitrary JavaFX node, typically an {@link javafx.scene.image.ImageView} * or {@link javafx.scene.canvas.Canvas}, which must be supplied by subclasses. * This base class provides handlers for keyboard, mouse, track pad or touch screen events * such as pans, zooms and rotations. The keyboard actions are: * * <table class="sis"> * <caption>Keyboard actions</caption> * <tr><th>Key</th> <th>Action</th></tr> * <tr><td>⇨</td> <td>Move view to the right</td></tr> * <tr><td>⇦</td> <td>Move view to the left</td></tr> * <tr><td>⇧</td> <td>Move view to the top</td></tr> * <tr><td>⇩</td> <td>Move view to the bottom</td></tr> * <tr><td>⎇ + ⇨</td> <td>Rotate clockwise</td></tr> * <tr><td>⎇ + ⇦</td> <td>Rotate anticlockwise</td></tr> * <tr><td>Page down</td> <td>Zoom in</td></tr> * <tr><td>Page up</td> <td>Zoom out</td></tr> * <tr><td>Home</td> <td>{@linkplain #reset() Reset}</td></tr> * <tr><td>Ctrl + above</td> <td>Above actions as a smaller translation, zoom or rotation</td></tr> * </table> * * <h2>Subclassing</h2> * Implementations need to add at least one JavaFX node in the {@link #floatingPane} list of children. * Map rendering involves the following steps: * * <ol> * <li>{@link #createRenderer()} is invoked in the JavaFX thread. That method shall take a snapshot * of every information needed for performing the rendering in background.</li> * <li>{@link Renderer#render()} is invoked in a background thread. That method creates or updates * the nodes to show in this {@code MapCanvas} but without interacting with the canvas yet.</li> * <li>{@link Renderer#commit(MapCanvas)} is invoked in the JavaFX thread. The nodes prepared by * {@code render()} can be transferred to {@link #floatingPane} in that method.</li> * </ol> * * @author Martin Desruisseaux (Geomatys) * @version 1.1 * @since 1.1 * @module */ public abstract class MapCanvas extends PlanarCanvas { /** * Size in pixels of a scroll or translation event. This value should be close to the * {@linkplain ScrollEvent#getDeltaY() delta of a scroll event done with mouse wheel}. */ private static final double SCROLL_EVENT_SIZE = 40; /** * The zoom factor to apply on scroll event. A value of 0.1 means that a zoom of 10% * is applied. */ private static final double ZOOM_FACTOR = 0.1; /** * Division factor to apply on translations and zooms when the control key is down. */ private static final double CONTROL_KEY_FACTOR = 10; /** * Number of milliseconds to wait before to repaint after gesture events (zooms, rotations, pans). * This delay allows to collect more events before to run a potentially costly {@link #repaint()}. * It does not apply to the immediate feedback that the user gets from JavaFX affine transforms * (an image with lower quality used until the higher quality image become ready). * * @see #requestRepaint() * @see Delayed */ private static final long REPAINT_DELAY = 100; /** * Number of nanoseconds to wait before to set mouse cursor shape to {@link Cursor#WAIT} during rendering. * If the rendering complete in a shorter time, the mouse cursor will be unchanged. 
* * @see #renderingStartTime */ private static final long WAIT_CURSOR_DELAY = (1000 - REPAINT_DELAY) * NANOS_PER_MILLISECOND; /** * The pane showing the map and any other JavaFX nodes to scale and translate together with the map. * This pane is initially empty; subclasses should add nodes (canvas, images, shapes, texts, <i>etc.</i>) * into the {@link Pane#getChildren()} list. * All children must specify their coordinates in units relative to the pane (absolute layout). * Those coordinates can be computed from real world coordinates by {@link #objectiveToDisplay}. * * <p>This pane contains an {@link Affine} transform which is updated by user gestures such as pans, * zooms or rotations. Visual positions of all children move together in response to user's gesture, * thus giving an appearance of pane floating around. Changes in {@code floatingPane} affine transform * are temporary; they are applied for producing immediate visual feedback while the map is recomputed * in a background thread. Once calculation is completed and the content of this pane has been updated, * the {@code floatingPane} {@link Affine} transform is reset to identity.</p> */ protected final Pane floatingPane; /** * The pane showing the map and other JavaFX nodes to keep at fixed position regardless pans, zooms or rotations * applied on the map. This pane contains at least the {@linkplain #floatingPane} (which itself contains the map), * but more children (shapes, texts, controls, <i>etc.</i>) can be added by subclasses into the * {@link StackPane#getChildren()} list. */ protected final StackPane fixedPane; /** * The data bounds to use for computing the initial value of {@link #objectiveToDisplay}. * We differ this recomputation until all parameters are known. * * @see #setObjectiveBounds(Envelope) * @see #invalidObjectiveToDisplay */ private Envelope objectiveBounds; /** * Incremented when the map needs to be rendered again. * * @see #renderedContentStamp * @see #contentsChanged() */ private int contentChangeCount; /** * Value of {@link #contentChangeCount} last time the data have been rendered. This is used for deciding * if a call to {@link #repaint()} should be done with the next layout operation. We need this check for * avoiding never-ending repaint events caused by calls to {@code ImageView.setImage(Image)} causing * themselves new layout events. It is okay if this value overflows. */ private int renderedContentStamp; /** * Value of {@link System#nanoTime()} when the last rendering started. This is used together with * {@link #WAIT_CURSOR_DELAY} for deciding if mouse cursor should be {@link Cursor#WAIT}. */ private long renderingStartTime; /** * Non-null if a rendering task is in progress. Used for avoiding to send too many {@link #repaint()} * requests; we will wait for current repaint event to finish before to send another painting request. */ private Task<?> renderingInProgress; /** * Whether the size of this canvas changed. */ private boolean sizeChanged; /** * Whether {@link #objectiveToDisplay} needs to be recomputed. * We differ this recomputation until all parameters are known. * * @see #objectiveBounds */ private boolean invalidObjectiveToDisplay; /** * The zooms, pans and rotations applied on {@link #floatingPane} since last time the map has been painted. * This is the identity transform except during the short time between a gesture (zoom, pan, <i>etc.</i>) * and the completion of latest {@link #repaint()} event. 
This is used for giving immediate feedback to user * while waiting for the new rendering to be ready. Since this transform is a member of {@link #floatingPane} * {@linkplain Pane#getTransforms() transform list}, changes in this transform are immediately visible to user. */ private final Affine transform; /** * The {@link #transform} values at the time the {@link #repaint()} method has been invoked. * This is a change applied on {@link #objectiveToDisplay} but not yet visible in the map. * After the map has been updated, this transform is reset to identity. */ private final Affine changeInProgress; /** * The value to assign to {@link #transform} after the {@link #floatingPane} has been updated * with transformed content. */ private final Affine transformOnNewImage; /** * Cursor position at the time pan event started. * This is used for computing the {@linkplain #floatingPane} translation to apply during drag events. * * @see #onDrag(MouseEvent) */ private double xPanStart, yPanStart; /** * {@code true} if a drag even is in progress. * * @see #onDrag(MouseEvent) */ private boolean isDragging; /** * Whether a {@link CursorChange} is already scheduled, in which case there is no need to schedule more. */ private boolean isMouseChangeScheduled; /** * Whether a rendering is in progress. This property is set to {@code true} when {@code MapCanvas} * is about to start a background thread for performing a rendering, and is reset to {@code false} * after the {@code MapCanvas} has been updated with new rendering result. * * @see #renderingProperty() */ private final ReadOnlyBooleanWrapper isRendering; /** * The exception or error that occurred during last rendering operation. * This is reset to {@code null} when a rendering operation completes successfully. * * @see #errorProperty() */ private final ReadOnlyObjectWrapper<Throwable> error; /** * If a contextual menu is currently visible, that menu. Otherwise {@code null}. */ private ContextMenu menuShown; /** * Creates a new canvas for JavaFX application. * * @param locale the locale to use for labels and some messages, or {@code null} for default. */ public MapCanvas(final Locale locale) { super(locale); transform = new Affine(); changeInProgress = new Affine(); transformOnNewImage = new Affine(); final Pane view = new Pane() { @Override protected void layoutChildren() { super.layoutChildren(); if (contentsChanged()) { repaint(); } } }; view.getTransforms().add(transform); view.setOnZoom ((e) -> applyZoomOrRotate(e, e.getZoomFactor(), 0)); view.setOnRotate((e) -> applyZoomOrRotate(e, 1, e.getAngle())); view.setOnScroll(this::onScroll); view.setOnMousePressed(this::onDrag); view.setOnMouseDragged(this::onDrag); view.setOnMouseReleased(this::onDrag); view.setFocusTraversable(true); view.addEventHandler(KeyEvent.KEY_PRESSED, this::onKeyTyped); /* * Do not set a preferred size, otherwise `repaint()` is invoked twice: once with the preferred size * and once with the actual size of the parent window. Actually the `repaint()` method appears to be * invoked twice anyway, but without preferred size the width appears to be 0, in which case nothing * is repainted. */ view.layoutBoundsProperty().addListener((p) -> onSizeChanged()); view.setCursor(Cursor.CROSSHAIR); floatingPane = view; fixedPane = new StackPane(view); GUIUtilities.setClipToBounds(fixedPane); isRendering = new ReadOnlyBooleanWrapper(this, "isRendering"); error = new ReadOnlyObjectWrapper<>(this, "exception"); } /** * Invoked when the size of the {@linkplain #floatingPane} has changed. 
* This method requests a new repaint after a short wait, in order to collect more resize events. */ private void onSizeChanged() { sizeChanged = true; requestRepaint(); } /** * Invoked when the user presses the button, drags the map and releases the button. * This is interpreted as a translation applied in pixel units on the map. */ private void onDrag(final MouseEvent event) { final double x = event.getX(); final double y = event.getY(); final EventType<? extends MouseEvent> type = event.getEventType(); if (type == MouseEvent.MOUSE_PRESSED) { switch (event.getButton()) { case PRIMARY: { hideContextMenu(); floatingPane.setCursor(Cursor.CLOSED_HAND); floatingPane.requestFocus(); isDragging = true; xPanStart = x; yPanStart = y; event.consume(); break; } // Future version may add cases for FORWARD and BACK buttons. } } else if (isDragging) { if (type != MouseEvent.MOUSE_DRAGGED) { if (floatingPane.getCursor() == Cursor.CLOSED_HAND) { floatingPane.setCursor(Cursor.CROSSHAIR); } isDragging = false; } applyTranslation(x - xPanStart, y - yPanStart, type == MouseEvent.MOUSE_RELEASED); event.consume(); } } /** * Restores the cursor to its normal state after rendering completion. * The purpose of this method is to hide the {@link Cursor#WAIT} shape. */ private void restoreCursorAfterPaint() { floatingPane.setCursor(isDragging ? Cursor.CLOSED_HAND : Cursor.CROSSHAIR); } /** * Translates the map in response to user event (keyboard, mouse, track pad, touch screen). * * @param tx horizontal translation in pixel units. * @param ty vertical translation in pixel units. * @param isFinal {@code false} if more translations are expected soon, or * {@code true} if this is the last translation for now. * * @see #applyZoomOrRotate(GestureEvent, double, double) */ private void applyTranslation(final double tx, final double ty, final boolean isFinal) { if (tx != 0 || ty != 0) { transform.appendTranslation(tx, ty); final Point2D p = changeInProgress.deltaTransform(tx, ty); transformOnNewImage.appendTranslation(p.getX(), p.getY()); if (!isFinal) { requestRepaint(); } } if (isFinal && !transform.isIdentity()) { repaint(); } } /** * Invoked when the user rotates the mouse wheel. * This method performs a zoom-in or zoom-out event. */ private void onScroll(final ScrollEvent event) { if (event.getTouchCount() != 0) { // Do not interpret scroll events on touch pad as a zoom. return; } final double delta = event.getDeltaY(); double zoom = Math.abs(delta) / SCROLL_EVENT_SIZE * ZOOM_FACTOR; if (event.isControlDown()) { zoom /= CONTROL_KEY_FACTOR; } zoom++; if (delta < 0) { zoom = 1/zoom; } applyZoomOrRotate(event, zoom, 0); } /** * Zooms or rotates the map in response to user event (keyboard, mouse, track pad, touch screen). * If the given event is non-null, it will be consumed. * * @param event the mouse, track pad or touch screen event, or {@code null} if the event was a keyboard event. * @param zoom the zoom factor to apply, or 1 if none. * @param angle the rotation angle in degrees, or 0 if nine. 
* * @see #applyTranslation(double, double, boolean) */ private void applyZoomOrRotate(final GestureEvent event, final double zoom, final double angle) { if (zoom != 1 || angle != 0) { double x, y; if (event != null) { x = event.getX(); y = event.getY(); } else { final Bounds bounds = floatingPane.getLayoutBounds(); x = bounds.getCenterX(); y = bounds.getCenterY(); try { final Point2D p = transform.inverseTransform(x, y); x = p.getX(); y = p.getY(); } catch (NonInvertibleTransformException e) { /* * `event` is null only when this method is invoked from `onKeyTyped(…)`. * Keep old coordinates. The map may appear shifted, but its location will * be fixed when `repaint()` completes its work. */ unexpectedException("onKeyTyped", e); } } final Point2D p = changeInProgress.transform(x, y); if (zoom != 1) { transform.appendScale(zoom, zoom, x, y); transformOnNewImage.appendScale(zoom, zoom, p.getX(), p.getY()); } if (angle != 0) { transform.appendRotation(angle, x, y); transformOnNewImage.appendRotation(angle, p.getX(), p.getY()); } requestRepaint(); } if (event != null) { event.consume(); } } /** * Invoked when the user presses a key. This handler provides navigation in the direction of arrow keys, * or zoom-in / zoom-out with page-down / page-up keys. If the control key is down, navigation is finer. */ private void onKeyTyped(final KeyEvent event) { double tx = 0, ty = 0, zoom = 1, angle = 0; if (event.isAltDown()) { switch (event.getCode()) { case RIGHT: case KP_RIGHT: angle = +7.5; break; case LEFT: case KP_LEFT: angle = -7.5; break; default: return; } } else { switch (event.getCode()) { case RIGHT: case KP_RIGHT: tx = -SCROLL_EVENT_SIZE; break; case LEFT: case KP_LEFT: tx = +SCROLL_EVENT_SIZE; break; case DOWN: case KP_DOWN: ty = -SCROLL_EVENT_SIZE; break; case UP: case KP_UP: ty = +SCROLL_EVENT_SIZE; break; case PAGE_UP: zoom = 1/(1 + ZOOM_FACTOR); break; case PAGE_DOWN: zoom = (1 + ZOOM_FACTOR); break; case HOME: reset(); break; default: return; } } if (event.isControlDown()) { tx /= CONTROL_KEY_FACTOR; ty /= CONTROL_KEY_FACTOR; angle /= CONTROL_KEY_FACTOR; zoom = (zoom - 1) / CONTROL_KEY_FACTOR + 1; } try { final Point2D p = transform.inverseDeltaTransform(tx, ty); tx = p.getX(); ty = p.getY(); } catch (NonInvertibleTransformException e) { /* * Should never happen. If happen anyway, keep old coordinates. The map may appear * shifted, but its location will be fixed when `repaint()` completes its work. */ unexpectedException("onKeyTyped", e); } applyZoomOrRotate(null, zoom, angle); applyTranslation(tx, ty, false); event.consume(); } /** * Resets the map view to its default zoom level and default position with no rotation. * Contrarily to {@link #clear()}, this method does not remove the map content. */ public void reset() { invalidObjectiveToDisplay = true; requestRepaint(); } /** * If a context menu is currently shown, hide that menu. Otherwise does nothing. */ private void hideContextMenu() { if (menuShown != null) { menuShown.hide(); menuShown = null; } } /** * Shows or hides the contextual menu when the right mouse button is clicked. This handler can determine * the geographic location where the click occurred. This information is used for changing the projection * while preserving approximately the location, scale and rotation of pixels around the mouse cursor. */ @SuppressWarnings({"serial","CloneableImplementsClone"}) // Not intended to be serialized. 
final class MenuHandler extends DirectPosition2D implements EventHandler<MouseEvent>, ChangeListener<ReferenceSystem>, PropertyChangeListener { /** * The contextual menu to show or hide when mouse button is clicked on the canvas. */ private final ContextMenu menu; /** * The property to update if a change of CRS occurs in the enclosing canvas. This property is provided * by {@link RecentReferenceSystems}, which listen to changes. Setting this property to a new value * causes the "Referencing systems" radio menus to change the item where the check mark appear. * * <p>This field is initialized by {@link MapMenu#addReferenceSystems(RecentReferenceSystems)} * and should be considered final after initialization.</p> */ ObjectProperty<ReferenceSystem> selectedCrsProperty; /** * The group of {@link PositionableProjection} items for projections created on-the-fly at mouse position. * Those items are not managed by {@link RecentReferenceSystems} so they need to be handled there. * * <p>This field is initialized by {@link MapMenu#addReferenceSystems(RecentReferenceSystems)} * and should be considered final after initialization.</p> */ ToggleGroup positionables; /** * {@code true} if we are in the process of setting a CRS generated by {@link PositionableProjection}. */ private boolean isPositionableProjection; /** * Creates and registers a new handler for showing a contextual menu in the enclosing canvas. * It is caller responsibility to ensure that this method is invoked only once. */ @SuppressWarnings("ThisEscapedInObjectConstruction") MenuHandler(final ContextMenu menu) { super(getDisplayCRS()); this.menu = menu; fixedPane.setOnMousePressed (this); fixedPane.setOnMouseReleased(this); // As recommended by MouseEvent.isPopupTrigger(). } /** * Invoked when the user clicks on the canvas. * Shows the menu on right mouse click, hide otherwise. */ @Override public void handle(final MouseEvent event) { if (event.isPopupTrigger()) { hideContextMenu(); x = event.getX(); y = event.getY(); menu.show((Pane) event.getSource(), event.getScreenX(), event.getScreenY()); menuShown = menu; event.consume(); } } /** * Invoked when user selected a new coordinate reference system among the choices of predefined CRS. * Those CRS are the ones managed by {@link RecentReferenceSystems}, not the ones created on-the-fly. */ @Override public void changed(final ObservableValue<? extends ReferenceSystem> property, final ReferenceSystem oldValue, final ReferenceSystem newValue) { if (newValue instanceof CoordinateReferenceSystem) { setObjectiveCRS((CoordinateReferenceSystem) newValue, this, property); } } /** * Invoked when user selected a projection centered on mouse position. Those CRS are generated on-the-fly * and are generally not on the list of CRS managed by {@link RecentReferenceSystems}. 
*/ final void createProjectedCRS(final PositionableProjection projection) { try { DirectPosition2D center = new DirectPosition2D(); center = (DirectPosition2D) objectiveToDisplay.inverseTransform(this, center); center.setCoordinateReferenceSystem(getObjectiveCRS()); CoordinateReferenceSystem crs = projection.createProjectedCRS(center); try { isPositionableProjection = true; setObjectiveCRS(crs, this, null); } finally { isPositionableProjection = false; } } catch (Exception e) { errorOccurred(e); final Resources i18n = Resources.forLocale(getLocale()); ExceptionReporter.show(fixedPane, null, i18n.getString(Resources.Keys.CanNotUseRefSys_1, projection), e); } } /** * Invoked when a canvas property changed, typically after new data are shown. * The property of interest is {@value MapCanvas#OBJECTIVE_CRS_PROPERTY}. * This method updates the CRS selected in the contextual menu. */ @Override public void propertyChange(final PropertyChangeEvent event) { if (OBJECTIVE_CRS_PROPERTY.equals(event.getPropertyName())) { final Object value = event.getNewValue(); if (value instanceof CoordinateReferenceSystem) { selectedCrsProperty.set((CoordinateReferenceSystem) value); } if (!isPositionableProjection) { positionables.selectToggle(null); } } } } /** * Invoked when the user changed the CRS from a JavaFX control. If the CRS can not be set to the specified * value, then an error message is shown in the status bar and the property is reset to its previous value. * * @param crs the new Coordinate Reference System in which to transform all data before displaying. * @param anchor the point to keep at fixed display coordinates, or {@code null} for default value. * @param property the property to reset if the operation fails. */ private void setObjectiveCRS(final CoordinateReferenceSystem crs, DirectPosition anchor, final ObservableValue<? extends ReferenceSystem> property) { final CoordinateReferenceSystem previous = getObjectiveCRS(); if (crs != previous) try { /* * If no anchor is specified, the first default is the center of the region currently visible * in the canvas. If that center can not be determined neither, null anchor defaults to the * point of interest (POI) managed by the Canvas parent class. */ if (anchor == null) { final Envelope2D bounds = getDisplayBounds(); if (bounds != null) { anchor = AbstractEnvelope.castOrCopy(bounds).getMedian(); } } setObjectiveCRS(crs, anchor); requestRepaint(); } catch (Exception e) { if (property instanceof WritableValue<?>) { ((WritableValue<ReferenceSystem>) property).setValue(previous); } errorOccurred(e); final Locale locale = getLocale(); final Resources i18n = Resources.forLocale(locale); ExceptionReporter.show(fixedPane, null, i18n.getString(Resources.Keys.CanNotUseRefSys_1, IdentifiedObjects.getDisplayName(crs, locale)), e); } } /** * Sets the data bounds to use for computing the initial value of {@link #objectiveToDisplay}. * Invoking this method also sets the {@link #getObjectiveCRS() objective CRS} of this canvas * to the CRS of given envelope. * * <p>This method should be invoked only when new data have been loaded, or when the caller wants * to discard any zoom or translation and reset the view to the given bounds. This method does not * cause new repaint event; {@link #requestRepaint()} must be invoked by the caller if desired.</p> * * @param visibleArea bounding box in (new) objective CRS of the initial area to show, * or {@code null} if unknown (in which case an identity transform will be set). 
* * @see #setObjectiveCRS(CoordinateReferenceSystem, DirectPosition) */ protected void setObjectiveBounds(final Envelope visibleArea) { ArgumentChecks.ensureDimensionMatches("bounds", BIDIMENSIONAL, visibleArea); objectiveBounds = ImmutableEnvelope.castOrCopy(visibleArea); invalidObjectiveToDisplay = true; } /** * Given axis directions in the objective CRS, returns axis directions in display CRS. * This method will typically reverse the North direction to a South direction because * <var>y</var> axis is oriented toward down. It may also swap axis order. * * <p>The rules implemented in this method are empirical and may be augmented in any future version. * This method may become {@code protected} in a future version if we want to allow user to override * with her own rules.</p> * * @param srcAxes axis directions in objective CRS. * @return axis directions in display CRS. */ private static AxisDirection[] toDisplayDirections(final AxisDirection[] srcAxes) { final AxisDirection[] dstAxes = Arrays.copyOf(srcAxes, 2); if (AxisDirections.absolute(dstAxes[0]) == AxisDirection.NORTH && AxisDirections.absolute(dstAxes[1]) == AxisDirection.EAST) { ArraysExt.swap(dstAxes, 0, 1); } if (AxisDirections.absolute(dstAxes[0]) == AxisDirection.WEST) dstAxes[0] = AxisDirection.EAST; if (AxisDirections.absolute(dstAxes[1]) == AxisDirection.NORTH) dstAxes[1] = AxisDirection.SOUTH; return dstAxes; } /** * Invoked in JavaFX thread for creating a renderer to be executed in a background thread. * Subclasses shall copy in this method all {@code MapCanvas} properties that the background thread * will need for performing the rendering process. * * @return rendering process to be executed in background thread, * or {@code null} if there is nothing to paint. */ protected abstract Renderer createRenderer(); /** * A snapshot of {@link MapCanvas} state to render as a map, together with rendering code. * This class is instantiated and used as below: * * <ol> * <li>{@link MapCanvas} invokes {@link MapCanvas#createRenderer()} in the JavaFX thread. * That method shall take a snapshot of every information needed for performing the rendering * in a background thread.</li> * <li>{@link MapCanvas} invokes {@link #render()} in a background thread. That method creates or * updates the nodes to show in the canvas but without reading or writing any canvas property; * that method should use only the snapshot taken in step 1.</li> * <li>{@link MapCanvas} invokes {@link #commit(MapCanvas)} in the JavaFX thread. The nodes prepared * at step 2 can be transferred to {@link MapCanvas#floatingPane} in that method.</li> * </ol> * * @author Martin Desruisseaux (Geomatys) * @version 1.1 * @since 1.1 * @module */ protected abstract static class Renderer { /** * The canvas size. */ private int width, height; /** * Creates a new renderer. The {@linkplain #getWidth() width} and {@linkplain #getHeight() height} * are initially zero; they will get a non-zero values before {@link #render()} is invoked. */ protected Renderer() { } /** * Sets the width and height to the size of the given view, * then returns {@code true} if the view is non-empty. * * <p>This method is invoked after {@link #createRenderer()} * and before {@link #createWorker(Renderer)}.</p> */ private boolean initialize(final Pane view) { width = Numerics.clamp(Math.round(view.getWidth())); height = Numerics.clamp(Math.round(view.getHeight())); return width > 0 && height > 0; } /** * Returns the width (number of columns) of the view, in pixels. 
* * @return number of pixels to render horizontally. */ public int getWidth() { return width; } /** * Returns the height (number of rows) of the view, in pixels. * * @return number of pixels to render vertically. */ public int getHeight() { return height; } /** * Invoked in a background thread for rendering the map. This method should not access any * {@link MapCanvas} property; if some canvas properties are needed, they should have been * copied at construction time. * * @throws Exception if an error occurred while preparing data or rendering them. */ protected abstract void render() throws Exception; /** * Invoked in JavaFX thread after {@link #render()} completion. This method can update the * {@link #floatingPane} children with the nodes (images, shaped, <i>etc.</i>) created by * {@link #render()}. * * @param canvas the canvas where drawing has been done. * @return {@code true} on success, or {@code false} if the rendering should be redone * (for example because a change has been detected in the data). */ protected abstract boolean commit(MapCanvas canvas); } /** * Returns {@code true} if content changed since the last {@link #repaint()} execution. * This is used for checking if a new call to {@link #repaint()} is necessary. */ final boolean contentsChanged() { return contentChangeCount != renderedContentStamp; } /** * Requests the map to be rendered again, possibly with new data. Invoking this * method does not necessarily causes the repaint process to start immediately. * The request will be queued and executed at an arbitrary (short) time later. */ public final void requestRepaint() { contentChangeCount++; if (renderingInProgress == null) { final Delayed delay = new Delayed(); BackgroundThreads.execute(delay); renderingInProgress = delay; // Set last after we know that the task has been scheduled. } } /** * Invoked when the map content needs to be rendered again. * It may be because the map has new content, or because the viewed region moved or has been zoomed. * * @see #requestRepaint() */ final void repaint() { assert Platform.isFxApplicationThread(); /* * If a rendering is already in progress, do not send a new request now. * Wait for current rendering to finish; a new one will be automatically * requested if content changes are detected after the rendering. */ if (renderingInProgress != null) { if (renderingInProgress instanceof Delayed) { renderingInProgress.cancel(true); renderingInProgress = null; } else { contentChangeCount++; return; } } renderingStartTime = System.nanoTime(); renderedContentStamp = contentChangeCount; /* * If a new canvas size is known, inform the parent `PlanarCanvas` about that. * It may cause a recomputation of the "objective to display" transform. */ try { if (sizeChanged) { sizeChanged = false; final Pane view = floatingPane; Envelope2D bounds = new Envelope2D(null, view.getLayoutX(), view.getLayoutY(), view.getWidth(), view.getHeight()); if (bounds.isEmpty()) return; setDisplayBounds(bounds); } /* * Compute the `objectiveToDisplay` only before the first rendering, because the display * bounds may not be known before (it may be zero at the time `MapCanvas` is initialized). * This code is executed only once for a new map. 
*/ if (invalidObjectiveToDisplay) { invalidObjectiveToDisplay = false; final Envelope2D target = getDisplayBounds(); final GridExtent extent = new GridExtent(null, new long[] {Math.round(target.getMinX()), Math.round(target.getMinY())}, new long[] {Math.round(target.getMaxX()), Math.round(target.getMaxY())}, false); /* * If `setObjectiveBounds(…)` has been invoked (as it should be), initialize the affine * transform to values which will allow this canvas to contain fully the objective bounds. * Otherwise the transform is initialized to an identity transform (should not happen often). * If a CRS is present, it is used for deciding if we need to swap or flip axes. */ CoordinateReferenceSystem objectiveCRS; final LinearTransform crsToDisplay; if (objectiveBounds != null) { objectiveCRS = objectiveBounds.getCoordinateReferenceSystem(); final MatrixSIS m; if (objectiveCRS != null) { AxisDirection[] srcAxes = CoordinateSystems.getAxisDirections(objectiveCRS.getCoordinateSystem()); m = Matrices.createTransform(objectiveBounds, srcAxes, target, toDisplayDirections(srcAxes)); } else { m = Matrices.createTransform(objectiveBounds, target); } Matrices.forceUniformScale(m, 0, new double[] {target.getCenterX(), target.getCenterY()}); crsToDisplay = MathTransforms.linear(m); if (objectiveCRS == null) { objectiveCRS = extent.toEnvelope(crsToDisplay.inverse()).getCoordinateReferenceSystem(); // CRS computed above should not be null. } } else { objectiveCRS = getDisplayCRS(); crsToDisplay = MathTransforms.identity(BIDIMENSIONAL); } setGridGeometry(new GridGeometry(extent, PixelInCell.CELL_CORNER, crsToDisplay.inverse(), objectiveCRS)); transform.setToIdentity(); } } catch (TransformException | RenderException ex) { restoreCursorAfterPaint(); errorOccurred(ex); return; } /* * If a temporary zoom, rotation or translation has been applied using JavaFX transform API, * replace that temporary transform by a "permanent" adjustment of the `objectiveToDisplay` * transform. It allows SIS to get new data for the new visible area and resolution. */ changeInProgress.setToTransform(transform); transformOnNewImage.setToIdentity(); isRendering.set(true); if (!transform.isIdentity()) { transformDisplayCoordinates(new AffineTransform( transform.getMxx(), transform.getMyx(), transform.getMxy(), transform.getMyy(), transform.getTx(), transform.getTy())); } /* * Invoke `createWorker(…)` only after we finished above configuration, because that method * may take a snapshot of current canvas state in preparation for use in background threads. */ final Renderer context = createRenderer(); if (context != null && context.initialize(floatingPane)) { final Task<?> worker = createWorker(context); assert renderingInProgress == null; BackgroundThreads.execute(worker); renderingInProgress = worker; // Set after we know that the task has been scheduled. if (!isMouseChangeScheduled) { DelayedExecutor.schedule(new CursorChange()); isMouseChangeScheduled = true; } } else { error.set(null); isRendering.set(false); restoreCursorAfterPaint(); } } /** * Creates the background task which will invoke {@link Renderer#render()} in a background thread. * The tasks must invoke {@link #renderingCompleted(Task)} in JavaFX thread after completion, * either successful or not. */ Task<?> createWorker(final Renderer renderer) { return new Task<Void>() { /** Invoked in background thread. */ @Override protected Void call() throws Exception { renderer.render(); return null; } /** Invoked in JavaFX thread on success. 
*/ @Override protected void succeeded() { final boolean done = renderer.commit(MapCanvas.this); renderingCompleted(this); if (!done || contentsChanged()) { repaint(); } } /** Invoked in JavaFX thread on failure. */ @Override protected void failed() {renderingCompleted(this);} @Override protected void cancelled() {renderingCompleted(this);} }; } /** * Invoked after the background thread created by {@link #repaint()} finished to update map content. * The {@link #changeInProgress} is the JavaFX transform at the time the repaint event was trigged and * which is now integrated in the map. That transform will be removed from {@link #floatingPane} transforms. * It may be identity if no zoom, rotation or pan gesture has been applied since last rendering. */ final void renderingCompleted(final Task<?> task) { assert Platform.isFxApplicationThread(); // Keep cursor unchanged if contents changed because caller will invoke `repaint()`. if (!contentsChanged() || task.getState() != Task.State.SUCCEEDED) { restoreCursorAfterPaint(); } renderingInProgress = null; final Point2D p = changeInProgress.transform(xPanStart, yPanStart); xPanStart = p.getX(); yPanStart = p.getY(); changeInProgress.setToIdentity(); transform.setToTransform(transformOnNewImage); error.set(task.getException()); isRendering.set(false); } /** * A pseudo-rendering task which wait for some delay before to perform the real repaint. * The intent is to collect some more gesture events (pans, zooms, <i>etc.</i>) before consuming CPU time. * This is especially useful when the first gesture event is a tiny change because the user just started * panning or zooming. * * <div class="note"><b>Design note:</b> * using a thread for waiting seems a waste of resources, but a thread (likely this one) is going to be used * for real after the waiting time is elapsed. That thread usually exists anyway in {@link BackgroundThreads} * as an idle thread, and it is unlikely that other parts of this JavaFX application need that thread in same * time (if it happens, other threads will be created).</div> * * @see #requestRepaint() */ private final class Delayed extends Task<Void> { @Override protected Void call() { try { Thread.sleep(REPAINT_DELAY); } catch (InterruptedException e) { // Task.cancel(true) has been invoked: do nothing and terminate now. } return null; } @Override protected void succeeded() {paintAfterDelay();} @Override protected void failed() {paintAfterDelay();} // Do not override `cancelled()` because a repaint is already in progress. } /** * Invoked after {@link #REPAINT_DELAY} has been elapsed for performing the real repaint request. * * @see #requestRepaint() */ private void paintAfterDelay() { renderingInProgress = null; repaint(); } /** * The action to execute if rendering appear to be slow. If the rendering did not completed * after about one second, the mouse cursor shaped will be set to the wait cursor. We do not * do this change immediately because the mouse cursor changes become disturbing if applied * continuously for a series of fast renderings. */ private final class CursorChange extends DelayedRunnable { /** * Value of {@link #renderingStartTime} when this delayed task has been created. */ private final long startTime; /** * Creates a new action to execute if rendering takes longer than * {@link #WAIT_CURSOR_DELAY} nanoseconds. */ CursorChange() { super(renderingStartTime + WAIT_CURSOR_DELAY); startTime = renderingStartTime; } /** * Invoked in a daemon thread after the delay elapsed. 
* The mouse cursor change must be done in JavaFX thread. */ @Override public void run() { Platform.runLater(() -> setWaitCursor(startTime)); } } /** * Invoked in JavaFX thread {@link #WAIT_CURSOR_DELAY} nanoseconds after a rendering started. * If the same rendering is still under progress, the mouse cursor is set to {@link Cursor#WAIT}. * If a different rendering is in progress, do not set the cursor because the GUI is fast enough * but schedule a new {@link CursorChange} in case the next rendering is slow. */ private void setWaitCursor(final long startTime) { isMouseChangeScheduled = false; if (renderingInProgress != null) { if (startTime == renderingStartTime) { floatingPane.setCursor(Cursor.WAIT); } DelayedExecutor.schedule(new CursorChange()); isMouseChangeScheduled = true; } } /** * Returns a property telling whether a rendering is in progress. This property become {@code true} * when this {@code MapCanvas} is about to start a background thread for performing a rendering, and * is reset to {@code false} after this {@code MapCanvas} has been updated with new rendering result. * * @return a property telling whether a rendering is in progress. */ public final ReadOnlyBooleanProperty renderingProperty() { return isRendering.getReadOnlyProperty(); } /** * Returns a property giving the exception or error that occurred during last rendering operation. * The property value is reset to {@code null} when a rendering operation completed successfully. * * @return a property giving the exception or error that occurred during last rendering operation. */ public final ReadOnlyObjectProperty<Throwable> errorProperty() { return error.getReadOnlyProperty(); } /** * Sets the error property to the given value. This method is provided for subclasses that perform * processing outside the {@link Renderer}. It does not need to be invoked if the error occurred * during the rendering process. * * @param ex the exception that occurred (can not be null). */ protected void errorOccurred(final Throwable ex) { error.set(Objects.requireNonNull(ex)); } /** * Invoked when an unexpected exception occurred but it is okay to continue despite it. */ private static void unexpectedException(final String method, final NonInvertibleTransformException e) { Logging.unexpectedException(Logging.getLogger(Modules.APPLICATION), MapCanvas.class, method, e); } /** * Removes map content and clears all properties of this canvas. * * @see #reset() */ protected void clear() { transform.setToIdentity(); changeInProgress.setToIdentity(); invalidObjectiveToDisplay = true; objectiveBounds = null; error.set(null); isRendering.set(false); requestRepaint(); } }
Fix a `NullPointerException` that occurs when a new window is created.
application/sis-javafx/src/main/java/org/apache/sis/gui/map/MapCanvas.java
Fix a `NullPointerException` that occurs when a new window is created.
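The MapCanvas record above implements a repaint loop: content changes bump a counter, rendering runs in a background thread, the result is committed on the JavaFX thread, and rendering is redone if the counter moved in the meantime. Below is a minimal sketch of that loop using a plain ExecutorService instead of JavaFX Task; the class and method names are invented for illustration and this is not the SIS implementation.

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicInteger;

    // Sketch of the "render in background, commit, redo if content changed" loop.
    // No JavaFX is used; the commit step that MapCanvas runs on the JavaFX thread
    // is simulated by a direct method call.
    public class RepaintLoopSketch {
        private final ExecutorService background = Executors.newSingleThreadExecutor();
        private final AtomicInteger contentChangeCount = new AtomicInteger();
        private volatile int renderedContentStamp;

        /** Called whenever data changes; schedules a repaint in the background. */
        public void requestRepaint() {
            contentChangeCount.incrementAndGet();
            background.execute(this::repaintOnce);
        }

        private void repaintOnce() {
            int stamp = contentChangeCount.get();          // snapshot before rendering
            if (stamp == renderedContentStamp) {
                return;                                    // nothing changed since the last frame
            }
            String frame = render(stamp);                  // slow work, background thread
            commit(frame);                                 // MapCanvas does this on the JavaFX thread
            renderedContentStamp = stamp;
            if (renderedContentStamp != contentChangeCount.get()) {
                repaintOnce();                             // content changed while rendering: redo
            }
        }

        private String render(int stamp) {
            return "frame for content version " + stamp;
        }

        private void commit(String frame) {
            System.out.println("committing " + frame);
        }

        public static void main(String[] args) throws InterruptedException {
            RepaintLoopSketch canvas = new RepaintLoopSketch();
            canvas.requestRepaint();
            canvas.requestRepaint();   // the loop skips work when nothing changed since the last frame
            Thread.sleep(200);
            canvas.background.shutdown();
        }
    }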
Java
apache-2.0
e2414653a3f034dc63bb90638546c8383aaede4a
0
andreaturli/legacy-brooklyn,aledsage/legacy-brooklyn,aledsage/legacy-brooklyn,neykov/incubator-brooklyn,andreaturli/legacy-brooklyn,andreaturli/legacy-brooklyn,neykov/incubator-brooklyn,bmwshop/brooklyn,bmwshop/brooklyn,bmwshop/brooklyn,andreaturli/legacy-brooklyn,neykov/incubator-brooklyn,neykov/incubator-brooklyn,aledsage/legacy-brooklyn,bmwshop/brooklyn,bmwshop/brooklyn,aledsage/legacy-brooklyn,andreaturli/legacy-brooklyn,neykov/incubator-brooklyn,bmwshop/brooklyn,aledsage/legacy-brooklyn,andreaturli/legacy-brooklyn,neykov/incubator-brooklyn,aledsage/legacy-brooklyn,bmwshop/brooklyn,andreaturli/legacy-brooklyn,aledsage/legacy-brooklyn
package brooklyn.entity.dns; import java.net.InetAddress; import brooklyn.entity.Entity; import brooklyn.entity.dns.geoscaling.LocationUtils; /** * Encapsulates geo-IP information for a given server. */ public class ServerGeoInfo { public final String address; public final String displayName; public final double latitude; public final double longitude; public static ServerGeoInfo fromEntity(Entity e) { String displayName = e.getDisplayName(); InetAddress address = LocationUtils.findIpAddress(e); Double latitude = LocationUtils.findLatitude(e); Double longitude = LocationUtils.findLongitude(e); return new ServerGeoInfo(address.toString(), displayName, (latitude == null ? 0.0 : latitude), (longitude == null ? 0.0 : longitude)); } public ServerGeoInfo(String address, String displayName, double latitude, double longitude) { this.address = address; this.displayName = displayName; this.latitude = latitude; this.longitude = longitude; } @Override public String toString() { return "ServerGeoInfo["+displayName+": "+address+" at ("+latitude+","+longitude+")]"; } @Override public boolean equals(Object o) { // Slight cheat: only tests the address field. return (o instanceof ServerGeoInfo) && address.equals(((ServerGeoInfo) o).address); } @Override public int hashCode() { // Slight cheat: only includes the address field. return address.hashCode(); } }
extra/src/main/java/brooklyn/entity/dns/ServerGeoInfo.java
package brooklyn.entity.dns; /** * Encapsulates geo-IP information for a given server. */ public class ServerGeoInfo { public final String address; public final String displayName; public final double latitude; public final double longitude; public ServerGeoInfo(String address, String displayName, double latitude, double longitude) { this.address = address; this.displayName = displayName; this.latitude = latitude; this.longitude = longitude; } @Override public String toString() { return "ServerGeoInfo["+displayName+": "+address+" at ("+latitude+","+longitude+")]"; } @Override public boolean equals(Object o) { // Slight cheat: only tests the address field. return (o instanceof ServerGeoInfo) && address.equals(((ServerGeoInfo) o).address); } @Override public int hashCode() { // Slight cheat: only includes the address field. return address.hashCode(); } }
ENGR-1594 Add 'fromEntity' convenience factory method
extra/src/main/java/brooklyn/entity/dns/ServerGeoInfo.java
ENGR-1594 Add 'fromEntity' convenience factory method
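The ServerGeoInfo record above adds a fromEntity factory that falls back to 0.0 when latitude or longitude is missing but still dereferences the looked-up address. Below is a minimal sketch of a fully null-tolerant variant of that factory style; the GeoLookup interface and GeoInfo record are invented for illustration and are not Brooklyn's LocationUtils API.

    import java.net.InetAddress;

    // Null-tolerant variant of a fromEntity-style factory. GeoLookup and GeoInfo
    // are invented for this sketch (GeoInfo uses a Java 16+ record for brevity).
    public final class GeoInfoFactorySketch {

        interface GeoLookup {
            InetAddress findIpAddress();
            Double findLatitude();
            Double findLongitude();
        }

        record GeoInfo(String address, double latitude, double longitude) { }

        /** Builds a GeoInfo, substituting defaults when any lookup returns null. */
        static GeoInfo fromLookup(GeoLookup lookup) {
            InetAddress address = lookup.findIpAddress();
            Double latitude = lookup.findLatitude();
            Double longitude = lookup.findLongitude();
            return new GeoInfo(
                    address == null ? "unknown" : address.getHostAddress(),
                    latitude == null ? 0.0 : latitude,
                    longitude == null ? 0.0 : longitude);
        }

        public static void main(String[] args) {
            GeoLookup nothingKnown = new GeoLookup() {
                public InetAddress findIpAddress() { return null; }
                public Double findLatitude()       { return null; }
                public Double findLongitude()      { return null; }
            };
            System.out.println(fromLookup(nothingKnown));   // prints defaults, no NullPointerException
        }
    }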
Java
apache-2.0
20bc10da24691f8d386754c8cece4d21c4fd63b5
0
google/iosched,google/iosched,google/iosched
/* * Copyright 2015 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.samples.apps.iosched.map.util; import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.support.v4.content.AsyncTaskLoader; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.model.LatLng; import com.google.maps.android.geojson.GeoJsonFeature; import com.google.maps.android.geojson.GeoJsonLayer; import com.google.maps.android.geojson.GeoJsonPoint; import com.google.maps.android.geojson.GeoJsonPointStyle; import com.google.maps.android.ui.IconGenerator; import com.google.samples.apps.iosched.provider.ScheduleContract; import com.google.samples.apps.iosched.util.MapUtils; import org.json.JSONException; import org.json.JSONObject; import java.util.Iterator; /** * Background task that queries the content provider and prepares a {@link GeoJsonLayer} that can be * used to create Markers. */ public class MarkerLoadingTask extends AsyncTaskLoader<GeoJsonLayer> { private GoogleMap mMap; private Context mContext; public MarkerLoadingTask(GoogleMap map, Context context) { super(context); mContext = context; mMap = map; } @Override public GeoJsonLayer loadInBackground() { try { final Uri uri = ScheduleContract.MapGeoJson.buildGeoJsonUri(); Cursor cursor = getContext().getContentResolver().query(uri, MarkerQuery.PROJECTION, null, null, null); GeoJsonLayer layer = null; if (cursor != null) { if (cursor.moveToFirst()) { final String id = cursor.getString(MarkerQuery.GEOJSON); JSONObject j = new JSONObject(id); //GeoJsonLayer stores a map, which is only modified when addLayerToMap is called layer = new GeoJsonLayer(mMap, j); } else { return null; } cursor.close(); } Iterator<GeoJsonFeature> iterator = layer.getFeatures().iterator(); final IconGenerator labelIconGenerator = MapUtils.getLabelIconGenerator(getContext()); while (iterator.hasNext()) { GeoJsonFeature feature = iterator.next(); // get data final String id = feature.getProperty("id"); GeoJsonPoint point = (GeoJsonPoint) feature.getGeometry(); final LatLng position = point.getCoordinates(); final String typeString = feature.getProperty("type"); final int type = MapUtils.detectMarkerType(typeString); final String label = feature.getProperty("title"); GeoJsonPointStyle pointStyle = new GeoJsonPointStyle(); if (type == MarkerModel.TYPE_LABEL) { // Label markers contain the label as its icon pointStyle = MapUtils.createLabelMarker(labelIconGenerator, id, label); } else if (type == MarkerModel.TYPE_ICON) { // An icon marker is mapped to a drawable based on its full type name pointStyle = MapUtils.createIconMarker(typeString, id, getContext()); } else if (type != MarkerModel.TYPE_INACTIVE) { // All other markers (that are not inactive) contain a pin icon pointStyle = MapUtils.createPinMarker(id); } // If the marker is invalid (e.g. the icon does not exist), remove it from the map. 
if (pointStyle == null) { iterator.remove(); } else { pointStyle.setVisible(true); feature.setPointStyle(pointStyle); } } return layer; } catch (JSONException e) { e.printStackTrace(); } return null; } private interface MarkerQuery { String[] PROJECTION = { ScheduleContract.MapGeoJson.GEOJSON }; int GEOJSON = 0; } }
lib/src/main/java/com/google/samples/apps/iosched/map/util/MarkerLoadingTask.java
/* * Copyright 2015 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.samples.apps.iosched.map.util; import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.support.v4.content.AsyncTaskLoader; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.model.LatLng; import com.google.maps.android.geojson.GeoJsonFeature; import com.google.maps.android.geojson.GeoJsonLayer; import com.google.maps.android.geojson.GeoJsonPoint; import com.google.maps.android.geojson.GeoJsonPointStyle; import com.google.maps.android.ui.IconGenerator; import com.google.samples.apps.iosched.provider.ScheduleContract; import com.google.samples.apps.iosched.util.MapUtils; import org.json.JSONException; import org.json.JSONObject; /** * Background task that queries the content provider and prepares a {@link GeoJsonLayer} that can be * used to create Markers. */ public class MarkerLoadingTask extends AsyncTaskLoader<GeoJsonLayer> { private GoogleMap mMap; private Context mContext; public MarkerLoadingTask(GoogleMap map, Context context) { super(context); mContext = context; mMap = map; } @Override public GeoJsonLayer loadInBackground() { try { final Uri uri = ScheduleContract.MapGeoJson.buildGeoJsonUri(); Cursor cursor = getContext().getContentResolver().query(uri, MarkerQuery.PROJECTION, null, null, null); GeoJsonLayer layer = null; if (cursor != null) { if (cursor.moveToFirst()) { final String id = cursor.getString(MarkerQuery.GEOJSON); JSONObject j = new JSONObject(id); //GeoJsonLayer stores a map, which is only modified when addLayerToMap is called layer = new GeoJsonLayer(mMap, j); } else { return null; } cursor.close(); } Iterable<GeoJsonFeature> features = layer.getFeatures(); final IconGenerator labelIconGenerator = MapUtils.getLabelIconGenerator(getContext()); for (GeoJsonFeature feature : features) { // get data final String id = feature.getProperty("id"); GeoJsonPoint point = (GeoJsonPoint) feature.getGeometry(); final LatLng position = point.getCoordinates(); final String typeString = feature.getProperty("type"); final int type = MapUtils.detectMarkerType(typeString); final String label = feature.getProperty("title"); GeoJsonPointStyle pointStyle = new GeoJsonPointStyle(); if (type == MarkerModel.TYPE_LABEL) { // Label markers contain the label as its icon pointStyle = MapUtils.createLabelMarker(labelIconGenerator, id, label); } else if (type == MarkerModel.TYPE_ICON) { // An icon marker is mapped to a drawable based on its full type name pointStyle = MapUtils.createIconMarker(typeString, id, getContext()); } else if (type != MarkerModel.TYPE_INACTIVE) { // All other markers (that are not inactive) contain a pin icon pointStyle = MapUtils.createPinMarker(id); } pointStyle.setVisible(true); feature.setPointStyle(pointStyle); } return layer; } catch (JSONException e) { e.printStackTrace(); } return null; } private interface MarkerQuery { String[] PROJECTION = { ScheduleContract.MapGeoJson.GEOJSON }; int 
GEOJSON = 0; } }
Fix NPE when marker icon does not exist. Instead, hide the marker from the map. Bug: 37488027 Change-Id: I2f5363746a5c4cabe20ca2f04430919ee81b57a3
lib/src/main/java/com/google/samples/apps/iosched/map/util/MarkerLoadingTask.java
Fix NPE when marker icon does not exist.
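The MarkerLoadingTask fix above avoids the NPE by walking the features with an explicit Iterator and removing any feature whose point style could not be created. A minimal, self-contained sketch of that remove-while-iterating pattern follows; all names in it are illustrative.

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    // Remove-while-iterating: entries whose derived value is null are dropped via
    // Iterator.remove() instead of being dereferenced.
    public class RemoveInvalidSketch {

        /** Returns a style for the marker, or null when no icon exists for it. */
        static String createStyle(String marker) {
            return marker.startsWith("ICON_MISSING") ? null : "style:" + marker;
        }

        public static void main(String[] args) {
            List<String> markers = new ArrayList<>(List.of("SESSION", "ICON_MISSING_1", "LABEL"));
            Iterator<String> iterator = markers.iterator();
            while (iterator.hasNext()) {
                String marker = iterator.next();
                String style = createStyle(marker);
                if (style == null) {
                    iterator.remove();        // drop it instead of calling methods on null
                } else {
                    System.out.println(marker + " -> " + style);
                }
            }
            System.out.println("kept: " + markers);
        }
    }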
Java
apache-2.0
163ecd89978aa6258d2cdda455f10782a6e9cb3b
0
duanjp8617/floodlight,duanjp8617/floodlight,duanjp8617/floodlight,duanjp8617/floodlight,duanjp8617/floodlight
package net.floodlightcontroller.nfvtest.message; import java.util.ArrayList; import org.zeromq.ZMQ.Socket; import net.floodlightcontroller.nfvtest.message.Message; import net.floodlightcontroller.nfvtest.nfvutils.HostServer.VmInstance; import net.floodlightcontroller.nfvtest.nfvutils.HostServer; import net.floodlightcontroller.nfvtest.nfvcorestructure.NFVServiceChain; public class ConcreteMessage { /* * General messages received by all processor for termination. */ static public class KillSelfRequest extends Message{ private final String sourceId; public KillSelfRequest(String sourceId){ this.sourceId = sourceId; } public String getSourceId(){ return this.sourceId; } } //message first sent to vmworker, then allocator /* * The following requests are sent to VmWorker processor to prepare the * NFV environment and create VMs. * The following replies are generated by the VmWorker processor to respond * to the requests sent by other processors. */ //create vm static public class CreateVmRequest extends Message { private final String sourceId; private final VmInstance vmInstance; public CreateVmRequest(String sourceId, VmInstance vmInstance){ this.sourceId = sourceId; this.vmInstance = vmInstance; } public String getSourceId(){ return sourceId; } public VmInstance getVmInstance(){ return vmInstance; } } static public class CreateVmReply extends Message { private final String sourceId; private final boolean successful; private final CreateVmRequest request; public CreateVmReply(String sourceId, CreateVmRequest request, boolean isRequestSuccessful){ this.sourceId = sourceId; this.successful = isRequestSuccessful; this.request = request; } public String getSourceId(){ return this.sourceId; } public CreateVmRequest getRequest(){ return request; } public boolean getSuccessful(){ return this.successful; } public VmInstance getVmInstance(){ return this.request.getVmInstance(); } } //initialize a host static public class HostInitializationRequest extends Message { private final String sourceId; private final HostServer hostServer; public HostInitializationRequest(String sourceId, HostServer hostServer){ this.sourceId = sourceId; this.hostServer = hostServer; } public String getSourceId(){ return this.sourceId; } public HostServer getHostServer(){ return this.hostServer; } } //destroy vm static public class DestroyVmRequest extends Message { private final String sourceId; private final VmInstance vmInstance; public DestroyVmRequest(String sourceId, VmInstance vmInstance){ this.sourceId = sourceId; this.vmInstance = vmInstance; } public String getSourceId(){ return sourceId; } public VmInstance getVmInstance(){ return vmInstance; } } static public class DestroyVmReply extends Message { private final String sourceId; private final DestroyVmRequest request; private final boolean isSuccessful; public DestroyVmReply(String sourceId, DestroyVmRequest request, boolean isSuccessful){ this.sourceId = sourceId; this.request = request; this.isSuccessful = isSuccessful; } public String getSourceId(){ return sourceId; } public DestroyVmRequest getRequest(){ return this.request; } public boolean getSuccessful(){ return this.isSuccessful; } } /* * The following requests are sent to the VmAllocator processor. 
*/ //allocate a vm static public class AllocateVmRequest extends Message { private final String sourceId; private final String chainName; private final int stageIndex; public AllocateVmRequest(String sourceId, String chainName, int stageIndex){ this.sourceId = sourceId; this.chainName = chainName; this.stageIndex = stageIndex; } public String getSourceId(){ return this.sourceId; } public String getChainName(){ return this.chainName; } public int getStageIndex(){ return this.stageIndex; } } static public class AllocateVmReply extends Message { private final String sourceId; private final VmInstance vmInstance; private final AllocateVmRequest request; public AllocateVmReply(String sourceId, VmInstance vmInstance, AllocateVmRequest request){ this.sourceId = sourceId; this.vmInstance = vmInstance; this.request = request; } public String getSourceId(){ return this.sourceId; } public VmInstance getVmInstance(){ return this.vmInstance; } public AllocateVmRequest getAllocateVmRequest(){ return this.request; } } //add another host server, an easy operation, no need for reply static public class AddHostServerRequest extends Message { private final String sourceId; private final HostServer hostServer; public AddHostServerRequest(String sourceId, HostServer hostServer){ this.sourceId = sourceId; this.hostServer = hostServer; } public String getSourceId(){ return this.sourceId; } public HostServer getHostServer(){ return this.hostServer; } } //reclaim a vm, a reply? static public class DeallocateVmRequest extends Message { private final String sourceId; private final VmInstance vmInstance; public DeallocateVmRequest(String sourceId, VmInstance vmInstance){ this.sourceId = sourceId; this.vmInstance = vmInstance; } public String getSourceId(){ return this.sourceId; } public VmInstance getVmInstance(){ return this.vmInstance; } } //The following messages are sent to ServiceChainHandler for processing. static public class InitServiceChainRequset extends Message{ private final String sourceId; private final NFVServiceChain serviceChain; public InitServiceChainRequset(String sourceId, NFVServiceChain serviceChain){ this.sourceId = sourceId; this.serviceChain = serviceChain; } public String getSourceId(){ return this.sourceId; } public NFVServiceChain getServiceChain(){ return this.serviceChain; } } static public class StatUpdateRequest extends Message{ private final String sourceId; private final ArrayList<String> statList; private final String managementIp; public StatUpdateRequest(String sourceId, String managementIp, ArrayList<String> statList){ this.sourceId = sourceId; this.managementIp = managementIp; this.statList = statList; } public String getSourceId(){ return this.sourceId; } public String getManagementIp(){ return this.managementIp; } public ArrayList<String> getStatList(){ return this.statList; } } //do not understand the following class //The following messages are sent to SubscriberConnector for processing. 
static public class SubConnRequest extends Message{ private final String sourceId; private final String managementIp; private final String port1; private final String port2; private final VmInstance vmInstance; public SubConnRequest(String sourceId, String managementIp, String port1, String port2, VmInstance vmInstance){ this.sourceId = sourceId; this.managementIp = managementIp; this.port1 = port1; this.port2 = port2; this.vmInstance = vmInstance; } public String getSourceId(){ return this.sourceId; } public String getManagementIp(){ return this.managementIp; } public String getPort1(){ return this.port1; } public String getPort2(){ return this.port2; } public VmInstance getVmInstance(){ return this.vmInstance; } } static public class SubConnReply extends Message{ private final String sourceId; private final SubConnRequest request; private final Socket subscriber1; private final Socket subscriber2; public SubConnReply(String sourceId, SubConnRequest request, Socket subscriber1, Socket subscriber2){ this.sourceId = sourceId; this.request = request; this.subscriber1 = subscriber1; this.subscriber2 = subscriber2; } public String getSourceId(){ return this.sourceId; } public SubConnRequest getSubConnRequest(){ return this.request; } public Socket getSubscriber1(){ return this.subscriber1; } public Socket getSubscriber2(){ return this.subscriber2; } } /* * The following requests are sent to the DNSUpdator processor. */ static public class DNSUpdateRequest extends Message{ private final String sourceId; private final String domainName; private final String ipAddress; private final String addOrDelete; private final Socket socket1; //why a socket private final Socket socket2; private final VmInstance vmInstance; public DNSUpdateRequest(String sourceId, String domainName, String ipAddress, String addOrDelete, Socket socket1, Socket socket2, VmInstance vmInstance){ this.sourceId = sourceId; this.domainName = domainName; this.ipAddress = ipAddress; this.addOrDelete = addOrDelete; this.socket1 = socket1; this.socket2 = socket2; this.vmInstance = vmInstance; } public String getSourceId(){ return this.sourceId; } public String getDomainName(){ return this.domainName; } public String getIpAddress(){ return this.ipAddress; } public String getAddOrDelete(){ return this.addOrDelete; } public Socket getSocket1(){ return this.socket1; } public Socket getSocket2(){ return this.socket2; } public VmInstance getVmInstance(){ return this.vmInstance; } } //no message showing success static public class DNSUpdateReply extends Message{ private final DNSUpdateRequest request; private final String sourceId; public DNSUpdateReply(String sourceId, DNSUpdateRequest request){ this.request = request; this.sourceId = sourceId; } public String getSourceId(){ return this.sourceId; } public DNSUpdateRequest getDNSUpdateReq(){ return this.request; } } //message sent by local controller interface: SCALE static public class GlobScaleRequest extends Message{ private final String sourceId; private final int provision[][]; private final boolean isCtrlScaling; public GlobScaleRequest(String sourceId, int provision[][], boolean isCtrlScaling) { this.sourceId = sourceId; this.provision = provision; this.isCtrlScaling = isCtrlScaling; } public String getSourceId(){ return this.sourceId; } public int[][] getProvision(){ return this.provision; } public boolean getIsCtrlScaling() { return this.isCtrlScaling; } } //message sent to local controller interface: SCALE reply static public class GlobScaleReply extends Message{ private final 
String sourceId; private final GlobScaleRequest request; private final boolean success; public GlobScaleReply(String sourceId, GlobScaleRequest request, boolean success) { this.sourceId = sourceId; this.request = request; this.success = success; } public String getSourceId(){ return this.sourceId; } public GlobScaleRequest getGlobScaleRequest(){ return this.request; } public boolean getSuccess() { return this.success; } } }
src/main/java/net/floodlightcontroller/nfvtest/message/ConcreteMessage.java
package net.floodlightcontroller.nfvtest.message; import java.util.ArrayList; import org.zeromq.ZMQ.Socket; import net.floodlightcontroller.nfvtest.message.Message; import net.floodlightcontroller.nfvtest.nfvutils.HostServer.VmInstance; import net.floodlightcontroller.nfvtest.nfvutils.HostServer; import net.floodlightcontroller.nfvtest.nfvcorestructure.NFVServiceChain; public class ConcreteMessage { /* * General messages received by all processor for termination. */ static public class KillSelfRequest extends Message{ private final String sourceId; public KillSelfRequest(String sourceId){ this.sourceId = sourceId; } public String getSourceId(){ return this.sourceId; } } /* * The following requests are sent to VmWorker processor to prepare the * NFV environment and create VMs. * The following replies are generated by the VmWorker processor to respond * to the requests sent by other processors. */ static public class CreateVmRequest extends Message { private final String sourceId; private final VmInstance vmInstance; public CreateVmRequest(String sourceId, VmInstance vmInstance){ this.sourceId = sourceId; this.vmInstance = vmInstance; } public String getSourceId(){ return sourceId; } public VmInstance getVmInstance(){ return vmInstance; } } static public class CreateVmReply extends Message { private final String sourceId; private final boolean successful; private final CreateVmRequest request; public CreateVmReply(String sourceId, CreateVmRequest request, boolean isRequestSuccessful){ this.sourceId = sourceId; this.successful = isRequestSuccessful; this.request = request; } public String getSourceId(){ return this.sourceId; } public CreateVmRequest getRequest(){ return request; } public boolean getSuccessful(){ return this.successful; } public VmInstance getVmInstance(){ return this.request.getVmInstance(); } } static public class HostInitializationRequest extends Message { private final String sourceId; private final HostServer hostServer; public HostInitializationRequest(String sourceId, HostServer hostServer){ this.sourceId = sourceId; this.hostServer = hostServer; } public String getSourceId(){ return this.sourceId; } public HostServer getHostServer(){ return this.hostServer; } } static public class DestroyVmRequest extends Message { private final String sourceId; private final VmInstance vmInstance; public DestroyVmRequest(String sourceId, VmInstance vmInstance){ this.sourceId = sourceId; this.vmInstance = vmInstance; } public String getSourceId(){ return sourceId; } public VmInstance getVmInstance(){ return vmInstance; } } static public class DestroyVmReply extends Message { private final String sourceId; private final DestroyVmRequest request; private final boolean isSuccessful; public DestroyVmReply(String sourceId, DestroyVmRequest request, boolean isSuccessful){ this.sourceId = sourceId; this.request = request; this.isSuccessful = isSuccessful; } public String getSourceId(){ return sourceId; } public DestroyVmRequest getRequest(){ return this.request; } public boolean getSuccessful(){ return this.isSuccessful; } } /* * The following requests are sent to the VmAllocator processor. 
*/ static public class AllocateVmRequest extends Message { private final String sourceId; private final String chainName; private final int stageIndex; private final boolean isBufferNode; private final int dcIndex; private final HostServer ignoredServer; public AllocateVmRequest(String sourceId, String chainName, int stageIndex, boolean isBufferNode, int dcIndex, HostServer ignoredServer){ this.sourceId = sourceId; this.chainName = chainName; this.stageIndex = stageIndex; this.isBufferNode = isBufferNode; this.dcIndex = dcIndex; this.ignoredServer = ignoredServer; } public String getSourceId(){ return this.sourceId; } public String getChainName(){ return this.chainName; } public int getStageIndex(){ return this.stageIndex; } public boolean getIsBufferNode(){ return this.isBufferNode; } public int getDcIndex(){ return this.dcIndex; } public HostServer getIgnoredServer(){ return this.ignoredServer; } } static public class AllocateVmReply extends Message { private final String sourceId; private final VmInstance vmInstance; private final AllocateVmRequest request; public AllocateVmReply(String sourceId, VmInstance vmInstance, AllocateVmRequest request){ this.sourceId = sourceId; this.vmInstance = vmInstance; this.request = request; } public String getSourceId(){ return this.sourceId; } public VmInstance getVmInstance(){ return this.vmInstance; } public AllocateVmRequest getAllocateVmRequest(){ return this.request; } } static public class AddHostServerRequest extends Message { private final String sourceId; private final HostServer hostServer; public AddHostServerRequest(String sourceId, HostServer hostServer){ this.sourceId = sourceId; this.hostServer = hostServer; } public String getSourceId(){ return this.sourceId; } public HostServer getHostServer(){ return this.hostServer; } } static public class DeallocateVmRequest extends Message { private final String sourceId; private final VmInstance vmInstance; public DeallocateVmRequest(String sourceId, VmInstance vmInstance){ this.sourceId = sourceId; this.vmInstance = vmInstance; } public String getSourceId(){ return this.sourceId; } public VmInstance getVmInstance(){ return this.vmInstance; } } //The following messages are sent to ServiceChainHandler for processing. 
static public class InitServiceChainRequset extends Message{ private final String sourceId; private final NFVServiceChain serviceChain; public InitServiceChainRequset(String sourceId, NFVServiceChain serviceChain){ this.sourceId = sourceId; this.serviceChain = serviceChain; } public String getSourceId(){ return this.sourceId; } public NFVServiceChain getServiceChain(){ return this.serviceChain; } } static public class StatUpdateRequest extends Message{ private final String sourceId; private final ArrayList<String> statList; private final String managementIp; public StatUpdateRequest(String sourceId, String managementIp, ArrayList<String> statList){ this.sourceId = sourceId; this.managementIp = managementIp; this.statList = statList; } public String getSourceId(){ return this.sourceId; } public String getManagementIp(){ return this.managementIp; } public ArrayList<String> getStatList(){ return this.statList; } } static public class ServerToChainHandlerRequest extends Message{ private final String sourceId; private final HostServer hostServer; public ServerToChainHandlerRequest(String sourceId, HostServer hostServer){ this.sourceId = sourceId; this.hostServer = hostServer; } public String getSourceId(){ return this.sourceId; } public HostServer getHostServer(){ return this.hostServer; } } //The following messages are sent to SubscriberConnector for processing. static public class SubConnRequest extends Message{ private final String sourceId; private final String managementIp; private final String port1; private final String port2; private final VmInstance vmInstance; public SubConnRequest(String sourceId, String managementIp, String port1, String port2, VmInstance vmInstance){ this.sourceId = sourceId; this.managementIp = managementIp; this.port1 = port1; this.port2 = port2; this.vmInstance = vmInstance; } public String getSourceId(){ return this.sourceId; } public String getManagementIp(){ return this.managementIp; } public String getPort1(){ return this.port1; } public String getPort2(){ return this.port2; } public VmInstance getVmInstance(){ return this.vmInstance; } } static public class SubConnReply extends Message{ private final String sourceId; private final SubConnRequest request; private final Socket subscriber1; private final Socket subscriber2; public SubConnReply(String sourceId, SubConnRequest request, Socket subscriber1, Socket subscriber2){ this.sourceId = sourceId; this.request = request; this.subscriber1 = subscriber1; this.subscriber2 = subscriber2; } public String getSourceId(){ return this.sourceId; } public SubConnRequest getSubConnRequest(){ return this.request; } public Socket getSubscriber1(){ return this.subscriber1; } public Socket getSubscriber2(){ return this.subscriber2; } } /* * The following requests are sent to the DNSUpdator processor. 
*/ static public class DNSUpdateRequest extends Message{ private final String sourceId; private final String domainName; private final String ipAddress; private final String addOrDelete; private final Socket socket1; private final Socket socket2; private final VmInstance vmInstance; public DNSUpdateRequest(String sourceId, String domainName, String ipAddress, String addOrDelete, Socket socket1, Socket socket2, VmInstance vmInstance){ this.sourceId = sourceId; this.domainName = domainName; this.ipAddress = ipAddress; this.addOrDelete = addOrDelete; this.socket1 = socket1; this.socket2 = socket2; this.vmInstance = vmInstance; } public String getSourceId(){ return this.sourceId; } public String getDomainName(){ return this.domainName; } public String getIpAddress(){ return this.ipAddress; } public String getAddOrDelete(){ return this.addOrDelete; } public Socket getSocket1(){ return this.socket1; } public Socket getSocket2(){ return this.socket2; } public VmInstance getVmInstance(){ return this.vmInstance; } } static public class DNSUpdateReply extends Message{ private final DNSUpdateRequest request; private final String sourceId; public DNSUpdateReply(String sourceId, DNSUpdateRequest request){ this.request = request; this.sourceId = sourceId; } public String getSourceId(){ return this.sourceId; } public DNSUpdateRequest getDNSUpdateReq(){ return this.request; } } static public class DNSRemoveRequest extends Message{ private final String sourceId; private final String domainName; private final String ipAddress; public DNSRemoveRequest(String sourceId, String domainName, String ipAddress){ this.sourceId = sourceId; this.domainName = domainName; this.ipAddress = ipAddress; } public String getSourceId(){ return this.sourceId; } public String getDomainName(){ return this.domainName; } public String getIpAddress(){ return this.ipAddress; } } static public class DNSAddRequest extends Message{ private final String sourceId; private final String domainName; private final String ipAddress; public DNSAddRequest(String sourceId, String domainName, String ipAddress){ this.sourceId = sourceId; this.domainName = domainName; this.ipAddress = ipAddress; } public String getSourceId(){ return this.sourceId; } public String getDomainName(){ return this.domainName; } public String getIpAddress(){ return this.ipAddress; } } //The following messages are sent from SwitchStatPoller static public class DcLinkStat extends Message{ private final String sourceId; private final int size; private final float[][] dcSendSpeed; private final float[][] dcRecvSpeed; public DcLinkStat(String sourceId, int size, float[][] dcSendSpeed, float[][] dcRecvSpeed){ this.sourceId = sourceId; this.size = size; this.dcSendSpeed = dcSendSpeed; this.dcRecvSpeed = dcRecvSpeed; } public String getSourceId(){ return this.sourceId; } public int getSize(){ return this.size; } public float[][] getDcSendSpeed(){ return this.dcSendSpeed; } public float[][] getDcRecvSpeed(){ return this.dcRecvSpeed; } } }
local interface
src/main/java/net/floodlightcontroller/nfvtest/message/ConcreteMessage.java
local interface
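The ConcreteMessage record above repeats one pattern throughout: immutable request and reply messages that carry the sender's id, with each reply holding a reference to the request it answers so the receiver can correlate them. A minimal sketch of that pattern with invented class names:

    // Immutable request/reply message pair carrying the sender id, with the reply
    // keeping a reference to its request for correlation.
    public class MessagePairSketch {

        static final class AllocateRequest {
            private final String sourceId;
            private final String chainName;
            AllocateRequest(String sourceId, String chainName) {
                this.sourceId = sourceId;
                this.chainName = chainName;
            }
            String getSourceId()  { return sourceId; }
            String getChainName() { return chainName; }
        }

        static final class AllocateReply {
            private final String sourceId;
            private final AllocateRequest request;
            private final boolean success;
            AllocateReply(String sourceId, AllocateRequest request, boolean success) {
                this.sourceId = sourceId;
                this.request = request;
                this.success = success;
            }
            String getSourceId()         { return sourceId; }
            AllocateRequest getRequest() { return request; }
            boolean isSuccess()          { return success; }
        }

        public static void main(String[] args) {
            AllocateRequest request = new AllocateRequest("chainHandler", "firewall-chain");
            AllocateReply reply = new AllocateReply("vmAllocator", request, true);
            System.out.println(reply.getRequest().getChainName() + " allocated by "
                    + reply.getSourceId() + ": " + reply.isSuccess());
        }
    }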
Java
apache-2.0
8392df2de3880117a6808bc2ee40d45630980309
0
ecki/commons-vfs,apache/commons-vfs,ecki/commons-vfs,apache/commons-vfs
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.vfs2; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.vfs2.util.Messages; import junit.framework.TestCase; /** * A base class for VFS tests. Provides utility methods for locating test resources. */ public abstract class AbstractVfsTestCase extends TestCase { private static File baseDir; /** URL pattern */ private static final Pattern URL_PATTERN = Pattern.compile("[a-z]+://.*"); /** Password pattern */ private static final Pattern PASSWORD_PATTERN = Pattern.compile(":(?:[^/]+)@"); /** * Asserts that an exception contains the expected message. */ public static void assertSameMessage(final String code, final Object param, final Throwable throwable) { assertSameMessage(code, new Object[] { param }, throwable); } /** * Asserts that an exception contains the expected message. */ private static void assertSameMessage(final String code, final Object[] params, final Throwable throwable) { Object[] parmArray = params; if (throwable instanceof FileSystemException) { final FileSystemException fse = (FileSystemException) throwable; // Compare message code and params assertEquals(code, fse.getCode()); assertEquals(params.length, fse.getInfo().length); parmArray = new Object[params.length]; for (int i = 0; i < params.length; i++) { String value = String.valueOf(params[i]); // mask passwords (VFS-169) final Matcher urlMatcher = URL_PATTERN.matcher(value); if (urlMatcher.find()) { final Matcher pwdMatcher = PASSWORD_PATTERN.matcher(value); value = pwdMatcher.replaceFirst(":***@"); } assertEquals(value, fse.getInfo()[i]); parmArray[i] = value; } } // Compare formatted message final String message = Messages.getString(code, parmArray); assertEquals(message, throwable.getMessage()); } /** * Compares 2 objects for equality, nulls are equal. Used by the test classes' equals() methods. */ public static boolean equals(final Object o1, final Object o2) { if (o1 == null && o2 == null) { return true; } if (o1 == null || o2 == null) { return false; } return o1.equals(o2); } /** * Makes a file canonical */ public static File getCanonicalFile(final File file) { try { return file.getCanonicalFile(); } catch (final IOException e) { return file.getAbsoluteFile(); } } public static String getResourceTestDirectory() { return System.getProperty("test.basedir.res", "test-data"); } /** * Returns the test directory as a String. * <p> * {@link #getTestDirectoryFile()} should be preferred. 
* * @return the test directory as a String */ public static String getTestDirectory() { return System.getProperty("test.basedir", "target/test-classes/test-data"); } /** * Locates a test directory, creating it if it does not exist. * * @param name path of the directory, relative to this test's base directory. */ public static File getTestDirectory(final String name) { File file = new File(getTestDirectoryFile(), name); file = getCanonicalFile(file); assertTrue("Test directory \"" + file + "\" does not exist or is not a directory.", file.isDirectory() || file.mkdirs()); return file; } /** * Locates the base directory for this test. */ public static File getTestDirectoryFile() { if (baseDir == null) { final String baseDirProp = getTestDirectory(); // the directory maybe expressed as URI in certain environments if (baseDirProp.startsWith("file://")) { try { baseDir = getCanonicalFile(new File(new URI(baseDirProp))); } catch (final URISyntaxException e) { baseDir = getCanonicalFile(new File(baseDirProp)); } } else { baseDir = getCanonicalFile(new File(baseDirProp)); } } return baseDir; } /** * Locates a test resource, and asserts that the resource exists * * @param name path of the resource, relative to this test's base directory. */ public static File getTestResource(final String name) { return getTestResource(name, true); } /** * Locates a test resource. * * @param name path of the resource, relative to this test's base directory. */ public static File getTestResource(final String name, final boolean mustExist) { File file = new File(getTestDirectoryFile(), name); file = getCanonicalFile(file); if (mustExist) { assertTrue("Test file \"" + file + "\" does not exist.", file.exists()); } else { assertFalse("Test file \"" + file + "\" should not exist.", file.exists()); } return file; } }
commons-vfs2/src/test/java/org/apache/commons/vfs2/AbstractVfsTestCase.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.vfs2; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.util.Messages; import junit.framework.TestCase; /** * A base class for VFS tests. Provides utility methods for locating test resources. */ public abstract class AbstractVfsTestCase extends TestCase { private static File baseDir; /** URL pattern */ private static final Pattern URL_PATTERN = Pattern.compile("[a-z]+://.*"); /** Password pattern */ private static final Pattern PASSWORD_PATTERN = Pattern.compile(":(?:[^/]+)@"); /** * Asserts that an exception contains the expected message. */ public static void assertSameMessage(final String code, final Object param, final Throwable throwable) { assertSameMessage(code, new Object[] { param }, throwable); } /** * Asserts that an exception contains the expected message. */ private static void assertSameMessage(final String code, final Object[] params, final Throwable throwable) { Object[] parmArray = params; if (throwable instanceof FileSystemException) { final FileSystemException fse = (FileSystemException) throwable; // Compare message code and params assertEquals(code, fse.getCode()); assertEquals(params.length, fse.getInfo().length); parmArray = new Object[params.length]; for (int i = 0; i < params.length; i++) { String value = String.valueOf(params[i]); // mask passwords (VFS-169) final Matcher urlMatcher = URL_PATTERN.matcher(value); if (urlMatcher.find()) { final Matcher pwdMatcher = PASSWORD_PATTERN.matcher(value); value = pwdMatcher.replaceFirst(":***@"); } assertEquals(value, fse.getInfo()[i]); parmArray[i] = value; } } // Compare formatted message final String message = Messages.getString(code, parmArray); assertEquals(message, throwable.getMessage()); } /** * Compares 2 objects for equality, nulls are equal. Used by the test classes' equals() methods. */ public static boolean equals(final Object o1, final Object o2) { if (o1 == null && o2 == null) { return true; } if (o1 == null || o2 == null) { return false; } return o1.equals(o2); } /** * Makes a file canonical */ public static File getCanonicalFile(final File file) { try { return file.getCanonicalFile(); } catch (final IOException e) { return file.getAbsoluteFile(); } } public static String getResourceTestDirectory() { return System.getProperty("test.basedir.res", "test-data"); } /** * Returns the test directory as a String. * <p> * {@link #getTestDirectoryFile()} should be preferred. 
* * @return the test directory as a String */ public static String getTestDirectory() { return System.getProperty("test.basedir", "target/test-classes/test-data"); } /** * Locates a test directory, creating it if it does not exist. * * @param name path of the directory, relative to this test's base directory. */ public static File getTestDirectory(final String name) { File file = new File(getTestDirectoryFile(), name); file = getCanonicalFile(file); assertTrue("Test directory \"" + file + "\" does not exist or is not a directory.", file.isDirectory() || file.mkdirs()); return file; } /** * Locates the base directory for this test. */ public static File getTestDirectoryFile() { if (baseDir == null) { final String baseDirProp = getTestDirectory(); // the directory maybe expressed as URI in certain environments if (baseDirProp.startsWith("file://")) { try { baseDir = getCanonicalFile(new File(new URI(baseDirProp))); } catch (final URISyntaxException e) { baseDir = getCanonicalFile(new File(baseDirProp)); } } else { baseDir = getCanonicalFile(new File(baseDirProp)); } } return baseDir; } /** * Locates a test resource, and asserts that the resource exists * * @param name path of the resource, relative to this test's base directory. */ public static File getTestResource(final String name) { return getTestResource(name, true); } /** * Locates a test resource. * * @param name path of the resource, relative to this test's base directory. */ public static File getTestResource(final String name, final boolean mustExist) { File file = new File(getTestDirectoryFile(), name); file = getCanonicalFile(file); if (mustExist) { assertTrue("Test file \"" + file + "\" does not exist.", file.exists()); } else { assertFalse("Test file \"" + file + "\" should not exist.", file.exists()); } return file; } }
Remove unused imports.
commons-vfs2/src/test/java/org/apache/commons/vfs2/AbstractVfsTestCase.java
Remove unused imports.
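The AbstractVfsTestCase record above masks passwords in URL-shaped assertion values (VFS-169) before comparing them. The sketch below isolates that masking step; the two regular expressions are copied from the class above, while the wrapper class and main method are illustrative only.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Masks the ":password@" part of URL-shaped values, leaves other values untouched.
    public class MaskPasswordSketch {
        private static final Pattern URL_PATTERN = Pattern.compile("[a-z]+://.*");
        private static final Pattern PASSWORD_PATTERN = Pattern.compile(":(?:[^/]+)@");

        static String mask(String value) {
            Matcher urlMatcher = URL_PATTERN.matcher(value);
            if (urlMatcher.find()) {
                return PASSWORD_PATTERN.matcher(value).replaceFirst(":***@");
            }
            return value;
        }

        public static void main(String[] args) {
            System.out.println(mask("ftp://user:secret@example.org/data"));   // ftp://user:***@example.org/data
            System.out.println(mask("no url, nothing to mask"));
        }
    }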
Java
apache-2.0
53ce65fe4a8ddfcaf8f15f5b8bd01d877cf62093
0
sidharta/sales-gallery,sidharta/sales-gallery,sidharta/sales-gallery
package com.ciandt.techgallery.service.enums; /** * Enum for mapping recommendations. * * @author Thulio Ribeiro * */ public enum RecommendationEnums { ANY("Todos"), DISCUSS_NEXT("Discutir para próxima"), NOT_RECOMMENDED_RETIRED("Não recomendadas ou aposentar"), OBSERVED_CONCEPT_TEST("Observar e fazer provas de conceito"), RECOMMENDED("Recomendada"), RECOMMENDED_ALTERNATIVE("Recomendada alternativa"), USE_LEARN("Usar e aprender"), UNINFORMED("Não informado"); private String message; private RecommendationEnums(String message) { this.message = message; } public String message() { return message; } }
src/main/java/com/ciandt/techgallery/service/enums/RecommendationEnums.java
package com.ciandt.techgallery.service.enums; import com.ciandt.techgallery.utils.i18n.I18n; /** * Enum for mapping recommendations. * * @author Thulio Ribeiro * */ public enum RecommendationEnums { ANY("Todos"), DISCUSS_NEXT("Discutir para próxima"), NOT_RECOMMENDED_RETIRED("Não recomendadas ou aposentar"), OBSERVED_CONCEPT_TEST("Observar e fazer provas de conceito"), RECOMMENDED("Recomendada"), RECOMMENDED_ALTERNATIVE("Recomendada alternativa"), USE_LEARN("Usar e aprender"), UNINFORMED("Não informado"); private String message; private I18n i18n = I18n.getInstance(); private RecommendationEnums(String message) { this.message = message; } public String message() { return message; } }
[TECG-158]-Refactoring: format
src/main/java/com/ciandt/techgallery/service/enums/RecommendationEnums.java
[TECG-158]-Refactoring: format
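The RecommendationEnums record above uses the common pattern of enum constants carrying a human-readable message through a private constructor. A minimal sketch of that pattern with invented constants:

    // Enum constants carrying a display message; see RecommendationEnums above for
    // the real constants, which these do not reproduce.
    public class EnumMessageSketch {

        enum Recommendation {
            RECOMMENDED("Recommended"),
            USE_AND_LEARN("Use and learn"),
            UNINFORMED("Not informed");

            private final String message;

            Recommendation(String message) {
                this.message = message;
            }

            String message() {
                return message;
            }
        }

        public static void main(String[] args) {
            for (Recommendation r : Recommendation.values()) {
                System.out.println(r.name() + " -> " + r.message());
            }
        }
    }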
Java
apache-2.0
4c1ded60bb0d4e55089984298eb066b5cc228fc1
0
jayway/rest-assured,rest-assured/rest-assured,rest-assured/rest-assured,rest-assured/rest-assured,jayway/rest-assured
/* * Copyright 2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.restassured.response; import io.restassured.http.ContentType; import io.restassured.matcher.DetailedCookieMatcher; import io.restassured.matcher.ResponseAwareMatcher; import io.restassured.matcher.RestAssuredMatchers; import io.restassured.parsing.Parser; import io.restassured.specification.Argument; import io.restassured.specification.ResponseSpecification; import org.hamcrest.Matcher; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.function.Function; /** * A validatable response of a request made by REST Assured. * <p> * Usage example: * <pre> * get("/lotto").then().body("lotto.lottoId", is(5)); * </pre> * </p> */ public interface ValidatableResponseOptions<T extends ValidatableResponseOptions<T, R>, R extends ResponseBody<R> & ResponseOptions<R>> { /** * Same as {@link #body(String, org.hamcrest.Matcher, Object...)} expect that you can pass arguments to the key. This * is useful in situations where you have e.g. pre-defined variables that constitutes the key: * <pre> * String someSubPath = "else"; * int index = 1; * get("/x").then().body("something.%s[%d]", withArgs(someSubPath, index), equalTo("some value")). .. * </pre> * <p/> * or if you have complex root paths and don't wish to duplicate the path for small variations: * <pre> * get("/x").then(). * root("filters.filterConfig[%d].filterConfigGroups.find { it.name == 'Gold' }.includes"). * body(withArgs(0), hasItem("first")). * body(withArgs(1), hasItem("second")). * .. * </pre> * <p/> * The key and arguments follows the standard <a href="https://docs.oracle.com/javase/7/docs/api/java/util/Formatter.html#syntax">formatting syntax</a> of Java. * <p> * Note that <code>withArgs</code> can be statically imported from the <code>io.restassured.RestAssured</code> class. * </p> * * @param path The body path * @param arguments The arguments to apply to the key * @param matcher The hamcrest matcher that must response body must match. * @param additionalKeyMatcherPairs Optionally additional hamcrest matchers that must return <code>true</code>. * @return the response specification * @see #body(String, org.hamcrest.Matcher, Object...) */ T body(String path, List<Argument> arguments, Matcher matcher, Object... additionalKeyMatcherPairs); /** * This as special kind of expectation that is mainly useful when you've specified a root path with an argument placeholder. * For example: * <pre> * get(..).then(). * root("x.%s"). // Root path with a placeholder * body(withArgs("firstName"), equalTo(..)). * body(withArgs("lastName"), equalTo(..)). * </pre> * <p/> * Note that this is the same as doing: * <pre> * get(..).then(). * root("x.%s"). // Root path with a placeholder * content(withArgs("firstName"), equalTo(..)). * content(withArgs("lastName"), equalTo(..)). * </pre> * <p/> * * @param arguments The arguments to apply to the root path. * @param matcher The hamcrest matcher that must response body must match. 
* @param additionalKeyMatcherPairs Optionally additional hamcrest matchers that must return <code>true</code>. * @return the response specification * @see #body(String, org.hamcrest.Matcher, Object...) */ T body(List<Argument> arguments, Matcher matcher, Object... additionalKeyMatcherPairs); /** * Compare a path in the response body to something available in the response using arguments when root path is used. * For example imagine that a resource "/x" returns the following JSON document: * <pre> * { * "data" : { * "user1" : { * "userId" : "my-id1", * "href" : "http://localhost:8080/my-id1" * }, * "user2" : { * "userId" : "my-id2", * "href" : "http://localhost:8080/my-id2" * }, * } * } * </pre> * you can then verify the href using: * <pre> * when(). * get("/x"). * then(). * root("data.%s"). * body(withArgs("user1"), new ResponseAwareMatcher<Response>() { * public Matcher<?> matcher(Response response) { * return equalTo("http://localhost:8080/" + response.path("userId")); * } * }); * </pre> * Note that you can also use some of the predefined methods in {@link RestAssuredMatchers}. * * @param responseAwareMatcher The {@link ResponseAwareMatcher} that creates the Hamcrest matcher. * @return the response specification * @see #body(String, ResponseAwareMatcher) * @see RestAssuredMatchers#endsWithPath(String) * @see RestAssuredMatchers#startsWithPath(String) * @see RestAssuredMatchers#containsPath(String) * @see RestAssuredMatchers#equalToPath(String) */ T body(List<Argument> arguments, ResponseAwareMatcher<R> responseAwareMatcher); /** * Validate that the response status code matches the given Hamcrest matcher. E.g. * <pre> * get("/something").then().assertThat().statusCode(equalTo(200)); * </pre> * * @param expectedStatusCode The expected status code matcher. * @return the response specification */ T statusCode(Matcher<? super Integer> expectedStatusCode); /** * Validate that the response status code matches an integer. E.g. * <pre> * get("/something").then().assertThat().statusCode(200); * </pre> * <p/> * This is the same as: * <pre> * get("/something").then().assertThat().statusCode(equalTo(200)); * </pre> * * @param expectedStatusCode The expected status code. * @return the response specification */ T statusCode(int expectedStatusCode); /** * Validate that the response status line matches the given Hamcrest matcher. E.g. * <pre> * expect().statusLine(equalTo("No Content")).when().get("/something"); * </pre> * * @param expectedStatusLine The expected status line matcher. * @return the response specification */ T statusLine(Matcher<? super String> expectedStatusLine); /** * Validate that the response status line matches the given String. E.g. * <pre> * expect().statusLine("No Content").when().get("/something"); * </pre> * <p/> * This is the same as: * <pre> * expect().statusLine(equalTo("No Content")).when().get("/something"); * </pre> * * @param expectedStatusLine The expected status line. * @return the response specification */ T statusLine(String expectedStatusLine); /** * Validate that response headers matches those specified in a Map. * <p> * E.g. 
expect that the response of the GET request to "/something" contains header <tt>headerName1=headerValue1</tt> * and <tt>headerName2=headerValue2</tt>: * <pre> * Map expectedHeaders = new HashMap(); * expectedHeaders.put("headerName1", "headerValue1")); * expectedHeaders.put("headerName2", "headerValue2"); * * get("/something").then().assertThat().headers(expectedHeaders); * </pre> * </p> * <p/> * <p> * You can also use Hamcrest matchers: * <pre> * Map expectedHeaders = new HashMap(); * expectedHeaders.put("Content-Type", containsString("charset=UTF-8")); * expectedHeaders.put("Content-Length", "160"); * * get("/something").then().assertThat().headers(expectedHeaders); * </pre> * </p> * * @param expectedHeaders The Map of expected response headers * @return the response specification */ T headers(Map<String, ?> expectedHeaders); /** * Validate that response headers matches the supplied headers and values. * <p> * E.g. expect that the response of the GET request to "/something" contains header <tt>Pragma=no-cache</tt> * and <tt>Content-Encoding=gzip</tt>: * <pre> * get("/something").then().assertThat().headers("Pragma", "no-cache", "Content-Encoding", "gzip"); * </pre> * </p> * <p/> * <p> * You can also use Hamcrest matchers: * <pre> * get("/something").then().assertThat().headers("Content-Type", containsString("application/json"), "Pragma", equalsTo("no-cache")); * </pre> * <p/> * and you can even mix string matching and hamcrest matching: * <pre> * get("/something").then().assertThat().headers("Content-Type", containsString("application/json"), "Pragma", "no-cache"); * </pre> * </p> * * @param firstExpectedHeaderName The name of the first header * @param firstExpectedHeaderValue The value of the first header * @param expectedHeaders A list of expected "header name" - "header value" pairs. * @return the response specification */ T headers(String firstExpectedHeaderName, Object firstExpectedHeaderValue, Object... expectedHeaders); /** * Validate that a response header matches the supplied header name and hamcrest matcher. * <p> * E.g. expect that the response of the GET request to "/something" contains header <tt>Pragma=no-cache</tt>: * <pre> * get("/something").then().assertThat().header("Pragma", containsString("no")); * </pre> * </p> * <p/> * <p> * You can also expect several headers: * <pre> * get("/something").then().assertThat().header("Pragma", equalsTo("no-cache")).and().header("Content-Encoding", containsString("zip")); * </pre> * Also take a look at {@link #headers(String, Object, Object...)} )} for a short version of passing multiple headers. * </p> * * @param headerName The name of the expected header * @param expectedValueMatcher The Hamcrest matcher that must conform to the value * @return the response specification */ T header(String headerName, Matcher<?> expectedValueMatcher); /** * Compare a header in the response to something else available in the response. 
* <p> * For example imagine that a POST to resource "/x" returns "201 Created" and sets a Location header * that should end with "/x/{id}" where <code>{id}</code> is present in the response body: * <pre> * { "id" : 5 } * </pre> * To verify that the Location header ends with "/x/{id}" you can do like this: * <p> * <pre> * given().param("id", 1).body(..).post("/x").then().assertThat().header("Location", response -> endsWith("/x/" + response.path("id"))); * </pre> * </p> * <p/> * <p> * </p> * * @param headerName The name of the expected header * @param expectedValueMatcher The Hamcrest matcher that must conform to the value * @return the response specification */ T header(String headerName, ResponseAwareMatcher<R> expectedValueMatcher); /** * Expect that a response header matches the supplied header name and hamcrest matcher using a mapping function. * <p> * E.g. expect that the response of the GET request to "/something" contains header <tt>Content-Length: 500</tt> and you want to * validate that the length must always be less than 600: * <pre> * when(). * get("/something"). * then(). * header("Content-Length", Integer::parseInt, lessThan(600)); * </pre> * </p> * * @param headerName The name of the expected header * @param mappingFunction Map the header to another value type before exposing it to the Hamcrest matcher * @param expectedValueMatcher The Hamcrest matcher that must conform to the value * @return the response specification */ <V> T header(String headerName, Function<String, V> mappingFunction, Matcher<? super V> expectedValueMatcher); /** * Validate that a response header matches the supplied name and value. * <p> * E.g. expect that the response of the GET request to "/something" contains header <tt>Pragma=no-cache</tt>: * <pre> * get("/something").then().assertThat().header("Pragma", "no-cache"); * </pre> * </p> * <p/> * <p> * You can also expect several headers: * <pre> * get("/something").then().assertThat().header("Pragma", "no-cache").and().header("Content-Encoding", "gzip"); * </pre> * Also take a look at {@link #headers(String, Object, Object...)} for a short version of passing multiple headers. * </p> * * @param headerName The name of the expected header * @param expectedValue The value of the expected header * @return the response specification */ T header(String headerName, String expectedValue); /** * Validate that response cookies matches those specified in a Map. * <p> * E.g. expect that the response of the GET request to "/something" contains cookies <tt>cookieName1=cookieValue1</tt> * and <tt>cookieName2=cookieValue2</tt>: * <pre> * Map expectedCookies = new HashMap(); * expectedCookies.put("cookieName1", "cookieValue1")); * expectedCookies.put("cookieName2", "cookieValue2"); * * get("/something").then().assertThat().cookies(expectedCookies); * </pre> * </p> * <p/> * <p> * You can also use Hamcrest matchers: * <pre> * Map expectedCookies = new HashMap(); * expectedCookies.put("cookieName1", containsString("Value1")); * expectedCookies.put("cookieName2", "cookieValue2"); * * get("/something").then().assertThat().cookies(expectedCookies); * </pre> * </p> * * @param expectedCookies A Map of expected response cookies * @return the response specification */ T cookies(Map<String, ?> expectedCookies); /** * Validate that a cookie exist in the response, regardless of value (it may have no value at all). 
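     * <p>
     * A minimal sketch (the endpoint and cookie name below are made-up placeholders, not defined by this API):
     * <pre>
     * get("/something").then().assertThat().cookie("JSESSIONID");
     * </pre>
     * </p>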
* * @param cookieName the cookie to validate that it exists * @return the response specification */ T cookie(String cookieName); /** * Validate that response cookies matches the supplied cookie names and values. * <p> * E.g. expect that the response of the GET request to "/something" contains cookies <tt>cookieName1=cookieValue1</tt> * and <tt>cookieName2=cookieValue2</tt>: * <pre> * get("/something").then().assertThat().cookies("cookieName1", "cookieValue1", "cookieName2", "cookieValue2"); * </pre> * </p> * <p/> * <p> * You can also use Hamcrest matchers: * <pre> * get("/something").then().assertThat().cookies("cookieName1", containsString("Value1"), "cookieName2", equalsTo("cookieValue2")); * </pre> * <p/> * and you can even mix string matching and hamcrest matching: * <pre> * get("/something").then().assertThat().cookies("cookieName1", containsString("Value1"), "cookieName2", "cookieValue2"); * </pre> * </p> * * @param firstExpectedCookieName The name of the first cookie * @param firstExpectedCookieValue The value of the first cookie * @param expectedCookieNameValuePairs A list of expected "cookie name" - "cookie value" pairs. * @return the response specification */ T cookies(String firstExpectedCookieName, Object firstExpectedCookieValue, Object... expectedCookieNameValuePairs); /** * Validate that a response cookie matches the supplied cookie name and hamcrest matcher. * <p> * E.g. expect that the response of the GET request to "/something" contain cookie <tt>cookieName1=cookieValue1</tt> * <pre> * get("/something").then().assertThat().cookie("cookieName1", containsString("Value1")); * </pre> * </p> * <p/> * <p> * You can also expect several cookies: * <pre> * get("/something").then().assertThat().cookie("cookieName1", equalsTo("cookieValue1")).and().cookie("cookieName2", containsString("Value2")); * </pre> * Also take a look at {@link #cookies(String, Object, Object...)} for a short version of passing multiple cookies. * </p> * * @param cookieName The name of the expected cookie * @param expectedValueMatcher The Hamcrest matcher that must conform to the value * @return the response specification */ T cookie(String cookieName, Matcher<?> expectedValueMatcher); /** * Validate that a detailed response cookie matches the supplied cookie name and hamcrest matcher (see {@link DetailedCookieMatcher}). * <p> * E.g. expect that the response of the GET request to "/something" contain cookie <tt>cookieName1=cookieValue1</tt> * <pre> * get("/something").then().assertThat() * .detailedCookie("cookieName1", detailedCookie().value("cookieValue1").secured(true)); * </pre> * </p> * <p/> * <p> * You can also expect several cookies: * <pre> * get("/something").then().assertThat().detailedCookie("cookieName1", detailedCookie().value("cookieValue1").secured(true)) * .and().detailedCookie("cookieName2", detailedCookie().value("cookieValue2").secured(false)); * </pre> * </p> * * @param cookieName The name of the expected cookie * @param detailedCookieMatcher The Hamcrest matcher that must conform to the cookie * @return the response specification */ T cookie(String cookieName, DetailedCookieMatcher detailedCookieMatcher); /** * Validate that a response cookie matches the supplied name and value. * <p> * E.g. 
expect that the response of the GET request to "/something" contain cookie <tt>cookieName1=cookieValue1</tt>: * <pre> * get("/something").then().assertThat().cookie("cookieName1", "cookieValue1"); * </pre> * </p> * <p/> * <p> * You can also expect several cookies: * <pre> * get("/something").then().assertThat().cookie("cookieName1", "cookieValue1").and().cookie("cookieName2", "cookieValue2"); * </pre> * Also take a look at {@link #cookies(String, Object, Object...)} for a short version of passing multiple cookies. * </p> * * @param cookieName The name of the expected cookie * @param expectedValue The value of the expected cookie * @return the response specification */ T cookie(String cookieName, Object expectedValue); /** * Set the root path of the response body so that you don't need to write the entire path for each expectation. * E.g. instead of writing: * <p/> * <pre> * get(..).then(). * body("x.y.firstName", is(..)). * body("x.y.lastName", is(..)). * body("x.y.age", is(..)). * body("x.y.gender", is(..)). * </pre> * <p/> * you can use a root path and do: * <pre> * get(..).then(). * rootPath("x.y"). * body("firstName", is(..)). * body("lastName", is(..)). * body("age", is(..)). * body("gender", is(..)); * </pre> * <p/> * Note that this method is exactly the same as {@link #root(String)}. * * @param rootPath The root path to use. */ T rootPath(String rootPath); /** * Set the root path with arguments of the response body so that you don't need to write the entire path for each expectation. * <p/> * Note that this method is exactly the same as {@link #root(String, java.util.List)}. * * @param rootPath The root path to use. * @param arguments A list of arguments. The path and arguments follows the standard <a href="https://docs.oracle.com/javase/7/docs/api/java/util/Formatter.html#syntax">formatting syntax</a> of Java. * @see #rootPath(String) */ T rootPath(String rootPath, List<Argument> arguments); /** * Set the root path with arguments of the response body so that you don't need to write the entire path for each expectation. * <p/> * Note that this method is exactly the same as {@link #rootPath(String, java.util.List)}. * * @param rootPath The root path to use. * @param arguments The list of substitution arguments. The path and arguments follows the standard <a href="https://docs.oracle.com/javase/7/docs/api/java/util/Formatter.html#syntax">formatting syntax</a> of Java.. * @see #rootPath(String) * @deprecated Use {@link #rootPath(String, List)} instead */ @Deprecated T root(String rootPath, List<Argument> arguments); /** * Set the root path of the response body so that you don't need to write the entire path for each expectation. * E.g. instead of writing: * <p/> * <pre> * get(..).then(). * body("x.y.firstName", is(..)). * body("x.y.lastName", is(..)). * body("x.y.age", is(..)). * body("x.y.gender", is(..)); * </pre> * <p/> * you can use a root and do: * <pre> * get(..).then(). * root("x.y"). * body("firstName", is(..)). * body("lastName", is(..)). * body("age", is(..)). * body("gender", is(..)). * </pre> * <p/> * Note that this method is exactly the same as {@link #rootPath(String)} but slightly shorter. * * @param rootPath The root path to use. * @deprecated Use {@link #rootPath(String)} instead */ @Deprecated T root(String rootPath); /** * Reset the root path of the response body so that you don't need to write the entire path for each expectation. * For example: * <p/> * <pre> * get(..).then(). * root("x.y"). * body("firstName", is(..)). * body("lastName", is(..)). 
* noRoot() * body("z.something1", is(..)). * body("w.something2", is(..)); * </pre> * <p/> * This is the same as calling <code>rootPath("")</code> but more expressive. * Note that this method is exactly the same as {@link #noRootPath()} but slightly shorter. * * @see #root(String) * @deprecated Use {@link #noRootPath()} instead */ @Deprecated T noRoot(); /** * Reset the root path of the response body so that you don't need to write the entire path for each expectation. * For example: * <p/> * <pre> * get(..).then(). * rootPath("x.y"). * body("firstName", is(..)). * body("lastName", is(..)). * noRootPath() * body("z.something1", is(..)). * body("w.something2", is(..)). * </pre> * <p/> * This is the same as calling <code>rootPath("")</code> but more expressive. * Note that this method is exactly the same as {@link #noRoot()} but slightly more expressive. * * @see #rootPath(String) */ T noRootPath(); /** * Append the given path to the root path of the response body so that you don't need to write the entire path for each expectation. * E.g. instead of writing: * <p/> * <pre> * get(..).then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * body("name.firstName", is(..)). * body("name.lastName", is(..)); * </pre> * <p/> * you can use a append root and do: * <pre> * get(..).then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * appendRoot("name"). * body("firstName", is(..)). * body("lastName", is(..)); * </pre> * * @param pathToAppend The root path to append. */ T appendRootPath(String pathToAppend); /** * @param pathToAppend The root path to append. * @see #appendRootPath(String) * @deprecated Use {@link #appendRootPath(String)} instead */ @Deprecated default T appendRoot(String pathToAppend) { return appendRootPath(pathToAppend); } /** * Append the given path to the root path with arguments supplied of the response body so that you don't need to write the entire path for each expectation. * This is mainly useful when you have parts of the path defined in variables. * E.g. instead of writing: * <p/> * <pre> * String namePath = "name"; * get(..).then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * body(namePath + "first", is(..)). * body(namePath + "last", is(..)). * </pre> * <p/> * you can use a append root and do: * <pre> * String namePath = "name"; * get(..).then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * appendRoot("%s", withArgs(namePath)). * body("first", is(..)). * body("last", is(..)). * </pre> * * @param pathToAppend The root path to append. The path and arguments follows the standard <a href="https://docs.oracle.com/javase/7/docs/api/java/util/Formatter.html#syntax">formatting syntax</a> of Java. */ T appendRootPath(String pathToAppend, List<Argument> arguments); /** * @param pathToAppend The root path to append. * @see #appendRootPath(String, List) * @deprecated Use {@link #appendRootPath(String, List)} instead */ @Deprecated default T appendRoot(String pathToAppend, List<Argument> arguments) { return appendRootPath(pathToAppend, arguments); } /** * Detach the given path from the root path. * E.g. instead of writing: * <p/> * <pre> * when(). * get(..); * then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * root("x"). * body("firstName", is(..)). * body("lastName", is(..)). * </pre> * <p/> * you can use a append root and do: * <pre> * when(). * get(..); * then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * detachRoot("y"). * body("firstName", is(..)). 
* body("lastName", is(..)). * </pre> * * @param pathToDetach The root path to detach. */ T detachRootPath(String pathToDetach); /** * @param pathToDetach The root path to detach. * @see #detachRootPath(String) * @deprecated Use {@link #detachRootPath(String)} instead */ @Deprecated default T detachRoot(String pathToDetach) { return detachRootPath(pathToDetach); } /** * Set the response content type to be <code>contentType</code>. * <p>Note that this will affect the way the response is decoded. * E,g. if you can't use JSON/XML matching (see e.g. {@link #body(String, org.hamcrest.Matcher, Object...)}) if you specify a * content-type of "text/plain". If you don't specify the response content type REST Assured will automatically try to * figure out which content type to use.</p> * * @param contentType The content type of the response. * @return the response specification */ T contentType(ContentType contentType); /** * Set the response content type to be <code>contentType</code>. * <p>Note that this will affect the way the response is decoded. * E,g. if you can't use JSON/XML matching (see e.g. {@link #body(String, org.hamcrest.Matcher, Object...)}) if you specify a * content-type of "text/plain". If you don't specify the response content type REST Assured will automatically try to * figure out which content type to use.</p> * * @param contentType The content type of the response. * @return the response specification */ T contentType(String contentType); /** * Validate the response content type to be <code>contentType</code>. * * @param contentType The expected content type of the response. * @return the response specification */ T contentType(Matcher<? super String> contentType); /** * Validate that the response body conforms to one or more Hamcrest matchers. E.g. * <pre> * // Validate that the response body (content) contains the string "winning-numbers" * get("/lotto").then().assertThat().body(containsString("winning-numbers")); * * // Validate that the response body (content) contains the string "winning-numbers" and "winners" * get("/lotto").then().assertThat().body(containsString("winning-numbers"), containsString("winners")); * </pre> * * @param matcher The hamcrest matcher that must response body must match. * @param additionalMatchers Optionally additional hamcrest matchers that must return <code>true</code>. * @return the response specification */ T body(Matcher<?> matcher, Matcher<?>... additionalMatchers); /** * Compare a path in the response body to something available in the response using arguments. * For example imagine that a resource "/x" returns the following JSON document: * <pre> * { * "userId" : "my-id", * "my.href" : "http://localhost:8080/my-id" * } * </pre> * you can then verify the href using: * <pre> * get("/x").then().body("%s.href", withArgs("my"), new ResponseAwareMatcher<Response>() { * public Matcher<?> matcher(Response response) { * return equalTo("http://localhost:8080/" + response.path("userId")); * } * }); * </pre> * Note that you can also use some of the predefined methods in {@link RestAssuredMatchers}. * * @param path The body path * @param responseAwareMatcher The {@link ResponseAwareMatcher} that creates the Hamcrest matcher. 
* @return the response specification * @see #body(String, ResponseAwareMatcher) * @see RestAssuredMatchers#endsWithPath(String) * @see RestAssuredMatchers#startsWithPath(String) * @see RestAssuredMatchers#containsPath(String) * @see RestAssuredMatchers#equalToPath(String) */ T body(String path, List<Argument> arguments, ResponseAwareMatcher<R> responseAwareMatcher); /** * Compare a path in the response body to something available in the response, for example another path. * For example imagine that a resource "/x" returns the following JSON document: * <pre> * { * "userId" : "my-id", * "href" : "http://localhost:8080/my-id" * } * </pre> * you can then verify the href using: * <pre> * get("/x").then().body("href", new ResponseAwareMatcher<Response>() { * public Matcher<?> matcher(Response response) { * return equalTo("http://localhost:8080/" + response.path("userId")); * } * }); * </pre> * Note that you can also use some of the predefined methods in {@link RestAssuredMatchers}. * * @param path The body path * @param responseAwareMatcher The {@link ResponseAwareMatcher} that creates the Hamcrest matcher. * @return the response specification * @see #body(String, java.util.List, ResponseAwareMatcher) * @see RestAssuredMatchers#endsWithPath(String) * @see RestAssuredMatchers#startsWithPath(String) * @see RestAssuredMatchers#containsPath(String) * @see RestAssuredMatchers#equalToPath(String) */ T body(String path, ResponseAwareMatcher<R> responseAwareMatcher); /** * Validate that the JSON or XML response body conforms to one or more Hamcrest matchers.<br> * <h3>JSON example</h3> * <p> * Assume that a GET request to "/lotto" returns a JSON response containing: * <pre> * { "lotto":{ * "lottoId":5, * "winning-numbers":[2,45,34,23,7,5,3], * "winners":[{ * "winnerId":23, * "numbers":[2,45,34,23,3,5] * },{ * "winnerId":54, * "numbers":[52,3,12,11,18,22] * }] * }} * </pre> * <p/> * You can verify that the lottoId is equal to 5 like this: * <pre> * get("/lotto").then().assertThat().body("lotto.lottoId", equalTo(5)); * </pre> * <p/> * You can also verify that e.g. one of the the winning numbers is 45. * <pre> * get("/lotto").then().assertThat().body("lotto.winning-numbers", hasItem(45)); * </pre> * <p/> * Or both at the same time: * <pre> * get("/lotto").then().assertThat().body("lotto.lottoId", equalTo(5)).and().body("lotto.winning-numbers", hasItem(45)); * </pre> * <p/> * or a slightly short version: * <pre> * get("/lotto").then().assertThat().body("lotto.lottoId", equalTo(5), "lotto.winning-numbers", hasItem(45)); * </pre> * </p> * <h3>XML example</h3> * <p> * Assume that a GET request to "/xml" returns a XML response containing: * <pre> * &lt;greeting&gt; * &lt;firstName&gt;John&lt;/firstName&gt; * &lt;lastName&gt;Doe&lt;/lastName&gt; * &lt;/greeting&gt; * </pre> * </p> * <p/> * You can now verify that the firstName is equal to "John" like this: * <pre> * get("/xml").then().assertThat().body("greeting.firstName", equalTo("John")); * </pre> * <p/> * To verify both the first name and last name you can do like this: * <pre> * get("/xml").then().assertThat().body("greeting.firstName", equalTo("John")).and().body("greeting.lastName", equalTo("Doe")); * </pre> * <p/> * Or the slightly shorter version of: * <pre> * get("/xml").then().assertThat().body("greeting.firstName", equalTo("John"), "greeting.lastName", equalTo("Doe")); * </pre> * <h3>Notes</h3> * <p> * Note that if the response body type is not of type <tt>application/xml</tt> or <tt>application/json</tt> you * <i>cannot</i> use this verification. 
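     * If the body is well-formed JSON or XML but is served under a different content-type, you can first register a
     * parser for that content-type (see {@link #parser(String, Parser)}). A minimal sketch, assuming a hypothetical
     * <tt>application/custom</tt> media type that actually carries the JSON lotto document above:
     * <pre>
     * get("/lotto").then().parser("application/custom", Parser.JSON).assertThat().body("lotto.lottoId", equalTo(5));
     * </pre>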
* </p> * <p/> * <p> * The only difference between the <code>content</code> and <code>body</code> methods are of syntactic nature. * </p> * * @param path The body path * @param matcher The hamcrest matcher that must response body must match. * @param additionalKeyMatcherPairs Optionally additional hamcrest matchers that must return <code>true</code>. * @return the response specification */ T body(String path, Matcher<?> matcher, Object... additionalKeyMatcherPairs); /** * Syntactic sugar, e.g. * <pre> * get("/something").then().assertThat().body(containsString("OK")).and().body(containsString("something else")); * </pre> * <p/> * is that same as: * <pre> * get("/something").then().assertThat().body(containsString("OK")).body(containsString("something else")); * </pre> * * @return the response specification */ T and(); /** * Syntactic sugar, e.g. * <pre> * get("/something").then().using().defaultParser(JSON).assertThat().body(containsString("OK")).and().body(containsString("something else")); * </pre> * <p/> * is that same as: * <pre> * get("/something").then().defaultParser(JSON).body(containsString("OK")).body(containsString("something else")); * </pre> * * @return the response specification */ T using(); /** * Syntactic sugar, e.g. * <pre> * get("/something").then().assertThat().body(containsString("OK")).and().body(containsString("something else")); * </pre> * <p/> * is that same as: * <pre> * get("/something").then().body(containsString("OK")).body(containsString("something else")); * </pre> * * @return the response specification */ T assertThat(); /** * Validate that the response matches an entire specification. * <pre> * ResponseSpecification responseSpec = new ResponseSpecBuilder().expectStatusCode(200).build(); * get("/something").then() * spec(responseSpec). * body("x.y.z", equalTo("something")); * </pre> * <p/> * This is useful when you want to reuse multiple different expectations in several tests. * <p/> * The specification passed to this method is merged with the current specification. Note that the supplied specification * can overwrite data in the current specification. The following settings are overwritten: * <ul> * <li>Content type</li> * <li>Root path</ * <li>Status code</li> * <li>Status line</li> * </ul> * The following settings are merged: * <ul> * <li>Response body expectations</li> * <li>Cookies</li> * <li>Headers</li> * </ul> * <p/> * @param responseSpecificationToMerge The specification to merge with. * @return the response specification */ T spec(ResponseSpecification responseSpecificationToMerge); /** * Register a content-type to be parsed using a predefined parser. E.g. let's say you want parse * content-type <tt>application/custom</tt> with the XML parser to be able to verify the response using the XML dot notations: * <pre> * get("/x").then().assertThat().body("document.child", equalsTo("something") * </pre> * Since <tt>application/custom</tt> is not registered to be processed by the XML parser by default you need to explicitly * tell REST Assured to use this parser before making the request: * <pre> * get("/x").then().parser("application/custom", Parser.XML).assertThat(). ..; * </pre> * <p/> * You can also specify by it for every response by using: * <pre> * RestAssured.registerParser("application/custom", Parser.XML); * </pre> * * @param contentType The content-type to register * @param parser The parser to use when verifying the response. 
*/ T parser(String contentType, Parser parser); /** * Register a default predefined parser that will be used if no other parser (registered or pre-defined) matches the response * content-type. E.g. let's say that for some reason no content-type is defined in the response but the content is nevertheless * JSON encoded. To be able to expect the content in REST Assured you need to set the default parser: * <pre> * get("/x").then().using().defaultParser(Parser.JSON).assertThat(). ..; * </pre> * <p/> * You can also specify it for every response by using: * <pre> * RestAssured.defaultParser(Parser.JSON); * </pre> * * @param parser The parser to use when verifying the response if no other parser is found for the response content-type. */ T defaultParser(Parser parser); /** * Extract values from the response or return the response instance itself. This is useful for example if you want to use values from the * response in sequent requests. For example given that the resource <code>title</code> returns the following JSON * <pre> * { * "title" : "My Title", * "_links": { * "self": { "href": "/title" }, * "next": { "href": "/title?page=2" } * } * } * </pre> * and you want to validate that content type is equal to <code>JSON</code> and the title is equal to <code>My Title</code> * but you also want to extract the link to the "next" title to use that in a subsequent request. This is how: * <pre> * String nextTitleLink = * given(). * param("param_name", "param_value"). * when(). * get("/title"). * then(). * contentType(JSON). * body("title", equalTo("My Title")). * extract(). * path("_links.next.href"); * * get(nextTitleLink). .. * </pre> * * @return An instance of {@link ExtractableResponse}. */ ExtractableResponse<R> extract(); /** * Returns the {@link ValidatableResponseLogSpec} that allows you to log different parts of the {@link Response}. * This is mainly useful for debug purposes when writing your tests. * * @return the validatable response log specification */ ValidatableResponseLogSpec<T, R> log(); /** * Validate that the response time (in milliseconds) matches the supplied <code>matcher</code>. For example: * <p/> * <pre> * when(). * get("/something"). * then(). * time(lessThan(2000)); * </pre> * <p/> * where <code>lessThan</code> is a Hamcrest matcher * * @return The {@link ValidatableResponse} instance. */ T time(Matcher<Long> matcher); /** * Validate that the response time matches the supplied <code>matcher</code> and time unit. For example: * <p/> * <pre> * when(). * get("/something"). * then(). * time(lessThan(2), TimeUnit.SECONDS); * </pre> * <p/> * where <code>lessThan</code> is a Hamcrest matcher * * @return The {@link ValidatableResponse} instance. */ T time(Matcher<Long> matcher, TimeUnit timeUnit); }
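
// Illustrative usage sketch only (not part of the interface above): it combines response-time validation with
// value extraction as described in the Javadoc of time(..) and extract(). The "/title" endpoint, the JSON layout
// and the 2000 ms threshold are assumptions borrowed from those Javadoc examples, not guarantees of any real service.
//
//     String nextTitleLink =
//             get("/title").then().
//                     statusCode(200).
//                     time(lessThan(2000L), TimeUnit.MILLISECONDS).
//                     body("title", equalTo("My Title")).
//                     extract().
//                     path("_links.next.href");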
rest-assured/src/main/java/io/restassured/response/ValidatableResponseOptions.java
/* * Copyright 2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.restassured.response; import io.restassured.http.ContentType; import io.restassured.matcher.DetailedCookieMatcher; import io.restassured.matcher.ResponseAwareMatcher; import io.restassured.matcher.RestAssuredMatchers; import io.restassured.parsing.Parser; import io.restassured.specification.Argument; import io.restassured.specification.ResponseSpecification; import org.hamcrest.Matcher; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.function.Function; /** * A validatable response of a request made by REST Assured. * <p> * Usage example: * <pre> * get("/lotto").then().body("lotto.lottoId", is(5)); * </pre> * </p> */ public interface ValidatableResponseOptions<T extends ValidatableResponseOptions<T, R>, R extends ResponseBody<R> & ResponseOptions<R>> { /** * Same as {@link #body(String, org.hamcrest.Matcher, Object...)} expect that you can pass arguments to the key. This * is useful in situations where you have e.g. pre-defined variables that constitutes the key: * <pre> * String someSubPath = "else"; * int index = 1; * get("/x").then().body("something.%s[%d]", withArgs(someSubPath, index), equalTo("some value")). .. * </pre> * <p/> * or if you have complex root paths and don't wish to duplicate the path for small variations: * <pre> * get("/x").then(). * root("filters.filterConfig[%d].filterConfigGroups.find { it.name == 'Gold' }.includes"). * body(withArgs(0), hasItem("first")). * body(withArgs(1), hasItem("second")). * .. * </pre> * <p/> * The key and arguments follows the standard <a href="https://docs.oracle.com/javase/7/docs/api/java/util/Formatter.html#syntax">formatting syntax</a> of Java. * <p> * Note that <code>withArgs</code> can be statically imported from the <code>io.restassured.RestAssured</code> class. * </p> * * @param path The body path * @param arguments The arguments to apply to the key * @param matcher The hamcrest matcher that must response body must match. * @param additionalKeyMatcherPairs Optionally additional hamcrest matchers that must return <code>true</code>. * @return the response specification * @see #body(String, org.hamcrest.Matcher, Object...) */ T body(String path, List<Argument> arguments, Matcher matcher, Object... additionalKeyMatcherPairs); /** * This as special kind of expectation that is mainly useful when you've specified a root path with an argument placeholder. * For example: * <pre> * get(..).then(). * root("x.%s"). // Root path with a placeholder * body(withArgs("firstName"), equalTo(..)). * body(withArgs("lastName"), equalTo(..)). * </pre> * <p/> * Note that this is the same as doing: * <pre> * get(..).then(). * root("x.%s"). // Root path with a placeholder * content(withArgs("firstName"), equalTo(..)). * content(withArgs("lastName"), equalTo(..)). * </pre> * <p/> * * @param arguments The arguments to apply to the root path. * @param matcher The hamcrest matcher that must response body must match. 
* @param additionalKeyMatcherPairs Optionally additional hamcrest matchers that must return <code>true</code>. * @return the response specification * @see #body(String, org.hamcrest.Matcher, Object...) */ T body(List<Argument> arguments, Matcher matcher, Object... additionalKeyMatcherPairs); /** * Compare a path in the response body to something available in the response using arguments when root path is used. * For example imagine that a resource "/x" returns the following JSON document: * <pre> * { * "data" : { * "user1" : { * "userId" : "my-id1", * "href" : "http://localhost:8080/my-id1" * }, * "user2" : { * "userId" : "my-id2", * "href" : "http://localhost:8080/my-id2" * }, * } * } * </pre> * you can then verify the href using: * <pre> * when(). * get("/x"). * then(). * root("data.%s"). * body(withArgs("user1"), new ResponseAwareMatcher<Response>() { * public Matcher<?> matcher(Response response) { * return equalTo("http://localhost:8080/" + response.path("userId")); * } * }); * </pre> * Note that you can also use some of the predefined methods in {@link RestAssuredMatchers}. * * @param responseAwareMatcher The {@link ResponseAwareMatcher} that creates the Hamcrest matcher. * @return the response specification * @see #body(String, ResponseAwareMatcher) * @see RestAssuredMatchers#endsWithPath(String) * @see RestAssuredMatchers#startsWithPath(String) * @see RestAssuredMatchers#containsPath(String) * @see RestAssuredMatchers#equalToPath(String) */ T body(List<Argument> arguments, ResponseAwareMatcher<R> responseAwareMatcher); /** * Validate that the response status code matches the given Hamcrest matcher. E.g. * <pre> * get("/something").then().assertThat().statusCode(equalTo(200)); * </pre> * * @param expectedStatusCode The expected status code matcher. * @return the response specification */ T statusCode(Matcher<? super Integer> expectedStatusCode); /** * Validate that the response status code matches an integer. E.g. * <pre> * get("/something").then().assertThat().statusCode(200); * </pre> * <p/> * This is the same as: * <pre> * get("/something").then().assertThat().statusCode(equalTo(200)); * </pre> * * @param expectedStatusCode The expected status code. * @return the response specification */ T statusCode(int expectedStatusCode); /** * Validate that the response status line matches the given Hamcrest matcher. E.g. * <pre> * expect().statusLine(equalTo("No Content")).when().get("/something"); * </pre> * * @param expectedStatusLine The expected status line matcher. * @return the response specification */ T statusLine(Matcher<? super String> expectedStatusLine); /** * Validate that the response status line matches the given String. E.g. * <pre> * expect().statusLine("No Content").when().get("/something"); * </pre> * <p/> * This is the same as: * <pre> * expect().statusLine(equalTo("No Content")).when().get("/something"); * </pre> * * @param expectedStatusLine The expected status line. * @return the response specification */ T statusLine(String expectedStatusLine); /** * Validate that response headers matches those specified in a Map. * <p> * E.g. 
expect that the response of the GET request to "/something" contains header <tt>headerName1=headerValue1</tt> * and <tt>headerName2=headerValue2</tt>: * <pre> * Map expectedHeaders = new HashMap(); * expectedHeaders.put("headerName1", "headerValue1")); * expectedHeaders.put("headerName2", "headerValue2"); * * get("/something").then().assertThat().headers(expectedHeaders); * </pre> * </p> * <p/> * <p> * You can also use Hamcrest matchers: * <pre> * Map expectedHeaders = new HashMap(); * expectedHeaders.put("Content-Type", containsString("charset=UTF-8")); * expectedHeaders.put("Content-Length", "160"); * * get("/something").then().assertThat().headers(expectedHeaders); * </pre> * </p> * * @param expectedHeaders The Map of expected response headers * @return the response specification */ T headers(Map<String, ?> expectedHeaders); /** * Validate that response headers matches the supplied headers and values. * <p> * E.g. expect that the response of the GET request to "/something" contains header <tt>Pragma=no-cache</tt> * and <tt>Content-Encoding=gzip</tt>: * <pre> * get("/something").then().assertThat().headers("Pragma", "no-cache", "Content-Encoding", "gzip"); * </pre> * </p> * <p/> * <p> * You can also use Hamcrest matchers: * <pre> * get("/something").then().assertThat().headers("Content-Type", containsString("application/json"), "Pragma", equalsTo("no-cache")); * </pre> * <p/> * and you can even mix string matching and hamcrest matching: * <pre> * get("/something").then().assertThat().headers("Content-Type", containsString("application/json"), "Pragma", "no-cache"); * </pre> * </p> * * @param firstExpectedHeaderName The name of the first header * @param firstExpectedHeaderValue The value of the first header * @param expectedHeaders A list of expected "header name" - "header value" pairs. * @return the response specification */ T headers(String firstExpectedHeaderName, Object firstExpectedHeaderValue, Object... expectedHeaders); /** * Validate that a response header matches the supplied header name and hamcrest matcher. * <p> * E.g. expect that the response of the GET request to "/something" contains header <tt>Pragma=no-cache</tt>: * <pre> * get("/something").then().assertThat().header("Pragma", containsString("no")); * </pre> * </p> * <p/> * <p> * You can also expect several headers: * <pre> * get("/something").then().assertThat().header("Pragma", equalsTo("no-cache")).and().header("Content-Encoding", containsString("zip")); * </pre> * Also take a look at {@link #headers(String, Object, Object...)} )} for a short version of passing multiple headers. * </p> * * @param headerName The name of the expected header * @param expectedValueMatcher The Hamcrest matcher that must conform to the value * @return the response specification */ T header(String headerName, Matcher<?> expectedValueMatcher); /** * Compare a header in the response to something else available in the response. 
* <p> * For example imagine that a POST to resource "/x" returns "201 Created" and sets a Location header * that should end with "/x/{id}" where <code>{id}</code> is present in the response body: * <pre> * { "id" : 5 } * </pre> * To verify that the Location header ends with "/x/{id}" you can do like this: * <p> * <pre> * given().param("id", 1).body(..).post("/x").then().assertThat().header("Location", response -> endsWith("/x/" + response.path("id"))); * </pre> * </p> * <p/> * <p> * </p> * * @param headerName The name of the expected header * @param expectedValueMatcher The Hamcrest matcher that must conform to the value * @return the response specification */ T header(String headerName, ResponseAwareMatcher<R> expectedValueMatcher); /** * Expect that a response header matches the supplied header name and hamcrest matcher using a mapping function. * <p> * E.g. expect that the response of the GET request to "/something" contains header <tt>Content-Length: 500</tt> and you want to * validate that the length must always be less than 600: * <pre> * when(). * get("/something"). * then(). * header("Content-Length", Integer::parseInt, lessThan(600)); * </pre> * </p> * * @param headerName The name of the expected header * @param mappingFunction Map the header to another value type before exposing it to the Hamcrest matcher * @param expectedValueMatcher The Hamcrest matcher that must conform to the value * @return the response specification */ <V> T header(String headerName, Function<String, V> mappingFunction, Matcher<? super V> expectedValueMatcher); /** * Validate that a response header matches the supplied name and value. * <p> * E.g. expect that the response of the GET request to "/something" contains header <tt>Pragma=no-cache</tt>: * <pre> * get("/something").then().assertThat().header("Pragma", "no-cache"); * </pre> * </p> * <p/> * <p> * You can also expect several headers: * <pre> * get("/something").then().assertThat().header("Pragma", "no-cache").and().header("Content-Encoding", "gzip"); * </pre> * Also take a look at {@link #headers(String, Object, Object...)} for a short version of passing multiple headers. * </p> * * @param headerName The name of the expected header * @param expectedValue The value of the expected header * @return the response specification */ T header(String headerName, String expectedValue); /** * Validate that response cookies matches those specified in a Map. * <p> * E.g. expect that the response of the GET request to "/something" contains cookies <tt>cookieName1=cookieValue1</tt> * and <tt>cookieName2=cookieValue2</tt>: * <pre> * Map expectedCookies = new HashMap(); * expectedCookies.put("cookieName1", "cookieValue1")); * expectedCookies.put("cookieName2", "cookieValue2"); * * get("/something").then().assertThat().cookies(expectedCookies); * </pre> * </p> * <p/> * <p> * You can also use Hamcrest matchers: * <pre> * Map expectedCookies = new HashMap(); * expectedCookies.put("cookieName1", containsString("Value1")); * expectedCookies.put("cookieName2", "cookieValue2"); * * get("/something").then().assertThat().cookies(expectedCookies); * </pre> * </p> * * @param expectedCookies A Map of expected response cookies * @return the response specification */ T cookies(Map<String, ?> expectedCookies); /** * Validate that a cookie exist in the response, regardless of value (it may have no value at all). 
* * @param cookieName the cookie to validate that it exists * @return the response specification */ T cookie(String cookieName); /** * Validate that response cookies matches the supplied cookie names and values. * <p> * E.g. expect that the response of the GET request to "/something" contains cookies <tt>cookieName1=cookieValue1</tt> * and <tt>cookieName2=cookieValue2</tt>: * <pre> * get("/something").then().assertThat().cookies("cookieName1", "cookieValue1", "cookieName2", "cookieValue2"); * </pre> * </p> * <p/> * <p> * You can also use Hamcrest matchers: * <pre> * get("/something").then().assertThat().cookies("cookieName1", containsString("Value1"), "cookieName2", equalsTo("cookieValue2")); * </pre> * <p/> * and you can even mix string matching and hamcrest matching: * <pre> * get("/something").then().assertThat().cookies("cookieName1", containsString("Value1"), "cookieName2", "cookieValue2"); * </pre> * </p> * * @param firstExpectedCookieName The name of the first cookie * @param firstExpectedCookieValue The value of the first cookie * @param expectedCookieNameValuePairs A list of expected "cookie name" - "cookie value" pairs. * @return the response specification */ T cookies(String firstExpectedCookieName, Object firstExpectedCookieValue, Object... expectedCookieNameValuePairs); /** * Validate that a response cookie matches the supplied cookie name and hamcrest matcher. * <p> * E.g. expect that the response of the GET request to "/something" contain cookie <tt>cookieName1=cookieValue1</tt> * <pre> * get("/something").then().assertThat().cookie("cookieName1", containsString("Value1")); * </pre> * </p> * <p/> * <p> * You can also expect several cookies: * <pre> * get("/something").then().assertThat().cookie("cookieName1", equalsTo("cookieValue1")).and().cookie("cookieName2", containsString("Value2")); * </pre> * Also take a look at {@link #cookies(String, Object, Object...)} for a short version of passing multiple cookies. * </p> * * @param cookieName The name of the expected cookie * @param expectedValueMatcher The Hamcrest matcher that must conform to the value * @return the response specification */ T cookie(String cookieName, Matcher<?> expectedValueMatcher); /** * Validate that a detailed response cookie matches the supplied cookie name and hamcrest matcher (see {@link DetailedCookieMatcher}). * <p> * E.g. expect that the response of the GET request to "/something" contain cookie <tt>cookieName1=cookieValue1</tt> * <pre> * get("/something").then().assertThat() * .detailedCookie("cookieName1", detailedCookie().value("cookieValue1").secured(true)); * </pre> * </p> * <p/> * <p> * You can also expect several cookies: * <pre> * get("/something").then().assertThat().detailedCookie("cookieName1", detailedCookie().value("cookieValue1").secured(true)) * .and().detailedCookie("cookieName2", detailedCookie().value("cookieValue2").secured(false)); * </pre> * </p> * * @param cookieName The name of the expected cookie * @param detailedCookieMatcher The Hamcrest matcher that must conform to the cookie * @return the response specification */ T cookie(String cookieName, DetailedCookieMatcher detailedCookieMatcher); /** * Validate that a response cookie matches the supplied name and value. * <p> * E.g. 
expect that the response of the GET request to "/something" contain cookie <tt>cookieName1=cookieValue1</tt>: * <pre> * get("/something").then().assertThat().cookie("cookieName1", "cookieValue1"); * </pre> * </p> * <p/> * <p> * You can also expect several cookies: * <pre> * get("/something").then().assertThat().cookie("cookieName1", "cookieValue1").and().cookie("cookieName2", "cookieValue2"); * </pre> * Also take a look at {@link #cookies(String, Object, Object...)} for a short version of passing multiple cookies. * </p> * * @param cookieName The name of the expected cookie * @param expectedValue The value of the expected cookie * @return the response specification */ T cookie(String cookieName, Object expectedValue); /** * Set the root path of the response body so that you don't need to write the entire path for each expectation. * E.g. instead of writing: * <p/> * <pre> * get(..).then(). * body("x.y.firstName", is(..)). * body("x.y.lastName", is(..)). * body("x.y.age", is(..)). * body("x.y.gender", is(..)). * </pre> * <p/> * you can use a root path and do: * <pre> * get(..).then(). * rootPath("x.y"). * body("firstName", is(..)). * body("lastName", is(..)). * body("age", is(..)). * body("gender", is(..)); * </pre> * <p/> * Note that this method is exactly the same as {@link #root(String)}. * * @param rootPath The root path to use. */ T rootPath(String rootPath); /** * Set the root path with arguments of the response body so that you don't need to write the entire path for each expectation. * <p/> * Note that this method is exactly the same as {@link #root(String, java.util.List)}. * * @param rootPath The root path to use. * @param arguments A list of arguments. The path and arguments follows the standard <a href="https://docs.oracle.com/javase/7/docs/api/java/util/Formatter.html#syntax">formatting syntax</a> of Java. * @see #rootPath(String) */ T rootPath(String rootPath, List<Argument> arguments); /** * Set the root path with arguments of the response body so that you don't need to write the entire path for each expectation. * <p/> * Note that this method is exactly the same as {@link #rootPath(String, java.util.List)}. * * @param rootPath The root path to use. * @param arguments The list of substitution arguments. The path and arguments follows the standard <a href="https://docs.oracle.com/javase/7/docs/api/java/util/Formatter.html#syntax">formatting syntax</a> of Java.. * @see #rootPath(String) * @deprecated Use {@link #rootPath(String, List)} instead */ @Deprecated T root(String rootPath, List<Argument> arguments); /** * Set the root path of the response body so that you don't need to write the entire path for each expectation. * E.g. instead of writing: * <p/> * <pre> * get(..).then(). * body("x.y.firstName", is(..)). * body("x.y.lastName", is(..)). * body("x.y.age", is(..)). * body("x.y.gender", is(..)); * </pre> * <p/> * you can use a root and do: * <pre> * get(..).then(). * root("x.y"). * body("firstName", is(..)). * body("lastName", is(..)). * body("age", is(..)). * body("gender", is(..)). * </pre> * <p/> * Note that this method is exactly the same as {@link #rootPath(String)} but slightly shorter. * * @param rootPath The root path to use. * @deprecated Use {@link #rootPath(String)} instead */ @Deprecated T root(String rootPath); /** * Reset the root path of the response body so that you don't need to write the entire path for each expectation. * For example: * <p/> * <pre> * get(..).then(). * root("x.y"). * body("firstName", is(..)). * body("lastName", is(..)). 
* noRoot() * body("z.something1", is(..)). * body("w.something2", is(..)); * </pre> * <p/> * This is the same as calling <code>rootPath("")</code> but more expressive. * Note that this method is exactly the same as {@link #noRootPath()} but slightly shorter. * * @see #root(String) * @deprecated Use {@link #noRootPath()} instead */ @Deprecated T noRoot(); /** * Reset the root path of the response body so that you don't need to write the entire path for each expectation. * For example: * <p/> * <pre> * get(..).then(). * rootPath("x.y"). * body("firstName", is(..)). * body("lastName", is(..)). * noRootPath() * body("z.something1", is(..)). * body("w.something2", is(..)). * </pre> * <p/> * This is the same as calling <code>rootPath("")</code> but more expressive. * Note that this method is exactly the same as {@link #noRoot()} but slightly more expressive. * * @see #rootPath(String) */ T noRootPath(); /** * Append the given path to the root path of the response body so that you don't need to write the entire path for each expectation. * E.g. instead of writing: * <p/> * <pre> * get(..).then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * body("name.firstName", is(..)). * body("name.lastName", is(..)); * </pre> * <p/> * you can use a append root and do: * <pre> * get(..).then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * appendRoot("name"). * body("firstName", is(..)). * body("lastName", is(..)); * </pre> * * @param pathToAppend The root path to append. */ T appendRootPath(String pathToAppend); /** * @param pathToAppend The root path to append. * @see #appendRootPath(String) * @deprecated Use {@link #appendRootPath(String)} instead */ @Deprecated default T appendRoot(String pathToAppend) { return appendRootPath(pathToAppend); } /** * Append the given path to the root path with arguments supplied of the response body so that you don't need to write the entire path for each expectation. * This is mainly useful when you have parts of the path defined in variables. * E.g. instead of writing: * <p/> * <pre> * String namePath = "name"; * get(..).then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * body(namePath + "first", is(..)). * body(namePath + "last", is(..)). * </pre> * <p/> * you can use a append root and do: * <pre> * String namePath = "name"; * get(..).then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * appendRoot("%s", withArgs(namePath)). * body("first", is(..)). * body("last", is(..)). * </pre> * * @param pathToAppend The root path to append. The path and arguments follows the standard <a href="https://docs.oracle.com/javase/7/docs/api/java/util/Formatter.html#syntax">formatting syntax</a> of Java. */ T appendRootPath(String pathToAppend, List<Argument> arguments); /** * @param pathToAppend The root path to append. * @see #appendRootPath(String, List) * @deprecated Use {@link #appendRootPath(String, List)} instead */ @Deprecated default T appendRoot(String pathToAppend, List<Argument> arguments) { return appendRootPath(pathToAppend, arguments); } /** * Detach the given path from the root path. * E.g. instead of writing: * <p/> * <pre> * when(). * get(..); * then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * root("x"). * body("firstName", is(..)). * body("lastName", is(..)). * </pre> * <p/> * you can use a append root and do: * <pre> * when(). * get(..); * then(). * root("x.y"). * body("age", is(..)). * body("gender", is(..)). * detachRoot("y"). * body("firstName", is(..)). 
* body("lastName", is(..)). * </pre> * * @param pathToDetach The root path to detach. */ T detachRootPath(String pathToDetach); /** * @param pathToDetach The root path to detach. * @see #detachRootPath(String) * @deprecated Use {@link #detachRootPath(String)} instead */ @Deprecated default T detachRoot(String pathToDetach) { return appendRootPath(pathToDetach); } /** * Set the response content type to be <code>contentType</code>. * <p>Note that this will affect the way the response is decoded. * E,g. if you can't use JSON/XML matching (see e.g. {@link #body(String, org.hamcrest.Matcher, Object...)}) if you specify a * content-type of "text/plain". If you don't specify the response content type REST Assured will automatically try to * figure out which content type to use.</p> * * @param contentType The content type of the response. * @return the response specification */ T contentType(ContentType contentType); /** * Set the response content type to be <code>contentType</code>. * <p>Note that this will affect the way the response is decoded. * E,g. if you can't use JSON/XML matching (see e.g. {@link #body(String, org.hamcrest.Matcher, Object...)}) if you specify a * content-type of "text/plain". If you don't specify the response content type REST Assured will automatically try to * figure out which content type to use.</p> * * @param contentType The content type of the response. * @return the response specification */ T contentType(String contentType); /** * Validate the response content type to be <code>contentType</code>. * * @param contentType The expected content type of the response. * @return the response specification */ T contentType(Matcher<? super String> contentType); /** * Validate that the response body conforms to one or more Hamcrest matchers. E.g. * <pre> * // Validate that the response body (content) contains the string "winning-numbers" * get("/lotto").then().assertThat().body(containsString("winning-numbers")); * * // Validate that the response body (content) contains the string "winning-numbers" and "winners" * get("/lotto").then().assertThat().body(containsString("winning-numbers"), containsString("winners")); * </pre> * * @param matcher The hamcrest matcher that must response body must match. * @param additionalMatchers Optionally additional hamcrest matchers that must return <code>true</code>. * @return the response specification */ T body(Matcher<?> matcher, Matcher<?>... additionalMatchers); /** * Compare a path in the response body to something available in the response using arguments. * For example imagine that a resource "/x" returns the following JSON document: * <pre> * { * "userId" : "my-id", * "my.href" : "http://localhost:8080/my-id" * } * </pre> * you can then verify the href using: * <pre> * get("/x").then().body("%s.href", withArgs("my"), new ResponseAwareMatcher<Response>() { * public Matcher<?> matcher(Response response) { * return equalTo("http://localhost:8080/" + response.path("userId")); * } * }); * </pre> * Note that you can also use some of the predefined methods in {@link RestAssuredMatchers}. * * @param path The body path * @param responseAwareMatcher The {@link ResponseAwareMatcher} that creates the Hamcrest matcher. 
* @return the response specification * @see #body(String, ResponseAwareMatcher) * @see RestAssuredMatchers#endsWithPath(String) * @see RestAssuredMatchers#startsWithPath(String) * @see RestAssuredMatchers#containsPath(String) * @see RestAssuredMatchers#equalToPath(String) */ T body(String path, List<Argument> arguments, ResponseAwareMatcher<R> responseAwareMatcher); /** * Compare a path in the response body to something available in the response, for example another path. * For example imagine that a resource "/x" returns the following JSON document: * <pre> * { * "userId" : "my-id", * "href" : "http://localhost:8080/my-id" * } * </pre> * you can then verify the href using: * <pre> * get("/x").then().body("href", new ResponseAwareMatcher<Response>() { * public Matcher<?> matcher(Response response) { * return equalTo("http://localhost:8080/" + response.path("userId")); * } * }); * </pre> * Note that you can also use some of the predefined methods in {@link RestAssuredMatchers}. * * @param path The body path * @param responseAwareMatcher The {@link ResponseAwareMatcher} that creates the Hamcrest matcher. * @return the response specification * @see #body(String, java.util.List, ResponseAwareMatcher) * @see RestAssuredMatchers#endsWithPath(String) * @see RestAssuredMatchers#startsWithPath(String) * @see RestAssuredMatchers#containsPath(String) * @see RestAssuredMatchers#equalToPath(String) */ T body(String path, ResponseAwareMatcher<R> responseAwareMatcher); /** * Validate that the JSON or XML response body conforms to one or more Hamcrest matchers.<br> * <h3>JSON example</h3> * <p> * Assume that a GET request to "/lotto" returns a JSON response containing: * <pre> * { "lotto":{ * "lottoId":5, * "winning-numbers":[2,45,34,23,7,5,3], * "winners":[{ * "winnerId":23, * "numbers":[2,45,34,23,3,5] * },{ * "winnerId":54, * "numbers":[52,3,12,11,18,22] * }] * }} * </pre> * <p/> * You can verify that the lottoId is equal to 5 like this: * <pre> * get("/lotto").then().assertThat().body("lotto.lottoId", equalTo(5)); * </pre> * <p/> * You can also verify that e.g. one of the the winning numbers is 45. * <pre> * get("/lotto").then().assertThat().body("lotto.winning-numbers", hasItem(45)); * </pre> * <p/> * Or both at the same time: * <pre> * get("/lotto").then().assertThat().body("lotto.lottoId", equalTo(5)).and().body("lotto.winning-numbers", hasItem(45)); * </pre> * <p/> * or a slightly short version: * <pre> * get("/lotto").then().assertThat().body("lotto.lottoId", equalTo(5), "lotto.winning-numbers", hasItem(45)); * </pre> * </p> * <h3>XML example</h3> * <p> * Assume that a GET request to "/xml" returns a XML response containing: * <pre> * &lt;greeting&gt; * &lt;firstName&gt;John&lt;/firstName&gt; * &lt;lastName&gt;Doe&lt;/lastName&gt; * &lt;/greeting&gt; * </pre> * </p> * <p/> * You can now verify that the firstName is equal to "John" like this: * <pre> * get("/xml").then().assertThat().body("greeting.firstName", equalTo("John")); * </pre> * <p/> * To verify both the first name and last name you can do like this: * <pre> * get("/xml").then().assertThat().body("greeting.firstName", equalTo("John")).and().body("greeting.lastName", equalTo("Doe")); * </pre> * <p/> * Or the slightly shorter version of: * <pre> * get("/xml").then().assertThat().body("greeting.firstName", equalTo("John"), "greeting.lastName", equalTo("Doe")); * </pre> * <h3>Notes</h3> * <p> * Note that if the response body type is not of type <tt>application/xml</tt> or <tt>application/json</tt> you * <i>cannot</i> use this verification. 
 * </p>
 * <p/>
 * <p>
 * The only difference between the <code>content</code> and <code>body</code> methods is of syntactic nature.
 * </p>
 *
 * @param path The body path
 * @param matcher The hamcrest matcher that the response body must match.
 * @param additionalKeyMatcherPairs Optionally additional hamcrest matchers that must return <code>true</code>.
 * @return the response specification
 */
T body(String path, Matcher<?> matcher, Object... additionalKeyMatcherPairs);

/**
 * Syntactic sugar, e.g.
 * <pre>
 * get("/something").then().assertThat().body(containsString("OK")).and().body(containsString("something else"));
 * </pre>
 * <p/>
 * is the same as:
 * <pre>
 * get("/something").then().assertThat().body(containsString("OK")).body(containsString("something else"));
 * </pre>
 *
 * @return the response specification
 */
T and();

/**
 * Syntactic sugar, e.g.
 * <pre>
 * get("/something").then().using().defaultParser(JSON).assertThat().body(containsString("OK")).and().body(containsString("something else"));
 * </pre>
 * <p/>
 * is the same as:
 * <pre>
 * get("/something").then().defaultParser(JSON).body(containsString("OK")).body(containsString("something else"));
 * </pre>
 *
 * @return the response specification
 */
T using();

/**
 * Syntactic sugar, e.g.
 * <pre>
 * get("/something").then().assertThat().body(containsString("OK")).and().body(containsString("something else"));
 * </pre>
 * <p/>
 * is the same as:
 * <pre>
 * get("/something").then().body(containsString("OK")).body(containsString("something else"));
 * </pre>
 *
 * @return the response specification
 */
T assertThat();

/**
 * Validate that the response matches an entire specification.
 * <pre>
 * ResponseSpecification responseSpec = new ResponseSpecBuilder().expectStatusCode(200).build();
 * get("/something").then().
 *     spec(responseSpec).
 *     body("x.y.z", equalTo("something"));
 * </pre>
 * <p/>
 * This is useful when you want to reuse multiple different expectations in several tests.
 * <p/>
 * The specification passed to this method is merged with the current specification. Note that the supplied specification
 * can overwrite data in the current specification. The following settings are overwritten:
 * <ul>
 * <li>Content type</li>
 * <li>Root path</li>
 * <li>Status code</li>
 * <li>Status line</li>
 * </ul>
 * The following settings are merged:
 * <ul>
 * <li>Response body expectations</li>
 * <li>Cookies</li>
 * <li>Headers</li>
 * </ul>
 * <p/>
 * @param responseSpecificationToMerge The specification to merge with.
 * @return the response specification
 */
T spec(ResponseSpecification responseSpecificationToMerge);

/**
 * Register a content-type to be parsed using a predefined parser. E.g. let's say you want to parse
 * content-type <tt>application/custom</tt> with the XML parser to be able to verify the response using the XML dot notations:
 * <pre>
 * get("/x").then().assertThat().body("document.child", equalTo("something"));
 * </pre>
 * Since <tt>application/custom</tt> is not registered to be processed by the XML parser by default you need to explicitly
 * tell REST Assured to use this parser before making the request:
 * <pre>
 * get("/x").then().parser("application/custom", Parser.XML).assertThat(). ..;
 * </pre>
 * <p/>
 * You can also register it for every response by using:
 * <pre>
 * RestAssured.registerParser("application/custom", Parser.XML);
 * </pre>
 *
 * @param contentType The content-type to register
 * @param parser The parser to use when verifying the response.
*/ T parser(String contentType, Parser parser); /** * Register a default predefined parser that will be used if no other parser (registered or pre-defined) matches the response * content-type. E.g. let's say that for some reason no content-type is defined in the response but the content is nevertheless * JSON encoded. To be able to expect the content in REST Assured you need to set the default parser: * <pre> * get("/x").then().using().defaultParser(Parser.JSON).assertThat(). ..; * </pre> * <p/> * You can also specify it for every response by using: * <pre> * RestAssured.defaultParser(Parser.JSON); * </pre> * * @param parser The parser to use when verifying the response if no other parser is found for the response content-type. */ T defaultParser(Parser parser); /** * Extract values from the response or return the response instance itself. This is useful for example if you want to use values from the * response in sequent requests. For example given that the resource <code>title</code> returns the following JSON * <pre> * { * "title" : "My Title", * "_links": { * "self": { "href": "/title" }, * "next": { "href": "/title?page=2" } * } * } * </pre> * and you want to validate that content type is equal to <code>JSON</code> and the title is equal to <code>My Title</code> * but you also want to extract the link to the "next" title to use that in a subsequent request. This is how: * <pre> * String nextTitleLink = * given(). * param("param_name", "param_value"). * when(). * get("/title"). * then(). * contentType(JSON). * body("title", equalTo("My Title")). * extract(). * path("_links.next.href"); * * get(nextTitleLink). .. * </pre> * * @return An instance of {@link ExtractableResponse}. */ ExtractableResponse<R> extract(); /** * Returns the {@link ValidatableResponseLogSpec} that allows you to log different parts of the {@link Response}. * This is mainly useful for debug purposes when writing your tests. * * @return the validatable response log specification */ ValidatableResponseLogSpec<T, R> log(); /** * Validate that the response time (in milliseconds) matches the supplied <code>matcher</code>. For example: * <p/> * <pre> * when(). * get("/something"). * then(). * time(lessThan(2000)); * </pre> * <p/> * where <code>lessThan</code> is a Hamcrest matcher * * @return The {@link ValidatableResponse} instance. */ T time(Matcher<Long> matcher); /** * Validate that the response time matches the supplied <code>matcher</code> and time unit. For example: * <p/> * <pre> * when(). * get("/something"). * then(). * time(lessThan(2), TimeUnit.SECONDS); * </pre> * <p/> * where <code>lessThan</code> is a Hamcrest matcher * * @return The {@link ValidatableResponse} instance. */ T time(Matcher<Long> matcher, TimeUnit timeUnit); }
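The interface above documents rootPath, appendRootPath and detachRootPath only in isolation. The following is a small sketch, not taken from the record, that chains them in one validation flow; the endpoint "/store", the JSON shape and the expected values are hypothetical, and the static imports are the usual io.restassured.RestAssured and org.hamcrest.Matchers ones.

// Sketch only: "/store" and the response shape are made up for illustration.
import static io.restassured.RestAssured.get;
import static org.hamcrest.Matchers.equalTo;

public class RootPathSketch {
    public static void main(String[] args) {
        // Assumes a server at the default base URI returning
        // { "store": { "book": { "title": "My Title", "price": 42, "author": { "firstName": "John" } } } }
        get("/store").
        then().
            rootPath("store.book").             // body() paths below are relative to store.book
            body("title", equalTo("My Title")).
            appendRootPath("author").           // now relative to store.book.author
            body("firstName", equalTo("John")).
            detachRootPath("author").           // back to store.book again
            body("price", equalTo(42));
    }
}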
Fix default implementation of deprecated method detachRoot (#1503) (#1504) Co-authored-by: Guillaume Boucherie <0da8326ca790b8e23fb2821eb85ba7fa2df5f2e5@universign.com>
rest-assured/src/main/java/io/restassured/response/ValidatableResponseOptions.java
Fix default implementation of deprecated method detachRoot (#1503) (#1504)
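The subject above says the commit fixes the default implementation of the deprecated detachRoot(String); in the old contents that default delegates to appendRootPath(pathToDetach). Below is a minimal sketch of the corrected default, inferred from the subject and from the declared detachRootPath(String) method rather than copied from the new file.

/**
 * @param pathToDetach The root path to detach.
 * @see #detachRootPath(String)
 * @deprecated Use {@link #detachRootPath(String)} instead
 */
@Deprecated
default T detachRoot(String pathToDetach) {
    // Delegate to the non-deprecated method. The old default mistakenly
    // delegated to appendRootPath(pathToDetach), which appends instead of detaching.
    return detachRootPath(pathToDetach);
}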
Java
apache-2.0
aed025d744b62849e16ab5a5da6ac3dca23cc1d2
0
hazelcast/hazelcast-simulator,hazelcast/hazelcast-simulator,Danny-Hazelcast/hazelcast-stabilizer,Donnerbart/hazelcast-simulator,hasancelik/hazelcast-stabilizer,Danny-Hazelcast/hazelcast-stabilizer,jerrinot/hazelcast-stabilizer,fengshao0907/hazelcast-simulator,fengshao0907/hazelcast-simulator,pveentjer/hazelcast-simulator,jerrinot/hazelcast-stabilizer,gAmUssA/hazelcast-simulator,gAmUssA/hazelcast-simulator,hasancelik/hazelcast-stabilizer,pveentjer/hazelcast-simulator,Donnerbart/hazelcast-simulator,hazelcast/hazelcast-simulator
package com.hazelcast.simulator.tests.external; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.ICountDownLatch; import com.hazelcast.core.IList; import com.hazelcast.logging.ILogger; import com.hazelcast.logging.Logger; import com.hazelcast.simulator.probes.probes.IntervalProbe; import com.hazelcast.simulator.probes.probes.SimpleProbe; import com.hazelcast.simulator.probes.probes.impl.HdrLatencyDistributionProbe; import com.hazelcast.simulator.probes.probes.impl.HdrLatencyDistributionResult; import com.hazelcast.simulator.test.TestContext; import com.hazelcast.simulator.test.annotations.Run; import com.hazelcast.simulator.test.annotations.Setup; import com.hazelcast.util.EmptyStatement; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static com.hazelcast.simulator.tests.helpers.HazelcastTestUtils.isMemberNode; import static com.hazelcast.simulator.utils.CommonUtils.sleepSeconds; import static java.lang.String.format; public class ExternalClientTest { private static final ILogger LOGGER = Logger.getLogger(ExternalClientTest.class); // properties public String basename = "externalClientsRunning"; public boolean waitForClientsCountAutoDetection = true; public int waitForClientCountAutoDetectionDelaySeconds = 10; public int waitForClientsCount = 1; public int waitIntervalSeconds = 60; public int expectedResultSize = 0; SimpleProbe externalClientThroughput; IntervalProbe<HdrLatencyDistributionResult, HdrLatencyDistributionProbe> externalClientLatency; private TestContext testContext; private HazelcastInstance hazelcastInstance; private boolean isExternalResultsCollectorInstance; private ICountDownLatch clientsRunning; @Setup public void setUp(TestContext testContext) throws Exception { this.testContext = testContext; hazelcastInstance = testContext.getTargetInstance(); if (isMemberNode(hazelcastInstance)) { return; } clientsRunning = hazelcastInstance.getCountDownLatch(basename); clientsRunning.trySetCount(waitForClientsCount); // determine one instance per cluster if (hazelcastInstance.getMap(basename).putIfAbsent(basename, true) == null) { isExternalResultsCollectorInstance = true; LOGGER.info("This instance will collect all probe results from external clients"); } else { LOGGER.info("This instance will not collect probe results"); } } @Run public void run() throws ExecutionException, InterruptedException { if (isMemberNode(hazelcastInstance)) { return; } // lazy set CountdownLatch if client count auto detection is enabled if (waitForClientsCountAutoDetection) { // wait some seconds to be sure that all external clients are started LOGGER.info("Waiting for all external clients to be started..."); sleepSeconds(waitForClientCountAutoDetectionDelaySeconds); waitForClientsCount = (int) hazelcastInstance.getAtomicLong("externalClientsStarted").get(); clientsRunning.trySetCount(waitForClientsCount); } // wait for external clients to finish while (true) { try { clientsRunning.await(waitIntervalSeconds, TimeUnit.SECONDS); } catch (InterruptedException ignored) { EmptyStatement.ignore(ignored); } long clientsRunningCount = clientsRunning.getCount(); if (clientsRunningCount > 0) { LOGGER.info(format("Waiting for %d/%d clients...", clientsRunningCount, waitForClientsCount)); } else { LOGGER.info(format("Got response from %d clients, stopping now!", waitForClientsCount)); break; } } // just a single instance will collect the results from all external clients if (!isExternalResultsCollectorInstance) { // disable probes 
externalClientThroughput.disable(); externalClientLatency.disable(); LOGGER.info("Stopping non result collecting ExternalClientTest"); testContext.stop(); return; } // get probe results LOGGER.info("Collecting results from external clients..."); getThroughputResults(); getLatencyResults(); LOGGER.info("Result collecting ExternalClientTest done!"); testContext.stop(); } private void getThroughputResults() { IList<String> throughputResults = getThroughputResultList(); int resultSize = throughputResults.size(); LOGGER.info(format("Collecting %d throughput results (expected %d)...", resultSize, expectedResultSize)); int totalInvocations = 0; double totalDuration = 0; for (String throughputString : throughputResults) { String[] throughput = throughputString.split("\\|"); int operationCount = Integer.parseInt(throughput[0]); long duration = TimeUnit.NANOSECONDS.toMillis(Long.parseLong(throughput[1])); String publisherId = "n/a"; if (throughput.length > 2) { publisherId = throughput[2]; } LOGGER.info(format("External client executed %d operations in %d ms (%s)", operationCount, duration, publisherId)); totalInvocations += operationCount; totalDuration += duration; } LOGGER.info("Done!"); if (resultSize == 0 || totalInvocations == 0 || totalDuration == 0) { LOGGER.info(format("No valid throughput probe data collected! results: %d, totalInvocations: %d, totalDuration: %.0f", resultSize, totalInvocations, totalDuration)); return; } long avgDuration = Math.round(totalDuration / resultSize); externalClientThroughput.setValues(avgDuration, totalInvocations); double performance = ((double) totalInvocations / avgDuration) * 1000; LOGGER.info(format("All external clients executed %d operations in %d ms (%.3f ops/s)", totalInvocations, avgDuration, performance)); } private IList<String> getThroughputResultList() { IList<String> throughputResults = hazelcastInstance.getList("externalClientsThroughputResults"); // wait for all throughput results to arrive int retries = 0; while (expectedResultSize > 0 && throughputResults.size() < expectedResultSize && retries++ < 60) { LOGGER.info(format("Waiting for %d/%d throughput results...", throughputResults.size(), expectedResultSize)); throughputResults = hazelcastInstance.getList("externalClientsThroughputResults"); sleepSeconds(1); } return throughputResults; } private void getLatencyResults() { IList<String> latencyLists = hazelcastInstance.getList("externalClientsLatencyResults"); LOGGER.info(format("Collecting %d latency result lists...", latencyLists.size())); for (String key : latencyLists) { IList<Long> values = hazelcastInstance.getList(key); LOGGER.info(format("Adding %d latency results...", values.size())); for (Long latency : values) { externalClientLatency.recordValue(latency); } } LOGGER.info("Done!"); } }
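ExternalClientTest only shows the collector side. The sketch below is a hypothetical producer, inferred from what the collector reads: the "operationCount|durationNanos|publisherId" strings in the "externalClientsThroughputResults" list, the "externalClientsStarted" atomic long used for auto-detection, and the countdown latch named by the basename property (default "externalClientsRunning"). The class and method names are illustrative.

// Hypothetical producer side; only the distributed structure names and the
// result-string format are taken from the collector code above.
import com.hazelcast.core.HazelcastInstance;

public class ExternalClientReporter {

    /** Call once at client start-up so waitForClientsCountAutoDetection can count this client. */
    public static void registerStarted(HazelcastInstance client) {
        client.getAtomicLong("externalClientsStarted").incrementAndGet();
    }

    /** Publish one throughput sample and signal completion to the collector's latch. */
    public static void reportAndFinish(HazelcastInstance client, long operationCount,
                                       long durationNanos, String publisherId) {
        client.getList("externalClientsThroughputResults")
              .add(operationCount + "|" + durationNanos + "|" + publisherId);
        // The latch name is the test's basename property, "externalClientsRunning" by default.
        client.getCountDownLatch("externalClientsRunning").countDown();
    }
}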
tests/src/main/java/com/hazelcast/simulator/tests/external/ExternalClientTest.java
package com.hazelcast.simulator.tests.external; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.ICountDownLatch; import com.hazelcast.core.IList; import com.hazelcast.logging.ILogger; import com.hazelcast.logging.Logger; import com.hazelcast.simulator.probes.probes.IntervalProbe; import com.hazelcast.simulator.probes.probes.SimpleProbe; import com.hazelcast.simulator.probes.probes.impl.HdrLatencyDistributionProbe; import com.hazelcast.simulator.probes.probes.impl.HdrLatencyDistributionResult; import com.hazelcast.simulator.test.TestContext; import com.hazelcast.simulator.test.annotations.Run; import com.hazelcast.simulator.test.annotations.Setup; import com.hazelcast.util.EmptyStatement; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static com.hazelcast.simulator.tests.helpers.HazelcastTestUtils.isMemberNode; import static com.hazelcast.simulator.utils.CommonUtils.sleepSeconds; import static java.lang.String.format; public class ExternalClientTest { private static final ILogger LOGGER = Logger.getLogger(ExternalClientTest.class); // properties public String basename = "externalClientsRunning"; public boolean waitForClientsCountAutoDetection = true; public int waitForClientCountAutoDetectionDelaySeconds = 10; public int waitForClientsCount = 1; public int waitIntervalSeconds = 60; public int expectedResultSize = 0; SimpleProbe externalClientThroughput; IntervalProbe<HdrLatencyDistributionResult, HdrLatencyDistributionProbe> externalClientLatency; private TestContext testContext; private HazelcastInstance hazelcastInstance; private boolean isExternalResultsCollectorInstance; private ICountDownLatch clientsRunning; @Setup public void setUp(TestContext testContext) throws Exception { this.testContext = testContext; hazelcastInstance = testContext.getTargetInstance(); if (isMemberNode(hazelcastInstance)) { return; } clientsRunning = hazelcastInstance.getCountDownLatch(basename); clientsRunning.trySetCount(waitForClientsCount); // determine one instance per cluster if (hazelcastInstance.getMap(basename).putIfAbsent(basename, true) == null) { isExternalResultsCollectorInstance = true; LOGGER.info("This instance will collect all probe results from external clients"); } else { LOGGER.info("This instance will not collect probe results"); } } @Run public void run() throws ExecutionException, InterruptedException { if (isMemberNode(hazelcastInstance)) { return; } // lazy set CountdownLatch if client count auto detection is enabled if (waitForClientsCountAutoDetection) { // wait some seconds to be sure that all external clients are started LOGGER.info("Waiting for all external clients to be started..."); sleepSeconds(waitForClientCountAutoDetectionDelaySeconds); waitForClientsCount = (int) hazelcastInstance.getAtomicLong("externalClientsStarted").get(); clientsRunning.trySetCount(waitForClientsCount); } // wait for external clients to finish while (true) { try { clientsRunning.await(waitIntervalSeconds, TimeUnit.SECONDS); } catch (InterruptedException ignored) { EmptyStatement.ignore(ignored); } long clientsRunningCount = clientsRunning.getCount(); if (clientsRunningCount > 0) { LOGGER.info(format("Waiting for %d/%d clients...", clientsRunningCount, waitForClientsCount)); } else { LOGGER.info(format("Got response from %d clients, stopping now!", waitForClientsCount)); break; } } // just a single instance will collect the results from all external clients if (!isExternalResultsCollectorInstance) { // disable probes 
externalClientThroughput.disable(); externalClientLatency.disable(); LOGGER.info("Stopping non result collecting ExternalClientTest"); testContext.stop(); return; } // get probe results LOGGER.info("Collecting results from external clients..."); getThroughputResults(); getLatencyResults(); LOGGER.info("Result collecting ExternalClientTest done!"); testContext.stop(); } private void getThroughputResults() { IList<String> throughputResults = getThroughputResultList(); int resultSize = throughputResults.size(); LOGGER.info(format("Collecting %d throughput results (expected %d)...", resultSize, expectedResultSize)); int totalInvocations = 0; double totalDuration = 0; for (String throughputString : throughputResults) { String[] throughput = throughputString.split("\\|"); int operationCount = Integer.parseInt(throughput[0]); long duration = TimeUnit.NANOSECONDS.toMillis(Long.parseLong(throughput[1])); String publisherId = "n/a"; if (throughput.length > 2) { publisherId = throughput[2]; } LOGGER.info(format("External client executed %d operations in %d ms (%s)", operationCount, duration, publisherId)); totalInvocations += operationCount; totalDuration += duration; } LOGGER.info("Done!"); if (resultSize == 0 || totalInvocations == 0 || totalDuration == 0) { LOGGER.info(format("No valid throughput probe data collected! results: %d, totalInvocations: %d, totalDuration: %.0f", resultSize, totalInvocations, totalDuration)); return; } long avgDuration = Math.round(totalDuration / resultSize); externalClientThroughput.setValues(avgDuration, totalInvocations); double performance = ((double) totalInvocations / avgDuration) * 1000; LOGGER.info(format("All external clients executed %d operations in %d ms (%.3f ops/s)", totalInvocations, avgDuration, performance)); } private IList<String> getThroughputResultList() { IList<String> throughputResults = hazelcastInstance.getList("externalClientsThroughputResults"); // wait for all throughput results to arrive while (expectedResultSize > 0 && throughputResults.size() < expectedResultSize) { LOGGER.info(format("Waiting for %d/%d throughput results...", throughputResults.size(), expectedResultSize)); throughputResults = hazelcastInstance.getList("externalClientsThroughputResults"); sleepSeconds(1); } return throughputResults; } private void getLatencyResults() { IList<String> latencyLists = hazelcastInstance.getList("externalClientsLatencyResults"); LOGGER.info(format("Collecting %d latency result lists...", latencyLists.size())); for (String key : latencyLists) { IList<Long> values = hazelcastInstance.getList(key); LOGGER.info(format("Adding %d latency results...", values.size())); for (Long latency : values) { externalClientLatency.recordValue(latency); } } LOGGER.info("Done!"); } }
Added max retries in ExternalClientTest.
tests/src/main/java/com/hazelcast/simulator/tests/external/ExternalClientTest.java
Added max retries in ExternalClientTest.
Java
apache-2.0
1bf16b08fb6aff2795d3ebfe4496640254621d5f
0
markyao/disconf,nabilzhang/disconf,knightliao/disconf,knightliao/disconf,knightliao/disconf,nabilzhang/disconf,nabilzhang/disconf,markyao/disconf,markyao/disconf,nabilzhang/disconf,markyao/disconf,knightliao/disconf,markyao/disconf,knightliao/disconf,nabilzhang/disconf
package com.baidu.disconf.ub.common.utils; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URL; import java.net.URLDecoder; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * 配置导入工具 * * @author liaoqiqi * @version 2014-6-6 */ public class ConfigLoaderUtils { protected static final Logger LOGGER = LoggerFactory .getLogger(ConfigLoaderUtils.class); // loader private static ClassLoader loader = ConfigLoaderUtils.class .getClassLoader(); public static String CLASS_PATH = ""; // // get class path // static { if (loader == null) { LOGGER.info("using system class loader!"); loader = ClassLoader.getSystemClassLoader(); } java.net.URL url = loader.getResource(""); try { // get class path CLASS_PATH = url.getPath(); CLASS_PATH = URLDecoder.decode(CLASS_PATH, "utf-8"); } catch (Exception e) { LOGGER.warn(e.getMessage()); } } /** * * @Description: 使用TOMCAT方式来导入 * * @param propertyFilePath * @return void * @author liaoqiqi * @date 2013-6-19 */ private static Properties loadWithTomcatMode(final String propertyFilePath) throws Exception { Properties props = new Properties(); try { // 先用TOMCAT模式进行导入 // http://blog.csdn.net/minfree/article/details/1800311 // http://stackoverflow.com/questions/3263560/sysloader-getresource-problem-in-java URL url = loader.getResource(propertyFilePath); URI uri = new URI(url.toString()); props.load(new FileInputStream(uri.getPath())); } catch (Exception e) { // http://stackoverflow.com/questions/574809/load-a-resource-contained-in-a-jar props.load(loader.getResourceAsStream(propertyFilePath)); } return props; } /** * * @Description: 使用普通模式导入 * * @param propertyFilePath * @return void * @author liaoqiqi * @date 2013-6-19 */ private static Properties loadWithNormalMode(final String propertyFilePath) throws Exception { Properties props = new Properties(); props.load(new FileInputStream(propertyFilePath)); return props; } /** * * @Description: 配置文件载入器助手 * * @param propertyFilePath * @return * @throws Exception * @return Properties * @author liaoqiqi * @date 2013-6-19 */ public static Properties loadConfig(final String propertyFilePath) throws Exception { try { // 用TOMCAT模式 来载入试试 return ConfigLoaderUtils.loadWithTomcatMode(propertyFilePath); } catch (Exception e1) { try { // 用普通模式进行载入 return loadWithNormalMode(propertyFilePath); } catch (Exception e2) { throw new Exception("cannot load config file: " + propertyFilePath); } } } /** * * @Description: 采用两种方式来载入文件 * * @param file * @return * @return InputStream * @author liaoqiqi * @date 2013-6-20 */ public static InputStream loadFile(String filePath) { InputStream in = null; try { // 先用TOMCAT模式进行导入 in = loader.getResourceAsStream(filePath); if (in == null) { // 使用普通模式导入 try { return new FileInputStream(filePath); } catch (FileNotFoundException e) { return null; } } else { return in; } } finally { if (in != null) { try { in.close(); } catch (IOException e) { LOGGER.error("WHY HERE!", e); } } } } }
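The fixed loadWithTomcatMode above now catches the failure of the FileInputStream-based path and falls back to loader.getResourceAsStream(propertyFilePath), which is what makes a properties file packaged inside a jar (such as disconf_sys.properties) loadable. A standalone sketch of just that fallback, with illustrative class name and error message:

// Sketch of the classpath fallback: when the file lives inside a jar,
// FileInputStream on the resource URL path fails, but the class loader can still stream it.
import java.io.InputStream;
import java.util.Properties;

public class ClasspathFallbackSketch {

    public static Properties load(String propertyFilePath) throws Exception {
        ClassLoader loader = ClasspathFallbackSketch.class.getClassLoader();
        InputStream in = loader.getResourceAsStream(propertyFilePath);
        if (in == null) {
            throw new Exception("cannot load config file: " + propertyFilePath);
        }
        Properties props = new Properties();
        try {
            props.load(in);
        } finally {
            in.close();
        }
        return props;
    }
}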
disconf-core/src/main/java/com/baidu/disconf/ub/common/utils/ConfigLoaderUtils.java
package com.baidu.disconf.ub.common.utils; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URL; import java.net.URLDecoder; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * 配置导入工具 * * @author liaoqiqi * @version 2014-6-6 */ public class ConfigLoaderUtils { protected static final Logger LOGGER = LoggerFactory .getLogger(ConfigLoaderUtils.class); // loader private static ClassLoader loader = ConfigLoaderUtils.class .getClassLoader(); public static String CLASS_PATH = ""; // // get class path // static { if (loader == null) { LOGGER.info("using system class loader!"); loader = ClassLoader.getSystemClassLoader(); } java.net.URL url = loader.getResource(""); try { // get class path CLASS_PATH = url.getPath(); CLASS_PATH = URLDecoder.decode(CLASS_PATH, "utf-8"); } catch (Exception e) { LOGGER.warn(e.getMessage()); } } /** * * @Description: 使用TOMCAT方式来导入 * * @param propertyFilePath * @return void * @author liaoqiqi * @date 2013-6-19 */ private static Properties loadWithTomcatMode(final String propertyFilePath) throws Exception { Properties props = new Properties(); // 先用TOMCAT模式进行导入 // http://blog.csdn.net/minfree/article/details/1800311 // http://stackoverflow.com/questions/3263560/sysloader-getresource-problem-in-java URL url = loader.getResource(propertyFilePath); URI uri = new URI(url.toString()); props.load(new FileInputStream(uri.getPath())); return props; } /** * * @Description: 使用普通模式导入 * * @param propertyFilePath * @return void * @author liaoqiqi * @date 2013-6-19 */ private static Properties loadWithNormalMode(final String propertyFilePath) throws Exception { Properties props = new Properties(); props.load(new FileInputStream(propertyFilePath)); return props; } /** * * @Description: 配置文件载入器助手 * * @param propertyFilePath * @return * @throws Exception * @return Properties * @author liaoqiqi * @date 2013-6-19 */ public static Properties loadConfig(final String propertyFilePath) throws Exception { try { // 用TOMCAT模式 来载入试试 return ConfigLoaderUtils.loadWithTomcatMode(propertyFilePath); } catch (Exception e1) { try { // 用普通模式进行载入 return loadWithNormalMode(propertyFilePath); } catch (Exception e2) { throw new Exception("cannot load config file: " + propertyFilePath); } } } /** * * @Description: 采用两种方式来载入文件 * * @param file * @return * @return InputStream * @author liaoqiqi * @date 2013-6-20 */ public static InputStream loadFile(String filePath) { InputStream in = null; try { // 先用TOMCAT模式进行导入 in = loader.getResourceAsStream(filePath); if (in == null) { // 使用普通模式导入 try { return new FileInputStream(filePath); } catch (FileNotFoundException e) { return null; } } else { return in; } } finally { if (in != null) { try { in.close(); } catch (IOException e) { LOGGER.error("WHY HERE!", e); } } } } }
fix cannot find disconf_sys.properties error
disconf-core/src/main/java/com/baidu/disconf/ub/common/utils/ConfigLoaderUtils.java
fix cannot find disconf_sys.properties error
Java
apache-2.0
99cfc43df0a39f48b8b8321add749a879018c336
0
wyona/yanel,wyona/yanel,baszero/yanel,wyona/yanel,baszero/yanel,wyona/yanel,wyona/yanel,baszero/yanel,baszero/yanel,baszero/yanel,wyona/yanel,baszero/yanel
package org.wyona.yanel.servlet; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.BufferedReader; import java.io.InputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Writer; import java.net.URL; import java.util.Calendar; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.Properties; import java.util.Vector; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.sax.TransformerHandler; import javax.xml.transform.stream.StreamSource; import org.wyona.yanel.core.StateOfView; import org.wyona.yanel.core.Environment; import org.wyona.yanel.core.Path; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.ResourceTypeDefinition; import org.wyona.yanel.core.ResourceTypeIdentifier; import org.wyona.yanel.core.ResourceTypeRegistry; import org.wyona.yanel.core.Yanel; import org.wyona.yanel.core.api.attributes.IntrospectableV1; import org.wyona.yanel.core.api.attributes.ModifiableV1; import org.wyona.yanel.core.api.attributes.ModifiableV2; import org.wyona.yanel.core.api.attributes.TranslatableV1; import org.wyona.yanel.core.api.attributes.VersionableV2; import org.wyona.yanel.core.api.attributes.ViewableV1; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.api.attributes.WorkflowableV1; import org.wyona.yanel.core.api.security.WebAuthenticator; import org.wyona.yanel.core.attributes.versionable.RevisionInformation; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.attributes.viewable.ViewDescriptor; import org.wyona.yanel.core.navigation.Node; import org.wyona.yanel.core.navigation.Sitetree; import org.wyona.yanel.core.serialization.SerializerFactory; import org.wyona.yanel.core.source.SourceResolver; import org.wyona.yanel.core.transformation.I18nTransformer2; import org.wyona.yanel.core.util.DateUtil; import org.wyona.yanel.core.workflow.WorkflowException; import org.wyona.yanel.core.workflow.WorkflowHelper; import org.wyona.yanel.core.map.Map; import org.wyona.yanel.core.map.Realm; import org.wyona.yanel.core.util.ResourceAttributeHelper; import org.wyona.yanel.servlet.IdentityMap; import org.wyona.yanel.servlet.communication.HttpRequest; import org.wyona.yanel.servlet.communication.HttpResponse; import org.wyona.security.core.api.Identity; import org.wyona.security.core.api.IdentityManager; import org.wyona.security.core.api.Policy; import org.wyona.security.core.api.PolicyManager; import org.wyona.security.core.api.Role; import org.wyona.security.core.api.Usecase; import org.wyona.security.core.api.User; import org.apache.log4j.Category; import org.apache.xalan.transformer.TransformerIdentityImpl; import org.apache.xml.resolver.tools.CatalogResolver; import 
org.apache.xml.serializer.Serializer; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder; import org.apache.commons.io.FilenameUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.XMLReader; import org.xml.sax.helpers.XMLReaderFactory; /** * */ public class YanelServlet extends HttpServlet { private static Category log = Category.getInstance(YanelServlet.class); private ServletConfig config; ResourceTypeRegistry rtr; //PolicyManager pm; //IdentityManager im; Map map; Yanel yanel; Sitetree sitetree; File xsltInfoAndException; String xsltLoginScreenDefault; public static String IDENTITY_MAP_KEY = "identity-map"; private static String TOOLBAR_KEY = "toolbar"; private static String TOOLBAR_USECASE = "toolbar"; public static String NAMESPACE = "http://www.wyona.org/yanel/1.0"; private static final String METHOD_PROPFIND = "PROPFIND"; private static final String METHOD_OPTIONS = "OPTIONS"; private static final String METHOD_GET = "GET"; private static final String METHOD_POST = "POST"; private static final String METHOD_PUT = "PUT"; private static final String METHOD_DELETE = "DELETE"; private static final int INSIDE_TAG = 0; private static final int OUTSIDE_TAG = 1; private String sslPort = null; private String toolbarMasterSwitch = "off"; private String reservedPrefix; private String servletContextRealPath; private int cacheExpires = 0; public static final String DEFAULT_ENCODING = "UTF-8"; public static final String VIEW_ID_PARAM_NAME = "yanel.resource.viewid"; /** * */ public void init(ServletConfig config) throws ServletException { this.config = config; servletContextRealPath = config.getServletContext().getRealPath("/"); xsltInfoAndException = org.wyona.commons.io.FileUtil.file(servletContextRealPath, config.getInitParameter("exception-and-info-screen-xslt")); xsltLoginScreenDefault = config.getInitParameter("login-screen-xslt"); try { yanel = Yanel.getInstance(); yanel.init(); rtr = yanel.getResourceTypeRegistry(); map = (Map) yanel.getBeanFactory().getBean("map"); sitetree = (Sitetree) yanel.getBeanFactory().getBean("repo-navigation"); sslPort = config.getInitParameter("ssl-port"); toolbarMasterSwitch = config.getInitParameter("toolbar-master-switch"); reservedPrefix = yanel.getReservedPrefix(); String expires = config.getInitParameter("static-content-cache-expires"); if (expires != null) { this.cacheExpires = Integer.parseInt(expires); } } catch (Exception e) { log.error(e); throw new ServletException(e.getMessage(), e); } } /** * Dispatch requests */ public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String httpAcceptMediaTypes = request.getHeader("Accept"); String httpAcceptLanguage = request.getHeader("Accept-Language"); String yanelUsecase = request.getParameter("yanel.usecase"); if(yanelUsecase != null && yanelUsecase.equals("logout")) { // Logout from Yanel if(doLogout(request, response) != null) return; } else if(yanelUsecase != null && yanelUsecase.equals("create")) { // Create a new resource if(doCreate(request, response) != null) return; } // Check authorization and if authorization failed, then try to authenticate if(doAccessControl(request, response) != null) { // Either redirect (after successful authentication) or access denied (and response will send the login screen) return; } else { if (log.isDebugEnabled()) log.debug("Access granted: " + request.getServletPath()); } // Check for 
requests re policies String policyRequestPara = request.getParameter("yanel.policy"); if (policyRequestPara != null) { doAccessPolicyRequest(request, response, policyRequestPara); return; } // Check for requests for global data Resource resource = getResource(request, response); String path = resource.getPath(); if (path.indexOf("/" + reservedPrefix + "/") == 0) { getGlobalData(request, response); return; } String value = request.getParameter("yanel.resource.usecase"); // Delete node if (value != null && value.equals("delete")) { handleDeleteUsecase(request, response); return; } // Delegate ... String method = request.getMethod(); if (method.equals(METHOD_PROPFIND)) { doPropfind(request, response); } else if (method.equals(METHOD_GET)) { doGet(request, response); } else if (method.equals(METHOD_POST)) { doPost(request, response); } else if (method.equals(METHOD_PUT)) { doPut(request, response); } else if (method.equals(METHOD_DELETE)) { doDelete(request, response); } else if (method.equals(METHOD_OPTIONS)) { doOptions(request, response); } else { log.error("No such method implemented: " + method); response.sendError(response.SC_NOT_IMPLEMENTED); } } /** * */ public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { HttpSession session = request.getSession(true); Resource resource = getResource(request, response); // Enable or disable toolbar switchToolbar(request); // Check for requests refered by WebDAV String yanelWebDAV = request.getParameter("yanel.webdav"); if(yanelWebDAV != null && yanelWebDAV.equals("propfind1")) { log.error("DEBUG: WebDAV client (" + request.getHeader("User-Agent") + ") requests to \"edit\" a resource: " + resource.getRealm() + ", " + resource.getPath()); //return; } String value = request.getParameter("yanel.resource.usecase"); try { if (value != null && value.equals("release-lock")) { log.debug("Release lock ..."); if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { VersionableV2 versionable = (VersionableV2)resource; try { versionable.cancelCheckout(); } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException("Releasing of lock failed because of: " + resource.getPath() + " " + e.getMessage(), e); } } return; } else { getContent(request, response); return; } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } /** * Checks if the yanel.toolbar request parameter is set and stores * the value of the parameter in the session. * @param request */ private void switchToolbar(HttpServletRequest request) { // Check for toolbar ... String yanelToolbar = request.getParameter("yanel.toolbar"); if(yanelToolbar != null) { HttpSession session = request.getSession(false); if (yanelToolbar.equals("on")) { log.info("Turn on toolbar!"); enableToolbar(request); } else if (yanelToolbar.equals("off")) { log.info("Turn off toolbar!"); disableToolbar(request); } else { log.warn("No such toolbar value: " + yanelToolbar); } } } /** * Returns the mime-type according to the given file extension. * Default is application/octet-stream. 
* @param extension * @return */ private String guessMimeType(String extension) { String ext = extension.toLowerCase(); if (ext.equals("html") || ext.equals("htm")) return "text/html"; if (ext.equals("css")) return "text/css"; if (ext.equals("txt")) return "text/plain"; if (ext.equals("js")) return "application/x-javascript"; if (ext.equals("jpg") || ext.equals("jpg")) return "image/jpeg"; if (ext.equals("gif")) return "image/gif"; if (ext.equals("pdf")) return "application/pdf"; if (ext.equals("zip")) return "application/zip"; if (ext.equals("htc")) return "text/x-component"; // TODO: add more mime types // TODO: and move to MimeTypeUtil return "application/octet-stream"; // default } /** * Get view of resource */ private void getContent(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { View view = null; org.w3c.dom.Document doc = null; try { doc = getDocument(NAMESPACE, "yanel"); } catch(Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage()); } Element rootElement = doc.getDocumentElement(); rootElement.setAttribute("servlet-context-real-path", servletContextRealPath); Element requestElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "request")); requestElement.setAttributeNS(NAMESPACE, "uri", request.getRequestURI()); requestElement.setAttributeNS(NAMESPACE, "servlet-path", request.getServletPath()); HttpSession session = request.getSession(true); Element sessionElement = (Element) rootElement.appendChild(doc.createElement("session")); sessionElement.setAttribute("id", session.getId()); Enumeration attrNames = session.getAttributeNames(); if (!attrNames.hasMoreElements()) { Element sessionNoAttributesElement = (Element) sessionElement.appendChild(doc.createElement("no-attributes")); } while (attrNames.hasMoreElements()) { String name = (String)attrNames.nextElement(); String value = session.getAttribute(name).toString(); Element sessionAttributeElement = (Element) sessionElement.appendChild(doc.createElement("attribute")); sessionAttributeElement.setAttribute("name", name); sessionAttributeElement.appendChild(doc.createTextNode(value)); } String usecase = request.getParameter("yanel.resource.usecase"); Resource res = null; long lastModified = -1; long size = -1; try { Environment environment = getEnvironment(request, response); res = getResource(request, response); if (res != null) { Element resourceElement = (Element) rootElement.appendChild(doc.createElement("resource")); ResourceConfiguration resConfig = res.getConfiguration(); if (resConfig != null) { Element resConfigElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "config")); resConfigElement.setAttributeNS(NAMESPACE, "rti-name", resConfig.getName()); resConfigElement.setAttributeNS(NAMESPACE, "rti-namespace", resConfig.getNamespace()); } else { Element noResConfigElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "no-config")); } Element realmElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "realm")); realmElement.setAttributeNS(NAMESPACE, "name", res.getRealm().getName()); realmElement.setAttributeNS(NAMESPACE, "rid", res.getRealm().getID()); realmElement.setAttributeNS(NAMESPACE, "prefix", res.getRealm().getMountPoint()); Element identityManagerElement = (Element) realmElement.appendChild(doc.createElementNS(NAMESPACE, "identity-manager")); Element userManagerElement = (Element) 
identityManagerElement.appendChild(doc.createElementNS(NAMESPACE, "user-manager")); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "1")) { if (log.isDebugEnabled()) log.debug("Resource is viewable V1"); Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view")); viewElement.setAttributeNS(NAMESPACE, "version", "1"); // TODO: The same as for ViewableV2 ... ViewDescriptor[] vd = ((ViewableV1) res).getViewDescriptors(); if (vd != null) { for (int i = 0; i < vd.length; i++) { Element descriptorElement = (Element) viewElement.appendChild(doc.createElement("descriptor")); if (vd[i].getMimeType() != null) { descriptorElement.appendChild(doc.createTextNode(vd[i].getMimeType())); } descriptorElement.setAttributeNS(NAMESPACE, "id", vd[i].getId()); } } else { viewElement.appendChild(doc.createTextNode("No View Descriptors!")); } String viewId = request.getParameter(VIEW_ID_PARAM_NAME); try { view = ((ViewableV1) res).getView(request, viewId); } catch(org.wyona.yarep.core.NoSuchNodeException e) { do404(request, response, doc, e.getMessage()); return; } catch(Exception e) { log.error(e.getMessage(), e); String message = e.toString(); log.error(e.getMessage(), e); Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); exceptionElement.setAttributeNS(NAMESPACE, "status", "500"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "2")) { if (log.isDebugEnabled()) log.debug("Resource is viewable V2"); if (!((ViewableV2) res).exists()) { //log.warn("No such ViewableV2 resource: " + res.getPath()); //log.warn("TODO: It seems like many ViewableV2 resources are not implementing exists() properly!"); //do404(request, response, doc, res.getPath()); //return; } String viewId = request.getParameter(VIEW_ID_PARAM_NAME); Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view")); viewElement.setAttributeNS(NAMESPACE, "version", "2"); ViewDescriptor[] vd = ((ViewableV2) res).getViewDescriptors(); if (vd != null) { for (int i = 0; i < vd.length; i++) { Element descriptorElement = (Element) viewElement.appendChild(doc.createElement("descriptor")); if (vd[i].getMimeType() != null) { descriptorElement.appendChild(doc.createTextNode(vd[i].getMimeType())); } descriptorElement.setAttributeNS(NAMESPACE, "id", vd[i].getId()); } } else { viewElement.appendChild(doc.createTextNode("No View Descriptors!")); } size = ((ViewableV2) res).getSize(); Element sizeElement = (Element) resourceElement.appendChild(doc.createElement("size")); sizeElement.appendChild(doc.createTextNode(String.valueOf(size))); try { String revisionName = request.getParameter("yanel.resource.revision"); if (revisionName != null && ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) { view = ((VersionableV2) res).getView(viewId, revisionName); } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Workflowable", "1") && environment.getStateOfView().equals(StateOfView.LIVE)) { WorkflowableV1 workflowable = (WorkflowableV1)res; if (workflowable.isLive()) { view = workflowable.getLiveView(viewId); } else { String message = "The resource '" + res.getPath() + "' is WorkflowableV1, but has not been published yet. 
Instead the live version, the most recent version will be displayed!"; log.warn(message); view = ((ViewableV2) res).getView(viewId); // TODO: Maybe sending a 404 instead the most recent version should be configurable! /* do404(request, response, doc, message); return; */ } } else { view = ((ViewableV2) res).getView(viewId); } } catch(org.wyona.yarep.core.NoSuchNodeException e) { String message = "" + e; log.warn(message); do404(request, response, doc, message); return; } catch(org.wyona.yanel.core.ResourceNotFoundException e) { String message = "" + e; log.warn(message); do404(request, response, doc, message); return; } } else { Element noViewElement = (Element) resourceElement.appendChild(doc.createElement("not-viewable")); String message = res.getClass().getName() + " is not viewable! (" + res.getPath() + ", " + res.getRealm() + ")"; noViewElement.appendChild(doc.createTextNode(res.getClass().getName() + " is not viewable!")); log.error(message); Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); exceptionElement.setAttributeNS(NAMESPACE, "status", "501"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_IMPLEMENTED); setYanelOutput(request, response, doc); return; } if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) { lastModified = ((ModifiableV2) res).getLastModified(); Element lastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("last-modified")); lastModifiedElement.appendChild(doc.createTextNode(new java.util.Date(lastModified).toString())); } else { Element noLastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("no-last-modified")); } if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) { // retrieve the revisions, but only in the meta usecase (for performance reasons): if (request.getParameter("yanel.resource.meta") != null) { RevisionInformation[] revisions = ((VersionableV2)res).getRevisions(); Element revisionsElement = (Element) resourceElement.appendChild(doc.createElement("revisions")); if (revisions != null && revisions.length > 0) { for (int i = revisions.length - 1; i >= 0; i--) { Element revisionElement = (Element) revisionsElement.appendChild(doc.createElement("revision")); Element revisionNameElement = (Element) revisionElement.appendChild(doc.createElement("name")); revisionNameElement.appendChild(doc.createTextNode(revisions[i].getName())); Element revisionDateElement = (Element) revisionElement.appendChild(doc.createElement("date")); revisionDateElement.appendChild(doc.createTextNode(DateUtil.format(revisions[i].getDate()))); Element revisionUserElement = (Element) revisionElement.appendChild(doc.createElement("user")); revisionUserElement.appendChild(doc.createTextNode(revisions[i].getUser())); Element revisionCommentElement = (Element) revisionElement.appendChild(doc.createElement("comment")); revisionCommentElement.appendChild(doc.createTextNode(revisions[i].getComment())); } } else { Element noRevisionsYetElement = (Element) resourceElement.appendChild(doc.createElement("no-revisions-yet")); } } } else { Element notVersionableElement = (Element) resourceElement.appendChild(doc.createElement("not-versionable")); } if (ResourceAttributeHelper.hasAttributeImplemented(res, "Translatable", "1")) { TranslatableV1 translatable = ((TranslatableV1) res); Element translationsElement = (Element) 
resourceElement.appendChild(doc.createElement("translations")); String[] languages = translatable.getLanguages(); for (int i=0; i<languages.length; i++) { Element translationElement = (Element) translationsElement.appendChild(doc.createElement("translation")); translationElement.setAttribute("language", languages[i]); String path = translatable.getTranslation(languages[i]).getPath(); translationElement.setAttribute("path", path); } } if (usecase != null && usecase.equals("checkout")) { if(log.isDebugEnabled()) log.debug("Checkout data ..."); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) { // note: this will throw an exception if the document is checked out already // by another user. String userID = environment.getIdentity().getUsername(); VersionableV2 versionable = (VersionableV2)res; if (versionable.isCheckedOut()) { String checkoutUserID = versionable.getCheckoutUserID(); if (checkoutUserID.equals(userID)) { log.warn("Resource " + res.getPath() + " is already checked out by this user: " + checkoutUserID); } else { throw new Exception("Resource is already checked out by another user: " + checkoutUserID); } } else { versionable.checkout(userID); } } else { log.warn("Acquire lock has not been implemented yet ...!"); // acquireLock(); } } } else { Element resourceIsNullElement = (Element) rootElement.appendChild(doc.createElement("resource-is-null")); } } catch(org.wyona.yarep.core.NoSuchNodeException e) { String message = "" + e; log.warn(e, e); do404(request, response, doc, message); return; } catch(org.wyona.yanel.core.ResourceNotFoundException e) { String message = "" + e; log.warn(e, e); do404(request, response, doc, message); return; } catch(Exception e) { log.error(e.getMessage(), e); String message = e.toString() + "\n\n" + getStackTrace(e); //String message = e.toString(); Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } // TODO: Move this introspection generation somewhere else ... 
try { if (usecase != null && usecase.equals("introspection")) { if (ResourceAttributeHelper.hasAttributeImplemented(res, "Introspectable", "1")) { String introspection = ((IntrospectableV1)res).getIntrospection(); response.setContentType("application/xml"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); response.getWriter().print(introspection); } else { String message = "Resource is not introspectable."; Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); setYanelOutput(request, response, doc); } return; } } catch(Exception e) { log.error(e.getMessage(), e); Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(e.getMessage())); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } String meta = request.getParameter("yanel.resource.meta"); if (meta != null) { if (meta.length() > 0) { log.warn("TODO: meta: " + meta); } else { log.debug("Show all meta"); } response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); setYanelOutput(request, response, doc); return; } if (view != null) { if (generateResponse(view, res, request, response, doc, size, lastModified) != null) return; } else { String message = "View is null!"; Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); } response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } /** * */ public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String transition = request.getParameter("yanel.resource.workflow.transition"); if (transition != null) { Resource resource = getResource(request, response); if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Workflowable", "1")) { WorkflowableV1 workflowable = (WorkflowableV1)resource; try { String revision = request.getParameter("yanel.resource.revision"); workflowable.doTransition(transition, revision); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>"); sb.append(workflowable.getWorkflowIntrospection()); PrintWriter w = response.getWriter(); w.print(sb); return; } catch (WorkflowException e) { // TODO: Implement response if transition has failed ... log.error(e, e); throw new ServletException(e.getMessage(), e); } } else { log.warn("Resource not workflowable: " + resource.getPath()); } } String value = request.getParameter("yanel.resource.usecase"); if (value != null && value.equals("save")) { log.debug("Save data ..."); save(request, response, false); return; } else if (value != null && value.equals("checkin")) { log.debug("Checkin data ..."); save(request, response, true); log.warn("Release lock has not been implemented yet ..."); // releaseLock(); return; } else { log.info("No parameter yanel.resource.usecase!"); String contentType = request.getContentType(); // TODO: Check for type (see section 9.2 of APP spec (e.g. 
draft 16) if (contentType.indexOf("application/atom+xml") >= 0) { InputStream in = intercept(request.getInputStream()); // Create new Atom entry try { String atomEntryUniversalName = "<{http://www.wyona.org/yanel/resource/1.0}atom-entry/>"; Realm realm = yanel.getMap().getRealm(request.getServletPath()); String newEntryPath = yanel.getMap().getPath(realm, request.getServletPath() + "/" + new java.util.Date().getTime() + ".xml"); log.error("DEBUG: Realm and Path of new Atom entry: " + realm + " " + newEntryPath); Resource atomEntryResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, newEntryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new ResourceTypeIdentifier(atomEntryUniversalName, null)); ((ModifiableV2)atomEntryResource).write(in); byte buffer[] = new byte[8192]; int bytesRead; InputStream resourceIn = ((ModifiableV2)atomEntryResource).getInputStream(); OutputStream responseOut = response.getOutputStream(); while ((bytesRead = resourceIn.read(buffer)) != -1) { responseOut.write(buffer, 0, bytesRead); } resourceIn.close(); //responseOut.close(); // TODO: Fix Location ... response.setHeader("Location", "http://ulysses.wyona.org" + newEntryPath); response.setStatus(javax.servlet.http.HttpServletResponse.SC_CREATED); return; } catch (Exception e) { log.error(e.getMessage(), e); throw new IOException(e.getMessage()); } } // Enable or disable toolbar switchToolbar(request); getContent(request, response); } } /** * HTTP PUT implementation */ public void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO: Reuse code doPost resp. share code with doPut String value = request.getParameter("yanel.resource.usecase"); if (value != null && value.equals("save")) { log.debug("Save data ..."); save(request, response, false); return; } else if (value != null && value.equals("checkin")) { log.debug("Checkin data ..."); save(request, response, true); log.warn("Release lock has not been implemented yet ...!"); // releaseLock(); return; } else { log.warn("No parameter yanel.resource.usecase!"); String contentType = request.getContentType(); if (contentType != null && contentType.indexOf("application/atom+xml") >= 0) { InputStream in = intercept(request.getInputStream()); // Overwrite existing atom entry try { String atomEntryUniversalName = "<{http://www.wyona.org/yanel/resource/1.0}atom-entry/>"; Realm realm = yanel.getMap().getRealm(request.getServletPath()); String entryPath = yanel.getMap().getPath(realm, request.getServletPath()); log.error("DEBUG: Realm and Path of new Atom entry: " + realm + " " + entryPath); Resource atomEntryResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, entryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new ResourceTypeIdentifier(atomEntryUniversalName, null)); // TODO: There seems to be a problem ... 
((ModifiableV2)atomEntryResource).write(in); // NOTE: This method does not update updated date /* OutputStream out = ((ModifiableV2)atomEntry).getOutputStream(entryPath); byte buffer[] = new byte[8192]; int bytesRead; while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } */ log.info("Atom entry has been saved: " + entryPath); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); return; } catch (Exception e) { log.error(e.getMessage(), e); throw new IOException(e.getMessage()); } } else { Resource resource = getResource(request, response); log.warn("Client (" + request.getHeader("User-Agent") + ") requests to save a resource: " + resource.getRealm() + ", " + resource.getPath()); save(request, response, false); return; } } } /** * HTTP DELETE implementation */ public void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { Resource res = getResource(request, response); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) { if (((ModifiableV2) res).delete()) { // TODO: Also delete resource config! What about access policies?! log.debug("Resource has been deleted: " + res); response.setStatus(HttpServletResponse.SC_OK); String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(res.getPath()); StringBuffer sb = new StringBuffer("<html><body>Page has been deleted! <a href=\"\">Check</a> or return to <a href=\"" + backToRealm + "\">Homepage</a>.</body></html>"); PrintWriter w = response.getWriter(); w.print(sb); return; } else { log.warn("Resource could not be deleted: " + res); response.setStatus(response.SC_FORBIDDEN); return; } } else { log.error("Resource '" + res + "' has interface ModifiableV2 not implemented." ); response.sendError(response.SC_NOT_IMPLEMENTED); return; } } catch (Exception e) { log.error("Could not delete resource with URL " + request.getRequestURL() + " " + e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } /** * */ private Resource getResource(HttpServletRequest request, HttpServletResponse response) throws ServletException { try { Realm realm = map.getRealm(request.getServletPath()); String path = map.getPath(realm, request.getServletPath()); HttpRequest httpRequest = (HttpRequest)request; HttpResponse httpResponse = new HttpResponse(response); Resource res = yanel.getResourceManager().getResource(getEnvironment(httpRequest, httpResponse), realm, path); return res; } catch(Exception e) { String errorMsg = "Could not get resource for request: " + request.getServletPath() + ": " + e.getMessage(); log.error(errorMsg, e); throw new ServletException(errorMsg, e); } } /** * */ private Environment getEnvironment(HttpServletRequest request, HttpServletResponse response) throws ServletException { Identity identity; try { identity = getIdentity(request); Realm realm = map.getRealm(request.getServletPath()); String stateOfView = StateOfView.AUTHORING; if (isToolbarEnabled(request)) { stateOfView = StateOfView.AUTHORING; } else { stateOfView = StateOfView.LIVE; } //log.debug("State of view: " + stateOfView); Environment environment = new Environment(request, response, identity, stateOfView, null); return environment; } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } /** * Save data */ private void save(HttpServletRequest request, HttpServletResponse response, boolean doCheckin) throws ServletException, IOException { log.debug("Save data ..."); Resource resource = getResource(request, response); /* -> 
commented because the current default repo implementation does not support versioning yet. if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { try { // check the resource state: Identity identity = getIdentity(request); String userID = identity.getUser().getID(); VersionableV2 versionable = (VersionableV2)resource; if (versionable.isCheckedOut()) { String checkoutUserID = versionable.getCheckoutUserID(); if (!checkoutUserID.equals(userID)) { throw new Exception("Resource is checked out by another user: " + checkoutUserID); } } else { throw new Exception("Resource is not checked out."); } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } */ InputStream in = request.getInputStream(); // TODO: Should be delegated to resource type, e.g. <{http://...}xml/>! // Check on well-formedness ... String contentType = request.getContentType(); log.debug("Content-Type: " + contentType); if (contentType != null && (contentType.indexOf("application/xml") >= 0 || contentType.indexOf("application/xhtml+xml") >= 0)) { log.info("Check well-formedness ..."); javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance(); try { javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder(); // TODO: Get log messages into log4j ... //parser.setErrorHandler(...); java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream(); byte[] buf = new byte[8192]; int bytesR; while ((bytesR = in.read(buf)) != -1) { baos.write(buf, 0, bytesR); } // Buffer within memory (TODO: Maybe replace with File-buffering ...) // http://www-128.ibm.com/developerworks/java/library/j-io1/ byte[] memBuffer = baos.toByteArray(); // NOTE: DOCTYPE is being resolved/retrieved (e.g. xhtml schema from w3.org) also // if isValidating is set to false. // Hence, for performance and network reasons we use a local catalog ... // Also see http://www.xml.com/pub/a/2004/03/03/catalogs.html // resp. http://xml.apache.org/commons/components/resolver/ // TODO: What about a resolver factory? 
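            // The request body was buffered into 'memBuffer' above so it can be parsed once for the
            // well-formedness check and then re-read for the actual save. The CatalogResolver set below
            // keeps DOCTYPE lookups (e.g. the XHTML DTDs) local instead of fetching them over the network.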
parser.setEntityResolver(new org.apache.xml.resolver.tools.CatalogResolver()); parser.parse(new ByteArrayInputStream(memBuffer)); in = new ByteArrayInputStream(memBuffer); //org.w3c.dom.Document document = parser.parse(new ByteArrayInputStream(memBuffer)); } catch (org.xml.sax.SAXException e) { log.warn("Data is not well-formed: "+e.getMessage()); StringBuffer sb = new StringBuffer(); sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"data-not-well-formed\">"); sb.append("<message>Data is not well-formed: "+e.getMessage()+"</message>"); sb.append("</exception>"); response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); PrintWriter w = response.getWriter(); w.print(sb); return; } catch (Exception e) { log.error(e.getMessage(), e); StringBuffer sb = new StringBuffer(); sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"neutron\">"); //sb.append("<message>" + e.getStackTrace() + "</message>"); //sb.append("<message>" + e.getMessage() + "</message>"); sb.append("<message>" + e + "</message>"); sb.append("</exception>"); response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); PrintWriter w = response.getWriter(); w.print(sb); return; } log.info("Data seems to be well-formed :-)"); } else { log.info("No well-formedness check required for content type: " + contentType); } // IMPORTANT TODO: Use ModifiableV2.write(InputStream in) such that resource can modify data during saving resp. check if getOutputStream is equals null and then use write .... OutputStream out = null; Resource res = getResource(request, response); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "1")) { out = ((ModifiableV1) res).getOutputStream(new Path(request.getServletPath())); write(in, out, request, response); } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) { try { out = ((ModifiableV2) res).getOutputStream(); if (out != null) { write(in, out, request, response); } else { log.warn("getOutputStream() returned null, hence fallback to write()"); ((ModifiableV2) res).write(in); } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } else { String message = res.getClass().getName() + " is not modifiable (neither V1 nor V2)!"; log.warn(message); StringBuffer sb = new StringBuffer(); // TODO: Differentiate between Neutron based and other clients ... sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"neutron\">"); sb.append("<message>" + message + "</message>"); sb.append("</exception>"); response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); PrintWriter w = response.getWriter(); w.print(sb); } if (doCheckin) { if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { VersionableV2 versionable = (VersionableV2)resource; try { versionable.checkin("updated"); } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException("Could not check in resource: " + resource.getPath() + " " + e.getMessage(), e); } } } } /** * Check authorization and if not authorized then authenticate. 
Return null if authorization granted, otherwise return 401 and appropriate response such that client can provide credentials for authentication */ private HttpServletResponse doAccessControl(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // Get usecase Usecase usecase = getUsecase(request); // Get identity, realm, path Identity identity; Realm realm; String path; try { identity = getIdentity(request); realm = map.getRealm(request.getServletPath()); path = map.getPath(realm, request.getServletPath()); } catch (Exception e) { log.error(e, e); throw new ServletException(e.getMessage()); } // Check Authorization boolean authorized = false; try { if (log.isDebugEnabled()) log.debug("Check authorization: realm: " + realm + ", path: " + path + ", identity: " + identity + ", Usecase: " + usecase.getName()); authorized = realm.getPolicyManager().authorize(path, identity, usecase); if (log.isDebugEnabled()) log.debug("Check authorization result: " + authorized); } catch (Exception e) { log.error(e, e); throw new ServletException(e.getMessage(), e); } if(!authorized) { // TODO: Implement HTTP BASIC/DIGEST response (see above) log.info("Access denied: " + getRequestURLQS(request, null, false)); if(!request.isSecure()) { if(sslPort != null) { log.info("Redirect to SSL ..."); try { URL url = new URL(getRequestURLQS(request, null, false).toString()); url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile()); if (realm.isProxySet()) { if (realm.getProxySSLPort() >= 0) { log.debug("Use configured port: " + realm.getProxySSLPort()); url = new URL(url.getProtocol(), url.getHost(), new Integer(realm.getProxySSLPort()).intValue(), url.getFile()); } else { log.debug("Use default port: " + url.getDefaultPort()); // NOTE: getDefaultPort depends on the Protocol (e.g. https is 443) url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile()); } } log.info("Redirect to SSL: " + url); response.setHeader("Location", url.toString()); // TODO: Yulup has a bug re TEMPORARY_REDIRECT //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); return response; } catch (Exception e) { log.error(e); } } else { log.warn("SSL does not seem to be configured!"); } } if(doAuthenticate(request, response) != null) { log.info("Return response of web authenticator."); /* NOTE: Such a response can have different reasons: - Either no credentials provided yet and web authenticator is generating a response to fetch credentials - Or authentication failed and web authenticator is resending response to fetch again credentials"); - Or authentication was successful and web authenticator sends a redirect */ return response; } else { try { log.warn("Authentication was successful for user: " + getIdentity(request).getUsername()); } catch (Exception e) { log.error(e, e); } URL url = new URL(getRequestURLQS(request, null, false).toString()); if (sslPort != null) { url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile()); } log.warn("Redirect to original request: " + url); //response.sendRedirect(url.toString()); // 302 // TODO: Yulup has a bug re TEMPORARY_REDIRECT (or is the problem that the load balancer is rewritting 302 reponses?!) 
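                // Send the client back to the originally requested URL with a manually built 301
                // (Location header plus status) rather than sendRedirect(), which would answer with a 302.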
response.setHeader("Location", url.toString()); response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); // 301 //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); // 302 return response; } } else { log.info("Access granted: " + getRequestURLQS(request, null, false)); return null; } } /** * Patch request with proxy settings re realm configuration */ private String getRequestURLQS(HttpServletRequest request, String addQS, boolean xml) { try { Realm realm = map.getRealm(request.getServletPath()); // TODO: Handle this exception more gracefully! if (realm == null) log.error("No realm found for path " +request.getServletPath()); String proxyHostName = realm.getProxyHostName(); int proxyPort = realm.getProxyPort(); String proxyPrefix = realm.getProxyPrefix(); URL url = null; url = new URL(request.getRequestURL().toString()); //if(proxyHostName != null || proxyPort >= null || proxyPrefix != null) { if(realm.isProxySet()) { if (proxyHostName != null) { url = new URL(url.getProtocol(), proxyHostName, url.getPort(), url.getFile()); } if (proxyPort >= 0) { url = new URL(url.getProtocol(), url.getHost(), proxyPort, url.getFile()); } else { url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile()); } if (proxyPrefix != null) { url = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getFile().substring(proxyPrefix.length())); } //log.debug("Proxy enabled for this realm resp. request: " + realm + ", " + url); } else { //log.debug("No proxy set for this realm resp. request: " + realm + ", " + url); } String urlQS = url.toString(); if (request.getQueryString() != null) { urlQS = urlQS + "?" + request.getQueryString(); if (addQS != null) urlQS = urlQS + "&" + addQS; } else { if (addQS != null) urlQS = urlQS + "?" + addQS; } if (xml) urlQS = urlQS.replaceAll("&", "&amp;"); if(log.isDebugEnabled()) log.debug("Request: " + urlQS); return urlQS; } catch (Exception e) { log.error(e); return null; } } /** * Also see https://svn.apache.org/repos/asf/tomcat/container/branches/tc5.0.x/catalina/src/share/org/apache/catalina/servlets/WebdavServlet.java * Also maybe interesting http://sourceforge.net/projects/openharmonise */ public void doPropfind(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { Resource resource = getResource(request, response); //Node node = resource.getRealm().getSitetree().getNode(resource.getPath()); Node node = sitetree.getNode(resource.getRealm(),resource.getPath()); String depth = request.getHeader("Depth"); StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>"); sb.append("<multistatus xmlns=\"DAV:\">"); if (depth.equals("0")) { if (node.isCollection()) { sb.append(" <response>"); sb.append(" <href>"+request.getRequestURI()+"</href>"); sb.append(" <propstat>"); sb.append(" <prop>"); sb.append(" <resourcetype><collection/></resourcetype>"); sb.append(" <getcontenttype>httpd/unix-directory</getcontenttype>"); sb.append(" </prop>"); sb.append(" <status>HTTP/1.1 200 OK</status>"); sb.append(" </propstat>"); sb.append(" </response>"); } else if (node.isResource()) { sb.append(" <response>"); sb.append(" <href>"+request.getRequestURI()+"</href>"); sb.append(" <propstat>"); sb.append(" <prop>"); sb.append(" <resourcetype/>"); // TODO: Set mime type of node! sb.append(" <getcontenttype>application/octet-stream</getcontenttype>"); // TODO: Set content length and last modified! 
sb.append(" <getcontentlength>0</getcontentlength>"); sb.append(" <getlastmodified>1969.02.16</getlastmodified>"); // See http://www.webdav.org/specs/rfc2518.html#PROPERTY_source, http://wiki.zope.org/HiperDom/RoundtripEditingDiscussion sb.append(" <source>\n"); sb.append(" <link>\n"); sb.append(" <src>" + request.getRequestURI() + "</src>\n"); sb.append(" <dst>" + request.getRequestURI() + "?yanel.resource.modifiable.source</dst>\n"); sb.append(" </link>\n"); sb.append(" </source>\n"); sb.append(" </prop>"); sb.append(" <status>HTTP/1.1 200 OK</status>"); sb.append(" </propstat>"); sb.append(" </response>"); } else { log.error("Neither collection nor resource!"); } } else if (depth.equals("1")) { // TODO: Shouldn't one check with isCollection() first?! Node[] children = node.getChildren(); if (children != null) { for (int i = 0; i < children.length; i++) { if (children[i].isCollection()) { sb.append(" <response>\n"); sb.append(" <href>" + request.getRequestURI() + "/" + children[i].getName() + "/</href>\n"); sb.append(" <propstat>\n"); sb.append(" <prop>\n"); sb.append(" <displayname>" + children[i].getName() + "</displayname>\n"); sb.append(" <resourcetype><collection/></resourcetype>\n"); sb.append(" <getcontenttype>httpd/unix-directory</getcontenttype>\n"); sb.append(" </prop>\n"); sb.append(" <status>HTTP/1.1 200 OK</status>\n"); sb.append(" </propstat>\n"); sb.append(" </response>\n"); } else if(children[i].isResource()) { sb.append(" <response>\n"); sb.append(" <href>" + request.getRequestURI() + "/" + children[i].getName() + "?yanel.webdav=propfind1</href>\n"); sb.append(" <propstat>\n"); sb.append(" <prop>\n"); sb.append(" <displayname>" + children[i].getName() + "</displayname>\n"); sb.append(" <resourcetype/>\n"); // TODO: Set mime type of node! sb.append(" <getcontenttype>application/octet-stream</getcontenttype>\n"); // TODO: Set content length and last modified! sb.append(" <getcontentlength>0</getcontentlength>"); sb.append(" <getlastmodified>1969.02.16</getlastmodified>"); // See http://www.webdav.org/specs/rfc2518.html#PROPERTY_source, http://wiki.zope.org/HiperDom/RoundtripEditingDiscussion sb.append(" <source>\n"); sb.append(" <link>\n"); sb.append(" <src>" + request.getRequestURI() + "/" + children[i].getName() + "</src>\n"); sb.append(" <dst>" + request.getRequestURI() + "/" + children[i].getName() + "?yanel.resource.modifiable.source</dst>\n"); sb.append(" </link>\n"); sb.append(" </source>\n"); sb.append(" </prop>\n"); sb.append(" <status>HTTP/1.1 200 OK</status>\n"); sb.append(" </propstat>\n"); sb.append(" </response>\n"); } else { log.error("Neither collection nor resource: " + children[i].getPath()); } } } else { log.warn("No children!"); } } else if (depth.equals("infinity")) { log.warn("TODO: List children and their children and their children ..."); } else { log.error("No such depth: " + depth); } sb.append("</multistatus>"); //response.setStatus(javax.servlet.http.HttpServletResponse.SC_MULTI_STATUS); response.setStatus(207, "Multi-Status"); PrintWriter w = response.getWriter(); w.print(sb); } /** * */ public void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setHeader("DAV", "1"); // TODO: Is there anything else to do?! 
} /** * Authentication * @return null when authentication successful or has already been authenticated, otherwise return response generated by web authenticator */ public HttpServletResponse doAuthenticate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { // TODO/TBD: In the case of HTTP-BASIC/DIGEST one needs to check authentication with every request // TODO: enhance API with flag, e.g. session-based="true/false" // WARNING: One needs to separate doAuthenticate from the login screen generation! //if (getIdentity(request) != null) return null; WebAuthenticator wa = map.getRealm(request.getServletPath()).getWebAuthenticator(); return wa.doAuthenticate(request, response, map, reservedPrefix, xsltLoginScreenDefault, servletContextRealPath, sslPort); } catch (Exception e) { log.error(e.getMessage(), e); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return response; } } /** * Escapes all reserved xml characters (&amp; &lt; &gt; &apos; &quot;) in a string. * @param s input string * @return string with escaped characters */ public static String encodeXML(String s) { s = s.replaceAll("&", "&amp;"); s = s.replaceAll("<", "&lt;"); s = s.replaceAll(">", "&gt;"); s = s.replaceAll("'", "&apos;"); s = s.replaceAll("\"", "&quot;"); return s; } /** * Do logout * @return null for a regular logout and a Neutron response if auth scheme is Neutron */ public HttpServletResponse doLogout(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { if (isToolbarEnabled(request)) { // TODO: Check if WORLD has access to the toolbar //if (getRealm().getPolicyManager().authorize(path, new Identity(), new Usecase(TOOLBAR_USECASE))) { disableToolbar(request); //} } HttpSession session = request.getSession(true); // TODO: should we logout only from the current realm, or from all realms? // -> logout only from the current realm Realm realm = map.getRealm(request.getServletPath()); IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY); if (identityMap != null && identityMap.containsKey(realm.getID())) { log.info("Logout from realm: " + realm.getID()); identityMap.remove(realm.getID()); } String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate"); if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) { // TODO: send some XML content, e.g. <logout-successful/> response.setContentType("text/plain; charset=" + DEFAULT_ENCODING); response.setStatus(response.SC_OK); PrintWriter writer = response.getWriter(); writer.print("Neutron Logout Successful!"); return response; } if (log.isDebugEnabled()) log.debug("Regular Logout Successful!"); //return null; URL url = new URL(getRequestURLQS(request, null, false).toString()); String urlWithoutLogoutQS = url.toString().substring(0, url.toString().lastIndexOf("?")); log.warn("Redirect to original request: " + urlWithoutLogoutQS); //response.sendRedirect(url.toString()); // 302 // TODO: Just remove logout part from query string! 
(http://127.0.0.1:8080/yanel/test/use-cases/index.xhtml?yanel.resource.usecase=checkout&yanel.usecase=logout) // TODO: Alternative solution: http://bugzilla.wyona.com/cgi-bin/bugzilla/show_bug.cgi?id=6465 response.setHeader("Location", urlWithoutLogoutQS.toString()); //response.setHeader("Location", url.toString()); response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); // 301 //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); // 302 return response; } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } /** * Do create a new resource */ public HttpServletResponse doCreate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { log.error("Not implemented yet!"); return null; } /** * Patches the mimetype of the Content-Type response field because * Microsoft Internet Explorer does not understand application/xhtml+xml * See http://en.wikipedia.org/wiki/Criticisms_of_Internet_Explorer#XHTML */ static public String patchMimeType(String mimeType, HttpServletRequest request) throws ServletException, IOException { String httpAcceptMediaTypes = request.getHeader("Accept"); if (mimeType != null && mimeType.equals("application/xhtml+xml") && httpAcceptMediaTypes != null && httpAcceptMediaTypes.indexOf("application/xhtml+xml") < 0) { log.info("Patch contentType with text/html because client (" + request.getHeader("User-Agent") + ") does not seem to understand application/xhtml+xml"); return "text/html"; } return mimeType; } /** * Intercept InputStream and log content ... */ public InputStream intercept(InputStream in) throws IOException { java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream(); byte[] buf = new byte[8192]; int bytesR; while ((bytesR = in.read(buf)) != -1) { baos.write(buf, 0, bytesR); } // Buffer within memory (TODO: Maybe replace with File-buffering ...) // http://www-128.ibm.com/developerworks/java/library/j-io1/ byte[] memBuffer = baos.toByteArray(); log.error("DEBUG: InputStream: " + baos); return new java.io.ByteArrayInputStream(memBuffer); } /** * Generate a "Yanel" response (page information, 404, internal server error, ...) 
*/ private void setYanelOutput(HttpServletRequest request, HttpServletResponse response, Document doc) throws ServletException { String path = getResource(request, response).getPath(); String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(path); try { String yanelFormat = request.getParameter("yanel.format"); if(yanelFormat != null && yanelFormat.equals("xml")) { response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); OutputStream out = response.getOutputStream(); javax.xml.transform.TransformerFactory.newInstance().newTransformer().transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(out)); out.close(); } else { String mimeType = patchMimeType("application/xhtml+xml", request); response.setContentType(mimeType + "; charset=" + DEFAULT_ENCODING); // create identity transformer which serves as a dom-to-sax transformer TransformerIdentityImpl transformer = new TransformerIdentityImpl(); // create xslt transformer: SAXTransformerFactory saxTransformerFactory = (SAXTransformerFactory)SAXTransformerFactory.newInstance(); TransformerHandler xsltTransformer = saxTransformerFactory.newTransformerHandler(new StreamSource(xsltInfoAndException)); xsltTransformer.getTransformer().setParameter("yanel.back2realm", backToRealm); xsltTransformer.getTransformer().setParameter("yanel.reservedPrefix", reservedPrefix); // create i18n transformer: I18nTransformer2 i18nTransformer = new I18nTransformer2("global", getLanguage(request),yanel.getMap().getRealm(request.getServletPath()).getDefaultLanguage()); CatalogResolver catalogResolver = new CatalogResolver(); i18nTransformer.setEntityResolver(new CatalogResolver()); // create serializer: Serializer serializer = SerializerFactory.getSerializer(SerializerFactory.XHTML_STRICT); // chain everything together (create a pipeline): xsltTransformer.setResult(new SAXResult(i18nTransformer)); i18nTransformer.setResult(new SAXResult(serializer.asContentHandler())); serializer.setOutputStream(response.getOutputStream()); // execute pipeline: transformer.transform(new DOMSource(doc), new SAXResult(xsltTransformer)); } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage()); } } /** * Get language with the following priorization: 1) yanel.meta.language query string parameter, 2) Accept-Language header, 3) Default en */ private String getLanguage(HttpServletRequest request) throws Exception { // TODO: Shouldn't this be replaced by Resource.getRequestedLanguage() or Resource.getContentLanguage() ?! String language = request.getParameter("yanel.meta.language"); if (language == null) { language = request.getHeader("Accept-Language"); if (language != null) { int commaIndex = language.indexOf(","); if (commaIndex > 0) { language = language.substring(0, commaIndex); } int dashIndex = language.indexOf("-"); if (dashIndex > 0) { language = language.substring(0, dashIndex); } } } if(language != null && language.length() > 0) return language; return yanel.getMap().getRealm(request.getServletPath()).getDefaultLanguage(); } /** * Write to output stream of modifiable resource */ private void write(InputStream in, OutputStream out, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { if (out != null) { log.debug("Content-Type: " + request.getContentType()); // TODO: Compare mime-type from response with mime-type of resource //if (contentType.equals("text/xml")) { ... 
} byte[] buffer = new byte[8192]; int bytesRead; while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } out.flush(); out.close(); StringBuffer sb = new StringBuffer(); sb.append("<?xml version=\"1.0\"?>"); sb.append("<html>"); sb.append("<body>"); sb.append("<p>Data has been saved ...</p>"); sb.append("</body>"); sb.append("</html>"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); response.setContentType("application/xhtml+xml; charset=" + DEFAULT_ENCODING); PrintWriter w = response.getWriter(); w.print(sb); log.info("Data has been saved ..."); return; } else { log.error("OutputStream is null!"); StringBuffer sb = new StringBuffer(); sb.append("<?xml version=\"1.0\"?>"); sb.append("<html>"); sb.append("<body>"); sb.append("<p>Exception: OutputStream is null!</p>"); sb.append("</body>"); sb.append("</html>"); PrintWriter w = response.getWriter(); w.print(sb); response.setContentType("application/xhtml+xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return; } } /** * Get toolbar menus */ private String getToolbarMenus(Resource resource, HttpServletRequest request) throws ServletException, IOException, Exception { org.wyona.yanel.servlet.menu.Menu menu = null; String menuRealmClass = resource.getRealm().getMenuClass(); if (menuRealmClass != null) { menu = (org.wyona.yanel.servlet.menu.Menu) Class.forName(menuRealmClass).newInstance(); // TODO: Check resource configuration ... //} else if (RESOURCE) { } else { menu = new org.wyona.yanel.servlet.menu.impl.DefaultMenu(); } return menu.getAllMenus(resource, request, map, reservedPrefix); } /** * Gets the part of the toolbar which has to be inserted into the html header. * @param resource * @param request * @return * @throws Exception */ private String getToolbarHeader(Resource resource, HttpServletRequest request) throws Exception { String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(resource.getPath()); StringBuffer sb= new StringBuffer(); sb.append("<link type=\"text/css\" href=\"" + backToRealm + reservedPrefix + "/toolbar.css\" rel=\"stylesheet\"/>"); sb.append(System.getProperty("line.separator")); sb.append("<style type=\"text/css\" media=\"screen\">"); sb.append(System.getProperty("line.separator")); sb.append("#yaneltoolbar_menu li li.haschild{ background: lightgrey url(" + backToRealm + reservedPrefix + "/yanel-img/submenu.gif) no-repeat 98% 50%;}"); sb.append(System.getProperty("line.separator")); sb.append("#yaneltoolbar_menu li li.haschild:hover{ background: lightsteelblue url(" + backToRealm + reservedPrefix + "/yanel-img/submenu.gif) no-repeat 98% 50%;}"); sb.append("</style>"); sb.append(System.getProperty("line.separator")); // If browser is Mozilla (gecko engine rv:1.7) if (request.getHeader("User-Agent").indexOf("rv:1.7") >= 0) { sb.append("<link type=\"text/css\" href=\"" + backToRealm + reservedPrefix + "/toolbarMozilla.css\" rel=\"stylesheet\"/>"); sb.append(System.getProperty("line.separator")); } // If browser is IE if (request.getHeader("User-Agent").indexOf("compatible; MSIE") >= 0 && request.getHeader("User-Agent").indexOf("Windows") >= 0 ) { sb.append("<link type=\"text/css\" href=\"" + backToRealm + reservedPrefix + "/toolbarIE.css\" rel=\"stylesheet\"/>"); sb.append(System.getProperty("line.separator")); sb.append("<style type=\"text/css\" media=\"screen\">"); sb.append(" body{behavior:url(" + backToRealm + reservedPrefix + "/csshover.htc);font-size:100%;}"); sb.append("</style>"); } // 
If browser is IE6 if (request.getHeader("User-Agent").indexOf("compatible; MSIE 6") >= 0 && request.getHeader("User-Agent").indexOf("Windows") >= 0 ) { sb.append("<link type=\"text/css\" href=\"" + backToRealm + reservedPrefix + "/toolbarIE6.css\" rel=\"stylesheet\"/>"); sb.append(System.getProperty("line.separator")); } return sb.toString(); } /** * Gets the part of the toolbar which has to be inserted into the html body * right after the opening body tag. * @param resource * @param request * @return * @throws Exception */ private String getToolbarBodyStart(Resource resource, HttpServletRequest request) throws Exception { String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(resource.getPath()); StringBuffer buf = new StringBuffer(); buf.append("<div id=\"yaneltoolbar_headerwrap\">"); buf.append("<div id=\"yaneltoolbar_menu\">"); buf.append(getToolbarMenus(resource, request)); buf.append("</div>"); buf.append("<span id=\"yaneltoolbar_info\">"); //buf.append("Version: " + yanel.getVersion() + "-r" + yanel.getRevision() + "&#160;&#160;"); buf.append("Realm: <b>" + resource.getRealm().getName() + "</b>&#160;&#160;"); Identity identity = getIdentity(request); if (identity != null && !identity.isWorld()) { buf.append("User: <b>" + identity.getUsername() + "</b>"); } else { buf.append("User: <b>Not signed in!</b>"); } buf.append("</span>"); buf.append("<span id=\"yaneltoolbar_logo\">"); buf.append("<img src=\"" + backToRealm + reservedPrefix + "/yanel_toolbar_logo.png\"/>"); buf.append("</span>"); buf.append("</div>"); buf.append("<div id=\"yaneltoolbar_middlewrap\">"); return buf.toString(); } /** * Gets the part of the toolbar which has to be inserted into the html body * right before the closing body tag. * @param resource * @param request * @return * @throws Exception */ private String getToolbarBodyEnd(Resource resource, HttpServletRequest request) throws Exception { return "</div>"; } /** * Merges the toolbar and the page content. This will parse the html stream and add * the toolbar. 
* @param request * @param response * @param resource * @param view * @throws Exception */ private void mergeToolbarWithContent(HttpServletRequest request, HttpServletResponse response, Resource resource, View view) throws Exception { String encoding = view.getEncoding(); if (encoding == null) { encoding = "UTF-8"; } InputStreamReader reader = new InputStreamReader(view.getInputStream(), encoding); OutputStreamWriter writer = new OutputStreamWriter(response.getOutputStream(), encoding); int c; int state = OUTSIDE_TAG; StringBuffer tagBuf = null; int headcount = 0; int bodycount = 0; while ((c = reader.read()) != -1) { switch (state) { case OUTSIDE_TAG: if (c == '<') { tagBuf = new StringBuffer("<"); state = INSIDE_TAG; } else { writer.write(c); } break; case INSIDE_TAG: //writer.write(c); if (c == '>') { state = OUTSIDE_TAG; tagBuf.append((char)c); String tag = tagBuf.toString(); if (tag.startsWith("<head")) { if (headcount == 0) { writer.write(tag, 0, tag.length()); String toolbarString = getToolbarHeader(resource, request); writer.write(toolbarString, 0, toolbarString.length()); } else { writer.write(tag, 0, tag.length()); } headcount++; } else if (tag.startsWith("<body")) { if (bodycount == 0) { writer.write(tag, 0, tag.length()); String toolbarString = getToolbarBodyStart(resource, request); writer.write(toolbarString, 0, toolbarString.length()); } else { writer.write(tag, 0, tag.length()); } bodycount++; } else if (tag.equals("</body>")) { bodycount--; if (bodycount == 0) { String toolbarString = getToolbarBodyEnd(resource, request); writer.write(toolbarString, 0, toolbarString.length()); writer.write(tag, 0, tag.length()); } else { writer.write(tag, 0, tag.length()); } } else { writer.write(tag, 0, tag.length()); } } else { tagBuf.append((char)c); } break; } } writer.flush(); writer.close(); reader.close(); } /** * Gets the identity from the session associated with the given request. 
* @param request * @return identity or null if there is no identity in the session for the current realm or if there is no session at all */ private Identity getIdentity(HttpServletRequest request) throws Exception { Realm realm = map.getRealm(request.getServletPath()); HttpSession session = request.getSession(false); if (session != null) { IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY); if (identityMap != null) { Identity identity = (Identity)identityMap.get(realm.getID()); if (identity != null) { return identity; } } } // HTTP BASIC Authentication (For clients such as for instance Sunbird, OpenOffice or cadaver) // IMPORT NOTE: BASIC Authentication needs to be checked on every request, because clients often do not support session handling String authorizationHeader = request.getHeader("Authorization"); if (log.isDebugEnabled()) log.debug("Checking for Authorization Header: " + authorizationHeader); if (authorizationHeader != null) { if (authorizationHeader.toUpperCase().startsWith("BASIC")) { log.warn("Using BASIC authorization ..."); // Get encoded user and password, comes after "BASIC " String userpassEncoded = authorizationHeader.substring(6); // Decode it, using any base 64 decoder sun.misc.BASE64Decoder dec = new sun.misc.BASE64Decoder(); String userpassDecoded = new String(dec.decodeBuffer(userpassEncoded)); log.debug("Username and Password Decoded: " + userpassDecoded); String[] up = userpassDecoded.split(":"); String username = up[0]; String password = up[1]; log.debug("username: " + username + ", password: " + password); try { User user = realm.getIdentityManager().getUserManager().getUser(username); if (user != null && user.authenticate(password)) { return new Identity(user); } else { log.warn("HTTP BASIC Authentication failed for " + username + "!"); /* response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\""); response.sendError(response.SC_UNAUTHORIZED); PrintWriter writer = response.getWriter(); writer.print("BASIC Authentication Failed!"); return response; */ } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } else if (authorizationHeader.toUpperCase().startsWith("DIGEST")) { log.error("DIGEST is not implemented"); /* authorized = false; response.sendError(response.SC_UNAUTHORIZED); response.setHeader("WWW-Authenticate", "DIGEST realm=\"" + realm.getName() + "\""); PrintWriter writer = response.getWriter(); writer.print("DIGEST is not implemented!"); */ } else { log.warn("No such authorization type implemented: " + authorizationHeader); } } if(log.isDebugEnabled()) log.debug("No identity yet (Neither session nor header based! Identity is set to WORLD!)"); // TBD: Should add world identity to the session? 
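        // Neither the session nor the Authorization header yielded a user, so fall back to the
        // anonymous WORLD identity (see Identity.isWorld()).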
return new Identity(); } /** * Create a DOM Document */ static public Document getDocument(String namespace, String localname) throws Exception { javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder(); org.w3c.dom.DOMImplementation impl = parser.getDOMImplementation(); org.w3c.dom.DocumentType doctype = null; Document doc = impl.createDocument(namespace, localname, doctype); if (namespace != null) { doc.getDocumentElement().setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns", namespace); } return doc; } /** * Get global data located below reserved prefix */ public void getGlobalData(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { Resource resource = getResource(request, response); String path = resource.getPath(); String viewId = request.getParameter(VIEW_ID_PARAM_NAME); if (path.startsWith("/" + reservedPrefix + "/users/")) { String userName = path.substring(reservedPrefix.length() + 8); userName = userName.substring(0, userName.lastIndexOf(".html")); try { java.util.Map properties = new HashMap(); properties.put("user", userName); ResourceConfiguration rc = new ResourceConfiguration("yanel-user", "http://www.wyona.org/yanel/resource/1.0", properties); Realm realm = yanel.getMap().getRealm(request.getServletPath()); Resource yanelUserResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, rc); View view = ((ViewableV2) yanelUserResource).getView(viewId); if (view != null) { if (generateResponse(view, yanelUserResource, request, response, getDocument(NAMESPACE, "yanel"), -1, -1) != null) return; } } catch (Exception e) { throw new ServletException(e); } response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); return; } else if (path.indexOf("user-mgmt/list-users.html") >= 0) { log.warn("TODO: Implementation not finished yet!"); } else if (path.indexOf("about.html") >= 0) { response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); StringBuffer sb = new StringBuffer("<html>"); sb.append("<head><title>About Yanel</title></head>"); sb.append("<body><h1>About Yanel</h1><p>Version " + yanel.getVersion() + "-r" + yanel.getRevision() + "</p><p>Copyright &#169; 2005 - 2008 Wyona. 
All rights reserved.</p></body>"); sb.append("</html>"); PrintWriter w = response.getWriter(); w.print(sb); return; } else if (path.indexOf("data-repository-sitetree.html") >= 0) { try { Realm realm = yanel.getMap().getRealm(request.getServletPath()); File drsResConfigFile = getGlobalResourceConfiguration("data-repo-sitetree_yanel-rc.xml", realm); ResourceConfiguration rc = new ResourceConfiguration(new java.io.FileInputStream(drsResConfigFile)); Resource sitetreeResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, rc); View view = ((ViewableV2) sitetreeResource).getView(viewId); if (view != null) { if (generateResponse(view, sitetreeResource, request, response, getDocument(NAMESPACE, "yanel"), -1, -1) != null) return; } } catch (Exception e) { throw new ServletException(e); } } else if (path.indexOf("resource-types") >= 0) { //log.debug("Resource path: " + resource.getPath()); String[] pathPart1 = path.split("/resource-types/"); String[] pathPart2 = pathPart1[1].split("::"); String[] pathPart3 = pathPart2[1].split("/"); String name = pathPart3[0]; // The request (see resource.getPath()) seems to replace 'http://' or 'http%3a%2f%2f' by 'http:/', so let's change this back String namespace = pathPart2[0].replaceAll("http:/", "http://"); try { java.util.Map properties = new HashMap(); Realm realm = yanel.getMap().getRealm(request.getServletPath()); ResourceConfiguration rc = new ResourceConfiguration(name, namespace, properties); Resource resourceOfPrefix = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, rc); String htdocsPath; if (pathPart2[1].indexOf("/" + reservedPrefix + "/") >= 0) { htdocsPath = "rtyanelhtdocs:" + path.split("::" + name)[1].split("/" + reservedPrefix)[1].replace('/', File.separatorChar); } else { htdocsPath = "rthtdocs:" + path.split("::" + name)[1].replace('/', File.separatorChar); } SourceResolver resolver = new SourceResolver(resourceOfPrefix); Source source = resolver.resolve(htdocsPath, null); InputStream htdocIn = ((StreamSource) source).getInputStream(); if (htdocIn != null) { log.debug("Resource-Type specific data: " + htdocsPath); // TODO: Set HTTP header (mime-type, size, etc.) String mimeType = guessMimeType(FilenameUtils.getExtension(FilenameUtils.getName(htdocsPath))); response.setHeader("Content-Type", mimeType); byte buffer[] = new byte[8192]; int bytesRead; OutputStream out = response.getOutputStream(); while ((bytesRead = htdocIn.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } htdocIn.close(); // allow client-side caching: if (cacheExpires != 0) { setExpiresHeader(response, cacheExpires); } return; } else { log.error("No such file or directory: " + htdocsPath); response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); return; } } catch (Exception e) { throw new ServletException(e); } } else { File globalFile = org.wyona.commons.io.FileUtil.file(servletContextRealPath, "htdocs" + File.separator + path.substring(reservedPrefix.length() + 2)); if (globalFile.exists()) { log.debug("Global data: " + globalFile); // TODO: Set HTTP header (mime-type, size, etc.) 
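                // For now only the Content-Type is derived (from the file extension); Content-Length
                // and Last-Modified are not set yet, as noted in the TODO above.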
String mimeType = guessMimeType(FilenameUtils.getExtension(globalFile.getName())); response.setHeader("Content-Type", mimeType); byte buffer[] = new byte[8192]; int bytesRead; InputStream in = new java.io.FileInputStream(globalFile); OutputStream out = response.getOutputStream(); while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } in.close(); // allow client-side caching: if (cacheExpires != 0) { setExpiresHeader(response, cacheExpires); } return; } else { log.error("No such file or directory: " + globalFile); response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); return; } } } private void setExpiresHeader(HttpServletResponse response, int hours) { Calendar calendar = Calendar.getInstance(); calendar.add(Calendar.HOUR_OF_DAY, hours); String expires = DateUtil.formatRFC822GMT(calendar.getTime()); response.setHeader("Expires", expires); } /** * Generate response from a resource view */ private HttpServletResponse generateResponse(View view, Resource res, HttpServletRequest request, HttpServletResponse response, Document doc, long size, long lastModified) throws ServletException, IOException { // Check if the view contains the response, otherwise assume that the resource wrote the response, and just return. // TODO: There seem like no header fields are being set (e.g. Content-Length, ...). Please see below ... // Check if viewable resource has already created a response if (!view.isResponse()) return response; // Set encoding if (view.getEncoding() != null) { response.setContentType(patchMimeType(view.getMimeType(), request) + "; charset=" + view.getEncoding()); } else if (res.getConfiguration() != null && res.getConfiguration().getEncoding() != null) { response.setContentType(patchMimeType(view.getMimeType(), request) + "; charset=" + res.getConfiguration().getEncoding()); } else { // try to guess if we have to set the default encoding String mimeType = view.getMimeType(); if (mimeType != null && mimeType.startsWith("text") || mimeType.equals("application/xml") || mimeType.equals("application/xhtml+xml") || mimeType.equals("application/atom+xml") || mimeType.equals("application/x-javascript")) { response.setContentType(patchMimeType(mimeType, request) + "; charset=" + DEFAULT_ENCODING); } else { // probably binary mime-type, don't set encoding response.setContentType(patchMimeType(mimeType, request)); } } // Set HTTP headers: HashMap headers = view.getHttpHeaders(); Iterator iter = headers.keySet().iterator(); while (iter.hasNext()) { String name = (String)iter.next(); String value = (String)headers.get(name); if (log.isDebugEnabled()) { log.debug("set http header: " + name + ": " + value); } response.setHeader(name, value); } // Possibly embed toolbar: // TODO: Check if user is authorized to actually see toolbar (Current flaw: Enabled Toolbar, Login, Toolbar is enabled, Logout, Toolbar is still visible!) if (isToolbarEnabled(request)) { String mimeType = view.getMimeType(); if (mimeType != null && mimeType.indexOf("html") > 0) { // TODO: What about other query strings or frames or TinyMCE? 
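                // Merge the toolbar only into plain page views: requests carrying a
                // yanel.resource.usecase parameter (e.g. editing or checkout) are served untouched.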
if (request.getParameter("yanel.resource.usecase") == null) { if (toolbarMasterSwitch.equals("on")) { OutputStream os = response.getOutputStream(); try { mergeToolbarWithContent(request, response, res, view); } catch (Exception e) { log.error(e, e); String message = "Error merging toolbar into content: " + e.toString(); Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return response; } return response; } else { log.info("Toolbar has been disabled. Please check web.xml!"); } } else { log.error("DEBUG: Exception to the rule: " + request.getParameter("yanel.resource.usecase")); } } else { log.debug("No HTML related mime type: " + mimeType); } } else { log.debug("Toolbar is turned off."); } InputStream is = view.getInputStream(); if (is != null) { // Write actual content into response byte buffer[] = new byte[8192]; int bytesRead; bytesRead = is.read(buffer); // TODO: Compare If-Modified-Since with lastModified and return 304 without content resp. check on ETag String ifModifiedSince = request.getHeader("If-Modified-Since"); if (ifModifiedSince != null) { if (log.isDebugEnabled()) log.debug("TODO: Implement 304 ..."); } if(lastModified >= 0) response.setDateHeader("Last-Modified", lastModified); if(size > 0) { if (log.isDebugEnabled()) log.debug("Size of " + request.getRequestURI() + ": " + size); response.setContentLength((int) size); } else { if (log.isDebugEnabled()) log.debug("No size for " + request.getRequestURI() + ": " + size); } // Check if InputStream is empty if (bytesRead != -1) { java.io.OutputStream os = response.getOutputStream(); os.write(buffer, 0, bytesRead); while ((bytesRead = is.read(buffer)) != -1) { os.write(buffer, 0, bytesRead); } os.close(); } else { log.warn("Returned content size of request '" + request.getRequestURI() + "' is 0"); } is.close(); return response; } else { String message = "Returned InputStream of request '" + request.getRequestURI() + "' is null!"; Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); is.close(); return response; } } /** * */ public void destroy() { super.destroy(); yanel.destroy(); log.warn("Yanel webapp has been shut down."); } /** * */ private Usecase getUsecase(HttpServletRequest request) { Usecase usecase = null; // TODO: Replace hardcoded roles by mapping between roles amd query strings ... 
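        // Map the incoming request (query string parameters, Content-Type and HTTP method) onto a
        // coarse-grained usecase such as "write", "open", "delete" or "view", which doAccessControl()
        // later passes to the realm's PolicyManager for authorization.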
String value = request.getParameter("yanel.resource.usecase"); String yanelUsecaseValue = request.getParameter("yanel.usecase"); String workflowTransitionValue = request.getParameter("yanel.resource.workflow.transition"); String contentType = request.getContentType(); String method = request.getMethod(); if (value != null && value.equals("save")) { log.debug("Save data ..."); usecase = new Usecase("write"); } else if (value != null && value.equals("checkin")) { log.debug("Checkin data ..."); usecase = new Usecase("write"); } else if (yanelUsecaseValue != null && yanelUsecaseValue.equals("create")) { log.debug("Create new resource ..."); usecase = new Usecase("resource.create"); } else if (value != null && value.equals("introspection")) { if(log.isDebugEnabled()) log.debug("Dynamically generated introspection ..."); usecase = new Usecase("introspection"); } else if (value != null && value.equals("checkout")) { log.debug("Checkout data ..."); usecase = new Usecase("open"); } else if (contentType != null && contentType.indexOf("application/atom+xml") >= 0 && (method.equals(METHOD_PUT) || method.equals(METHOD_POST))) { // TODO: Is posting atom entries different from a general post (see below)?! log.error("DEBUG: Write/Checkin Atom entry ..."); usecase = new Usecase("write"); // TODO: METHOD_POST is not generally protected, but save, checkin, application/atom+xml are being protected. See doPost(.... } else if (method.equals(METHOD_PUT)) { log.error("DEBUG: Upload data ..."); usecase = new Usecase("write"); } else if (method.equals(METHOD_DELETE)) { log.error("DEBUG: Delete resource (HTTP method delete)"); usecase = new Usecase("delete"); } else if (value != null && value.equals("delete")) { log.info("Delete resource (yanel resource usecase delete)"); usecase = new Usecase("delete"); } else if (workflowTransitionValue != null) { // TODO: How shall we protect workflow transitions?! log.error("DEBUG: Workflow transition ..."); usecase = new Usecase("view"); } else { usecase = new Usecase("view"); } value = request.getParameter("yanel.toolbar"); if (value != null && value.equals("on")) { log.debug("Turn on toolbar ..."); usecase = new Usecase(TOOLBAR_USECASE); } value = request.getParameter("yanel.policy"); if (value != null) { if (value.equals("create")) { usecase = new Usecase("policy.create"); } else if (value.equals("read")) { usecase = new Usecase("policy.read"); } else if (value.equals("update")) { usecase = new Usecase("policy.update"); } else if (value.equals("delete")) { usecase = new Usecase("policy.delete"); } else { log.warn("No such policy usecase: " + value); } } return usecase; } /** * Handle access policy requests (CRUD, whereas delete is not implemented yet!) 
*/ private void doAccessPolicyRequest(HttpServletRequest request, HttpServletResponse response, String usecase) throws ServletException, IOException { try { String viewId = request.getParameter(VIEW_ID_PARAM_NAME); Realm realm = map.getRealm(request.getServletPath()); String path = map.getPath(realm, request.getServletPath()); File pmrcGlobalFile = getGlobalResourceConfiguration("policy-manager_yanel-rc.xml", realm); Resource policyManagerResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, new ResourceConfiguration(new java.io.FileInputStream(pmrcGlobalFile))); View view = ((ViewableV2) policyManagerResource).getView(viewId); if (view != null) { if (generateResponse(view, policyManagerResource, request, response, getDocument(NAMESPACE, "yanel"), -1, -1) != null) return; } log.error("Something went terribly wrong!"); response.getWriter().print("Something went terribly wrong!"); return; } catch(Exception e) { log.error(e, e); throw new ServletException(e.getMessage()); } } /** * */ private void enableToolbar(HttpServletRequest request) { request.getSession(true).setAttribute(TOOLBAR_KEY, "on"); } /** * */ private void disableToolbar(HttpServletRequest request) { request.getSession(true).setAttribute(TOOLBAR_KEY, "off"); } /** * */ private boolean isToolbarEnabled(HttpServletRequest request) { String toolbarStatus = (String) request.getSession(true).getAttribute(TOOLBAR_KEY); if (toolbarStatus != null && toolbarStatus.equals("on")) { String yanelToolbar = request.getParameter("yanel.toolbar"); if(yanelToolbar != null && request.getParameter("yanel.toolbar").equals("suppress")) { return false; } else { return true; } } return false; } /** * Handle delete usecase */ private void handleDeleteUsecase(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String confirmed = request.getParameter("confirmed"); if (confirmed != null) { String path = getResource(request, response).getPath(); log.warn("Really delete " + path); doDelete(request, response); return; } else { log.warn("Delete has not been confirmed by client yet!"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); StringBuffer sb = new StringBuffer("<html><body>Do you really want to delete this page? <a href=\"?yanel.resource.usecase=delete&confirmed\">YES</a>, <a href=\"\">no</a></body></html>"); PrintWriter w = response.getWriter(); w.print(sb); return; } } /** * */ private File getGlobalResourceConfiguration(String resConfigName, Realm realm) { // TODO: Introduce a repository for the Yanel webapp File realmDir = new File(realm.getConfigFile().getParent()); File globalResConfigFile = org.wyona.commons.io.FileUtil.file(realmDir.getAbsolutePath(), "src" + File.separator + "webapp" + File.separator + "global-resource-configs/" + resConfigName); if (!globalResConfigFile.isFile()) { // Fallback to global configuration globalResConfigFile = org.wyona.commons.io.FileUtil.file(servletContextRealPath, "global-resource-configs/" + resConfigName); } return globalResConfigFile; } /** * */ private String getStackTrace(Exception e) { java.io.StringWriter sw = new java.io.StringWriter(); e.printStackTrace(new java.io.PrintWriter(sw)); return sw.toString(); } /** * */ private void do404(HttpServletRequest request, HttpServletResponse response, Document doc, String exceptionMessage) throws ServletException { // TODO: Log all 404 within a dedicated file (with client info attached) such that an admin can react to it ... 
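        // Render the 404 through the global '404_yanel-rc.xml' resource configuration below, so that
        // realms can serve a themed "not found" page instead of the servlet container's default.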
String message = "No such node/resource exception: " + exceptionMessage; log.warn(message); /* Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); exceptionElement.setAttributeNS(NAMESPACE, "status", "404"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); setYanelOutput(request, response, doc); return; */ // TODO: Finish the XML (as it used to be before)! response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); try { Realm realm = yanel.getMap().getRealm(request.getServletPath()); File pnfResConfigFile = getGlobalResourceConfiguration("404_yanel-rc.xml", realm); ResourceConfiguration rc = new ResourceConfiguration(new java.io.FileInputStream(pnfResConfigFile)); String path = getResource(request, response).getPath(); Resource pageNotFoundResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, rc); String viewId = request.getParameter(VIEW_ID_PARAM_NAME); if (request.getParameter("yanel.format") != null) { // backwards compatible viewId = request.getParameter("yanel.format"); } View view = ((ViewableV2) pageNotFoundResource).getView(viewId); if (view != null) { if (generateResponse(view, pageNotFoundResource, request, response, getDocument(NAMESPACE, "yanel"), -1, -1) != null) return; } log.error("404 seems to be broken!"); return; } catch (Exception e) { log.error(e, e); return; } } }
src/webapp/src/java/org/wyona/yanel/servlet/YanelServlet.java
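/*
 * Sketch only: the guessMimeType(...) helper further down keeps its extension-to-MIME-type table as an
 * if/else chain and carries a TODO to move it into a MimeTypeUtil. A minimal map-based version of that
 * idea could look like the following (class name and table contents are illustrative, not part of Yanel):
 */
class MimeTypeUtilSketch {

    private static final java.util.Map<String, String> TYPES = new java.util.HashMap<String, String>();
    static {
        TYPES.put("html", "text/html");
        TYPES.put("htm", "text/html");
        TYPES.put("css", "text/css");
        TYPES.put("txt", "text/plain");
        TYPES.put("js", "application/x-javascript");
        TYPES.put("jpg", "image/jpeg");
        TYPES.put("jpeg", "image/jpeg");
        TYPES.put("gif", "image/gif");
        TYPES.put("pdf", "application/pdf");
        TYPES.put("zip", "application/zip");
        TYPES.put("htc", "text/x-component");
    }

    /** Returns the MIME type for a file extension, defaulting to application/octet-stream. */
    static String guessMimeType(String extension) {
        String type = TYPES.get(extension.toLowerCase());
        return type != null ? type : "application/octet-stream";
    }

    public static void main(String[] args) {
        System.out.println(guessMimeType("PDF")); // application/pdf
    }
}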
package org.wyona.yanel.servlet; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.BufferedReader; import java.io.InputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Writer; import java.net.URL; import java.util.Calendar; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.Properties; import java.util.Vector; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.sax.TransformerHandler; import javax.xml.transform.stream.StreamSource; import org.wyona.yanel.core.StateOfView; import org.wyona.yanel.core.Environment; import org.wyona.yanel.core.Path; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.ResourceTypeDefinition; import org.wyona.yanel.core.ResourceTypeIdentifier; import org.wyona.yanel.core.ResourceTypeRegistry; import org.wyona.yanel.core.Yanel; import org.wyona.yanel.core.api.attributes.IntrospectableV1; import org.wyona.yanel.core.api.attributes.ModifiableV1; import org.wyona.yanel.core.api.attributes.ModifiableV2; import org.wyona.yanel.core.api.attributes.TranslatableV1; import org.wyona.yanel.core.api.attributes.VersionableV2; import org.wyona.yanel.core.api.attributes.ViewableV1; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.api.attributes.WorkflowableV1; import org.wyona.yanel.core.api.security.WebAuthenticator; import org.wyona.yanel.core.attributes.versionable.RevisionInformation; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.attributes.viewable.ViewDescriptor; import org.wyona.yanel.core.navigation.Node; import org.wyona.yanel.core.navigation.Sitetree; import org.wyona.yanel.core.serialization.SerializerFactory; import org.wyona.yanel.core.source.SourceResolver; import org.wyona.yanel.core.transformation.I18nTransformer2; import org.wyona.yanel.core.util.DateUtil; import org.wyona.yanel.core.workflow.WorkflowException; import org.wyona.yanel.core.workflow.WorkflowHelper; import org.wyona.yanel.core.map.Map; import org.wyona.yanel.core.map.Realm; import org.wyona.yanel.core.util.ResourceAttributeHelper; import org.wyona.yanel.servlet.IdentityMap; import org.wyona.yanel.servlet.communication.HttpRequest; import org.wyona.yanel.servlet.communication.HttpResponse; import org.wyona.security.core.api.Identity; import org.wyona.security.core.api.IdentityManager; import org.wyona.security.core.api.Policy; import org.wyona.security.core.api.PolicyManager; import org.wyona.security.core.api.Role; import org.wyona.security.core.api.Usecase; import org.wyona.security.core.api.User; import org.apache.log4j.Category; import org.apache.xalan.transformer.TransformerIdentityImpl; import org.apache.xml.resolver.tools.CatalogResolver; import 
org.apache.xml.serializer.Serializer; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder; import org.apache.commons.io.FilenameUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.XMLReader; import org.xml.sax.helpers.XMLReaderFactory; /** * */ public class YanelServlet extends HttpServlet { private static Category log = Category.getInstance(YanelServlet.class); private ServletConfig config; ResourceTypeRegistry rtr; //PolicyManager pm; //IdentityManager im; Map map; Yanel yanel; Sitetree sitetree; File xsltInfoAndException; String xsltLoginScreenDefault; public static String IDENTITY_MAP_KEY = "identity-map"; private static String TOOLBAR_KEY = "toolbar"; private static String TOOLBAR_USECASE = "toolbar"; public static String NAMESPACE = "http://www.wyona.org/yanel/1.0"; private static final String METHOD_PROPFIND = "PROPFIND"; private static final String METHOD_OPTIONS = "OPTIONS"; private static final String METHOD_GET = "GET"; private static final String METHOD_POST = "POST"; private static final String METHOD_PUT = "PUT"; private static final String METHOD_DELETE = "DELETE"; private static final int INSIDE_TAG = 0; private static final int OUTSIDE_TAG = 1; private String sslPort = null; private String toolbarMasterSwitch = "off"; private String reservedPrefix; private String servletContextRealPath; private int cacheExpires = 0; public static final String DEFAULT_ENCODING = "UTF-8"; public static final String VIEW_ID_PARAM_NAME = "yanel.resource.viewid"; /** * */ public void init(ServletConfig config) throws ServletException { this.config = config; servletContextRealPath = config.getServletContext().getRealPath("/"); xsltInfoAndException = org.wyona.commons.io.FileUtil.file(servletContextRealPath, config.getInitParameter("exception-and-info-screen-xslt")); xsltLoginScreenDefault = config.getInitParameter("login-screen-xslt"); try { yanel = Yanel.getInstance(); yanel.init(); rtr = yanel.getResourceTypeRegistry(); map = (Map) yanel.getBeanFactory().getBean("map"); sitetree = (Sitetree) yanel.getBeanFactory().getBean("repo-navigation"); sslPort = config.getInitParameter("ssl-port"); toolbarMasterSwitch = config.getInitParameter("toolbar-master-switch"); reservedPrefix = yanel.getReservedPrefix(); String expires = config.getInitParameter("static-content-cache-expires"); if (expires != null) { this.cacheExpires = Integer.parseInt(expires); } } catch (Exception e) { log.error(e); throw new ServletException(e.getMessage(), e); } } /** * Dispatch requests */ public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String httpAcceptMediaTypes = request.getHeader("Accept"); String httpAcceptLanguage = request.getHeader("Accept-Language"); String yanelUsecase = request.getParameter("yanel.usecase"); if(yanelUsecase != null && yanelUsecase.equals("logout")) { // Logout from Yanel if(doLogout(request, response) != null) return; } else if(yanelUsecase != null && yanelUsecase.equals("create")) { // Create a new resource if(doCreate(request, response) != null) return; } // Check authorization and if authorization failed, then try to authenticate if(doAccessControl(request, response) != null) { // Either redirect (after successful authentication) or access denied (and response will send the login screen) return; } else { if (log.isDebugEnabled()) log.debug("Access granted: " + request.getServletPath()); } // Check for 
requests re policies String policyRequestPara = request.getParameter("yanel.policy"); if (policyRequestPara != null) { doAccessPolicyRequest(request, response, policyRequestPara); return; } // Check for requests for global data Resource resource = getResource(request, response); String path = resource.getPath(); if (path.indexOf("/" + reservedPrefix + "/") == 0) { getGlobalData(request, response); return; } String value = request.getParameter("yanel.resource.usecase"); // Delete node if (value != null && value.equals("delete")) { handleDeleteUsecase(request, response); return; } // Delegate ... String method = request.getMethod(); if (method.equals(METHOD_PROPFIND)) { doPropfind(request, response); } else if (method.equals(METHOD_GET)) { doGet(request, response); } else if (method.equals(METHOD_POST)) { doPost(request, response); } else if (method.equals(METHOD_PUT)) { doPut(request, response); } else if (method.equals(METHOD_DELETE)) { doDelete(request, response); } else if (method.equals(METHOD_OPTIONS)) { doOptions(request, response); } else { log.error("No such method implemented: " + method); response.sendError(response.SC_NOT_IMPLEMENTED); } } /** * */ public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { HttpSession session = request.getSession(true); Resource resource = getResource(request, response); // Enable or disable toolbar switchToolbar(request); // Check for requests refered by WebDAV String yanelWebDAV = request.getParameter("yanel.webdav"); if(yanelWebDAV != null && yanelWebDAV.equals("propfind1")) { log.error("DEBUG: WebDAV client (" + request.getHeader("User-Agent") + ") requests to \"edit\" a resource: " + resource.getRealm() + ", " + resource.getPath()); //return; } String value = request.getParameter("yanel.resource.usecase"); try { if (value != null && value.equals("release-lock")) { log.debug("Release lock ..."); if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { VersionableV2 versionable = (VersionableV2)resource; try { versionable.cancelCheckout(); } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException("Releasing of lock failed because of: " + resource.getPath() + " " + e.getMessage(), e); } } return; } else { getContent(request, response); return; } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } /** * Checks if the yanel.toolbar request parameter is set and stores * the value of the parameter in the session. * @param request */ private void switchToolbar(HttpServletRequest request) { // Check for toolbar ... String yanelToolbar = request.getParameter("yanel.toolbar"); if(yanelToolbar != null) { HttpSession session = request.getSession(false); if (yanelToolbar.equals("on")) { log.info("Turn on toolbar!"); enableToolbar(request); } else if (yanelToolbar.equals("off")) { log.info("Turn off toolbar!"); disableToolbar(request); } else { log.warn("No such toolbar value: " + yanelToolbar); } } } /** * Returns the mime-type according to the given file extension. * Default is application/octet-stream. 
     * @param extension
     * @return
     */
    private String guessMimeType(String extension) {
        String ext = extension.toLowerCase();
        if (ext.equals("html") || ext.equals("htm")) return "text/html";
        if (ext.equals("css")) return "text/css";
        if (ext.equals("txt")) return "text/plain";
        if (ext.equals("js")) return "application/x-javascript";
        if (ext.equals("jpg") || ext.equals("jpeg")) return "image/jpeg";
        if (ext.equals("gif")) return "image/gif";
        if (ext.equals("pdf")) return "application/pdf";
        if (ext.equals("zip")) return "application/zip";
        if (ext.equals("htc")) return "text/x-component";
        // TODO: add more mime types
        // TODO: and move to MimeTypeUtil
        return "application/octet-stream"; // default
    }

    /**
     * Get view of resource
     */
    private void getContent(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        View view = null;
        org.w3c.dom.Document doc = null;
        try {
            doc = getDocument(NAMESPACE, "yanel");
        } catch(Exception e) {
            log.error(e.getMessage(), e);
            throw new ServletException(e.getMessage());
        }

        Element rootElement = doc.getDocumentElement();
        rootElement.setAttribute("servlet-context-real-path", servletContextRealPath);
        Element requestElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "request"));
        requestElement.setAttributeNS(NAMESPACE, "uri", request.getRequestURI());
        requestElement.setAttributeNS(NAMESPACE, "servlet-path", request.getServletPath());

        HttpSession session = request.getSession(true);
        Element sessionElement = (Element) rootElement.appendChild(doc.createElement("session"));
        sessionElement.setAttribute("id", session.getId());
        Enumeration attrNames = session.getAttributeNames();
        if (!attrNames.hasMoreElements()) {
            Element sessionNoAttributesElement = (Element) sessionElement.appendChild(doc.createElement("no-attributes"));
        }
        while (attrNames.hasMoreElements()) {
            String name = (String)attrNames.nextElement();
            String value = session.getAttribute(name).toString();
            Element sessionAttributeElement = (Element) sessionElement.appendChild(doc.createElement("attribute"));
            sessionAttributeElement.setAttribute("name", name);
            sessionAttributeElement.appendChild(doc.createTextNode(value));
        }

        String usecase = request.getParameter("yanel.resource.usecase");

        Resource res = null;
        long lastModified = -1;
        long size = -1;
        try {
            Environment environment = getEnvironment(request, response);
            res = getResource(request, response);
            if (res != null) {
                Element resourceElement = (Element) rootElement.appendChild(doc.createElement("resource"));
                ResourceConfiguration resConfig = res.getConfiguration();
                if (resConfig != null) {
                    Element resConfigElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "config"));
                    resConfigElement.setAttributeNS(NAMESPACE, "rti-name", resConfig.getName());
                    resConfigElement.setAttributeNS(NAMESPACE, "rti-namespace", resConfig.getNamespace());
                } else {
                    Element noResConfigElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "no-config"));
                }
                Element realmElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "realm"));
                realmElement.setAttributeNS(NAMESPACE, "name", res.getRealm().getName());
                realmElement.setAttributeNS(NAMESPACE, "rid", res.getRealm().getID());
                realmElement.setAttributeNS(NAMESPACE, "prefix", res.getRealm().getMountPoint());
                Element identityManagerElement = (Element) realmElement.appendChild(doc.createElementNS(NAMESPACE, "identity-manager"));
                Element userManagerElement = (Element)
identityManagerElement.appendChild(doc.createElementNS(NAMESPACE, "user-manager")); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "1")) { if (log.isDebugEnabled()) log.debug("Resource is viewable V1"); Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view")); viewElement.setAttributeNS(NAMESPACE, "version", "1"); // TODO: The same as for ViewableV2 ... ViewDescriptor[] vd = ((ViewableV1) res).getViewDescriptors(); if (vd != null) { for (int i = 0; i < vd.length; i++) { Element descriptorElement = (Element) viewElement.appendChild(doc.createElement("descriptor")); if (vd[i].getMimeType() != null) { descriptorElement.appendChild(doc.createTextNode(vd[i].getMimeType())); } descriptorElement.setAttributeNS(NAMESPACE, "id", vd[i].getId()); } } else { viewElement.appendChild(doc.createTextNode("No View Descriptors!")); } String viewId = request.getParameter(VIEW_ID_PARAM_NAME); try { view = ((ViewableV1) res).getView(request, viewId); } catch(org.wyona.yarep.core.NoSuchNodeException e) { do404(request, response, doc, e.getMessage()); return; } catch(Exception e) { log.error(e.getMessage(), e); String message = e.toString(); log.error(e.getMessage(), e); Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); exceptionElement.setAttributeNS(NAMESPACE, "status", "500"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "2")) { if (log.isDebugEnabled()) log.debug("Resource is viewable V2"); if (!((ViewableV2) res).exists()) { //log.warn("No such ViewableV2 resource: " + res.getPath()); //log.warn("TODO: It seems like many ViewableV2 resources are not implementing exists() properly!"); //do404(request, response, doc, res.getPath()); //return; } String viewId = request.getParameter(VIEW_ID_PARAM_NAME); Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view")); viewElement.setAttributeNS(NAMESPACE, "version", "2"); ViewDescriptor[] vd = ((ViewableV2) res).getViewDescriptors(); if (vd != null) { for (int i = 0; i < vd.length; i++) { Element descriptorElement = (Element) viewElement.appendChild(doc.createElement("descriptor")); if (vd[i].getMimeType() != null) { descriptorElement.appendChild(doc.createTextNode(vd[i].getMimeType())); } descriptorElement.setAttributeNS(NAMESPACE, "id", vd[i].getId()); } } else { viewElement.appendChild(doc.createTextNode("No View Descriptors!")); } size = ((ViewableV2) res).getSize(); Element sizeElement = (Element) resourceElement.appendChild(doc.createElement("size")); sizeElement.appendChild(doc.createTextNode(String.valueOf(size))); try { String revisionName = request.getParameter("yanel.resource.revision"); if (revisionName != null && ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) { view = ((VersionableV2) res).getView(viewId, revisionName); } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Workflowable", "1") && environment.getStateOfView().equals(StateOfView.LIVE)) { WorkflowableV1 workflowable = (WorkflowableV1)res; if (workflowable.isLive()) { view = workflowable.getLiveView(viewId); } else { String message = "The resource '" + res.getPath() + "' is WorkflowableV1, but has not been published yet. 
Instead the live version, the most recent version will be displayed!"; log.warn(message); view = ((ViewableV2) res).getView(viewId); // TODO: Maybe sending a 404 instead the most recent version should be configurable! /* do404(request, response, doc, message); return; */ } } else { view = ((ViewableV2) res).getView(viewId); } } catch(org.wyona.yarep.core.NoSuchNodeException e) { String message = "" + e; log.warn(message); do404(request, response, doc, message); return; } catch(org.wyona.yanel.core.ResourceNotFoundException e) { String message = "" + e; log.warn(message); do404(request, response, doc, message); return; } } else { Element noViewElement = (Element) resourceElement.appendChild(doc.createElement("not-viewable")); String message = res.getClass().getName() + " is not viewable! (" + res.getPath() + ", " + res.getRealm() + ")"; noViewElement.appendChild(doc.createTextNode(res.getClass().getName() + " is not viewable!")); log.error(message); Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); exceptionElement.setAttributeNS(NAMESPACE, "status", "501"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_IMPLEMENTED); setYanelOutput(request, response, doc); return; } if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) { lastModified = ((ModifiableV2) res).getLastModified(); Element lastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("last-modified")); lastModifiedElement.appendChild(doc.createTextNode(new java.util.Date(lastModified).toString())); } else { Element noLastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("no-last-modified")); } if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) { // retrieve the revisions, but only in the meta usecase (for performance reasons): if (request.getParameter("yanel.resource.meta") != null) { RevisionInformation[] revisions = ((VersionableV2)res).getRevisions(); Element revisionsElement = (Element) resourceElement.appendChild(doc.createElement("revisions")); if (revisions != null && revisions.length > 0) { for (int i = revisions.length - 1; i >= 0; i--) { Element revisionElement = (Element) revisionsElement.appendChild(doc.createElement("revision")); Element revisionNameElement = (Element) revisionElement.appendChild(doc.createElement("name")); revisionNameElement.appendChild(doc.createTextNode(revisions[i].getName())); Element revisionDateElement = (Element) revisionElement.appendChild(doc.createElement("date")); revisionDateElement.appendChild(doc.createTextNode(DateUtil.format(revisions[i].getDate()))); Element revisionUserElement = (Element) revisionElement.appendChild(doc.createElement("user")); revisionUserElement.appendChild(doc.createTextNode(revisions[i].getUser())); Element revisionCommentElement = (Element) revisionElement.appendChild(doc.createElement("comment")); revisionCommentElement.appendChild(doc.createTextNode(revisions[i].getComment())); } } else { Element noRevisionsYetElement = (Element) resourceElement.appendChild(doc.createElement("no-revisions-yet")); } } } else { Element notVersionableElement = (Element) resourceElement.appendChild(doc.createElement("not-versionable")); } if (ResourceAttributeHelper.hasAttributeImplemented(res, "Translatable", "1")) { TranslatableV1 translatable = ((TranslatableV1) res); Element translationsElement = (Element) 
resourceElement.appendChild(doc.createElement("translations")); String[] languages = translatable.getLanguages(); for (int i=0; i<languages.length; i++) { Element translationElement = (Element) translationsElement.appendChild(doc.createElement("translation")); translationElement.setAttribute("language", languages[i]); String path = translatable.getTranslation(languages[i]).getPath(); translationElement.setAttribute("path", path); } } if (usecase != null && usecase.equals("checkout")) { if(log.isDebugEnabled()) log.debug("Checkout data ..."); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) { // note: this will throw an exception if the document is checked out already // by another user. String userID = environment.getIdentity().getUsername(); VersionableV2 versionable = (VersionableV2)res; if (versionable.isCheckedOut()) { String checkoutUserID = versionable.getCheckoutUserID(); if (checkoutUserID.equals(userID)) { log.warn("Resource " + res.getPath() + " is already checked out by this user: " + checkoutUserID); } else { throw new Exception("Resource is already checked out by another user: " + checkoutUserID); } } else { versionable.checkout(userID); } } else { log.warn("Acquire lock has not been implemented yet ...!"); // acquireLock(); } } } else { Element resourceIsNullElement = (Element) rootElement.appendChild(doc.createElement("resource-is-null")); } } catch(org.wyona.yarep.core.NoSuchNodeException e) { String message = "" + e; log.warn(e, e); do404(request, response, doc, message); return; } catch(org.wyona.yanel.core.ResourceNotFoundException e) { String message = "" + e; log.warn(e, e); do404(request, response, doc, message); return; } catch(Exception e) { log.error(e.getMessage(), e); String message = e.toString() + "\n\n" + getStackTrace(e); //String message = e.toString(); Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } // TODO: Move this introspection generation somewhere else ... 
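        // The "introspection" usecase below answers with the resource's introspection document
        // (IntrospectableV1.getIntrospection()) as application/xml; resources that do not implement
        // IntrospectableV1 get an exception element in the yanel output instead.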
try { if (usecase != null && usecase.equals("introspection")) { if (ResourceAttributeHelper.hasAttributeImplemented(res, "Introspectable", "1")) { String introspection = ((IntrospectableV1)res).getIntrospection(); response.setContentType("application/xml"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); response.getWriter().print(introspection); } else { String message = "Resource is not introspectable."; Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); setYanelOutput(request, response, doc); } return; } } catch(Exception e) { log.error(e.getMessage(), e); Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(e.getMessage())); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } String meta = request.getParameter("yanel.resource.meta"); if (meta != null) { if (meta.length() > 0) { log.warn("TODO: meta: " + meta); } else { log.debug("Show all meta"); } response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); setYanelOutput(request, response, doc); return; } if (view != null) { if (generateResponse(view, res, request, response, doc, size, lastModified) != null) return; } else { String message = "View is null!"; Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); } response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } /** * */ public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String transition = request.getParameter("yanel.resource.workflow.transition"); if (transition != null) { Resource resource = getResource(request, response); if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Workflowable", "1")) { WorkflowableV1 workflowable = (WorkflowableV1)resource; try { String revision = request.getParameter("yanel.resource.revision"); workflowable.doTransition(transition, revision); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>"); sb.append(workflowable.getWorkflowIntrospection()); PrintWriter w = response.getWriter(); w.print(sb); return; } catch (WorkflowException e) { // TODO: Implement response if transition has failed ... log.error(e, e); throw new ServletException(e.getMessage(), e); } } else { log.warn("Resource not workflowable: " + resource.getPath()); } } String value = request.getParameter("yanel.resource.usecase"); if (value != null && value.equals("save")) { log.debug("Save data ..."); save(request, response, false); return; } else if (value != null && value.equals("checkin")) { log.debug("Checkin data ..."); save(request, response, true); log.warn("Release lock has not been implemented yet ..."); // releaseLock(); return; } else { log.info("No parameter yanel.resource.usecase!"); String contentType = request.getContentType(); // TODO: Check for type (see section 9.2 of APP spec (e.g. 
draft 16) if (contentType.indexOf("application/atom+xml") >= 0) { InputStream in = intercept(request.getInputStream()); // Create new Atom entry try { String atomEntryUniversalName = "<{http://www.wyona.org/yanel/resource/1.0}atom-entry/>"; Realm realm = yanel.getMap().getRealm(request.getServletPath()); String newEntryPath = yanel.getMap().getPath(realm, request.getServletPath() + "/" + new java.util.Date().getTime() + ".xml"); log.error("DEBUG: Realm and Path of new Atom entry: " + realm + " " + newEntryPath); Resource atomEntryResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, newEntryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new ResourceTypeIdentifier(atomEntryUniversalName, null)); ((ModifiableV2)atomEntryResource).write(in); byte buffer[] = new byte[8192]; int bytesRead; InputStream resourceIn = ((ModifiableV2)atomEntryResource).getInputStream(); OutputStream responseOut = response.getOutputStream(); while ((bytesRead = resourceIn.read(buffer)) != -1) { responseOut.write(buffer, 0, bytesRead); } resourceIn.close(); //responseOut.close(); // TODO: Fix Location ... response.setHeader("Location", "http://ulysses.wyona.org" + newEntryPath); response.setStatus(javax.servlet.http.HttpServletResponse.SC_CREATED); return; } catch (Exception e) { log.error(e.getMessage(), e); throw new IOException(e.getMessage()); } } // Enable or disable toolbar switchToolbar(request); getContent(request, response); } } /** * HTTP PUT implementation */ public void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO: Reuse code doPost resp. share code with doPut String value = request.getParameter("yanel.resource.usecase"); if (value != null && value.equals("save")) { log.debug("Save data ..."); save(request, response, false); return; } else if (value != null && value.equals("checkin")) { log.debug("Checkin data ..."); save(request, response, true); log.warn("Release lock has not been implemented yet ...!"); // releaseLock(); return; } else { log.warn("No parameter yanel.resource.usecase!"); String contentType = request.getContentType(); if (contentType != null && contentType.indexOf("application/atom+xml") >= 0) { InputStream in = intercept(request.getInputStream()); // Overwrite existing atom entry try { String atomEntryUniversalName = "<{http://www.wyona.org/yanel/resource/1.0}atom-entry/>"; Realm realm = yanel.getMap().getRealm(request.getServletPath()); String entryPath = yanel.getMap().getPath(realm, request.getServletPath()); log.error("DEBUG: Realm and Path of new Atom entry: " + realm + " " + entryPath); Resource atomEntryResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, entryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new ResourceTypeIdentifier(atomEntryUniversalName, null)); // TODO: There seems to be a problem ... 
((ModifiableV2)atomEntryResource).write(in); // NOTE: This method does not update updated date /* OutputStream out = ((ModifiableV2)atomEntry).getOutputStream(entryPath); byte buffer[] = new byte[8192]; int bytesRead; while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } */ log.info("Atom entry has been saved: " + entryPath); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); return; } catch (Exception e) { log.error(e.getMessage(), e); throw new IOException(e.getMessage()); } } else { Resource resource = getResource(request, response); log.warn("Client (" + request.getHeader("User-Agent") + ") requests to save a resource: " + resource.getRealm() + ", " + resource.getPath()); save(request, response, false); return; } } } /** * HTTP DELETE implementation */ public void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { Resource res = getResource(request, response); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) { if (((ModifiableV2) res).delete()) { // TODO: Also delete resource config! What about access policies?! log.debug("Resource has been deleted: " + res); response.setStatus(HttpServletResponse.SC_OK); String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(res.getPath()); StringBuffer sb = new StringBuffer("<html><body>Page has been deleted! <a href=\"\">Check</a> or return to <a href=\"" + backToRealm + "\">Homepage</a>.</body></html>"); PrintWriter w = response.getWriter(); w.print(sb); return; } else { log.warn("Resource could not be deleted: " + res); response.setStatus(response.SC_FORBIDDEN); return; } } else { log.error("Resource '" + res + "' has interface ModifiableV2 not implemented." ); response.sendError(response.SC_NOT_IMPLEMENTED); return; } } catch (Exception e) { log.error("Could not delete resource with URL " + request.getRequestURL() + " " + e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } /** * */ private Resource getResource(HttpServletRequest request, HttpServletResponse response) throws ServletException { try { Realm realm = map.getRealm(request.getServletPath()); String path = map.getPath(realm, request.getServletPath()); HttpRequest httpRequest = (HttpRequest)request; HttpResponse httpResponse = new HttpResponse(response); Resource res = yanel.getResourceManager().getResource(getEnvironment(httpRequest, httpResponse), realm, path); return res; } catch(Exception e) { String errorMsg = "Could not get resource for request: " + request.getServletPath() + ": " + e.getMessage(); log.error(errorMsg, e); throw new ServletException(errorMsg, e); } } /** * */ private Environment getEnvironment(HttpServletRequest request, HttpServletResponse response) throws ServletException { Identity identity; try { identity = getIdentity(request); Realm realm = map.getRealm(request.getServletPath()); String stateOfView = StateOfView.AUTHORING; if (isToolbarEnabled(request)) { stateOfView = StateOfView.AUTHORING; } else { stateOfView = StateOfView.LIVE; } //log.debug("State of view: " + stateOfView); Environment environment = new Environment(request, response, identity, stateOfView, null); return environment; } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } /** * Save data */ private void save(HttpServletRequest request, HttpServletResponse response, boolean doCheckin) throws ServletException, IOException { log.debug("Save data ..."); Resource resource = getResource(request, response); /* -> 
commented because the current default repo implementation does not support versioning yet. if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { try { // check the resource state: Identity identity = getIdentity(request); String userID = identity.getUser().getID(); VersionableV2 versionable = (VersionableV2)resource; if (versionable.isCheckedOut()) { String checkoutUserID = versionable.getCheckoutUserID(); if (!checkoutUserID.equals(userID)) { throw new Exception("Resource is checked out by another user: " + checkoutUserID); } } else { throw new Exception("Resource is not checked out."); } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } */ InputStream in = request.getInputStream(); // TODO: Should be delegated to resource type, e.g. <{http://...}xml/>! // Check on well-formedness ... String contentType = request.getContentType(); log.debug("Content-Type: " + contentType); if (contentType != null && (contentType.indexOf("application/xml") >= 0 || contentType.indexOf("application/xhtml+xml") >= 0)) { log.info("Check well-formedness ..."); javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance(); try { javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder(); // TODO: Get log messages into log4j ... //parser.setErrorHandler(...); java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream(); byte[] buf = new byte[8192]; int bytesR; while ((bytesR = in.read(buf)) != -1) { baos.write(buf, 0, bytesR); } // Buffer within memory (TODO: Maybe replace with File-buffering ...) // http://www-128.ibm.com/developerworks/java/library/j-io1/ byte[] memBuffer = baos.toByteArray(); // NOTE: DOCTYPE is being resolved/retrieved (e.g. xhtml schema from w3.org) also // if isValidating is set to false. // Hence, for performance and network reasons we use a local catalog ... // Also see http://www.xml.com/pub/a/2004/03/03/catalogs.html // resp. http://xml.apache.org/commons/components/resolver/ // TODO: What about a resolver factory? 
parser.setEntityResolver(new org.apache.xml.resolver.tools.CatalogResolver()); parser.parse(new ByteArrayInputStream(memBuffer)); in = new ByteArrayInputStream(memBuffer); //org.w3c.dom.Document document = parser.parse(new ByteArrayInputStream(memBuffer)); } catch (org.xml.sax.SAXException e) { log.warn("Data is not well-formed: "+e.getMessage()); StringBuffer sb = new StringBuffer(); sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"data-not-well-formed\">"); sb.append("<message>Data is not well-formed: "+e.getMessage()+"</message>"); sb.append("</exception>"); response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); PrintWriter w = response.getWriter(); w.print(sb); return; } catch (Exception e) { log.error(e.getMessage(), e); StringBuffer sb = new StringBuffer(); sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"neutron\">"); //sb.append("<message>" + e.getStackTrace() + "</message>"); //sb.append("<message>" + e.getMessage() + "</message>"); sb.append("<message>" + e + "</message>"); sb.append("</exception>"); response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); PrintWriter w = response.getWriter(); w.print(sb); return; } log.info("Data seems to be well-formed :-)"); } else { log.info("No well-formedness check required for content type: " + contentType); } // IMPORTANT TODO: Use ModifiableV2.write(InputStream in) such that resource can modify data during saving resp. check if getOutputStream is equals null and then use write .... OutputStream out = null; Resource res = getResource(request, response); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "1")) { out = ((ModifiableV1) res).getOutputStream(new Path(request.getServletPath())); write(in, out, request, response); } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) { try { out = ((ModifiableV2) res).getOutputStream(); if (out != null) { write(in, out, request, response); } else { log.warn("getOutputStream() returned null, hence fallback to write()"); ((ModifiableV2) res).write(in); } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } else { String message = res.getClass().getName() + " is not modifiable (neither V1 nor V2)!"; log.warn(message); StringBuffer sb = new StringBuffer(); // TODO: Differentiate between Neutron based and other clients ... sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"neutron\">"); sb.append("<message>" + message + "</message>"); sb.append("</exception>"); response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); PrintWriter w = response.getWriter(); w.print(sb); } if (doCheckin) { if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { VersionableV2 versionable = (VersionableV2)resource; try { versionable.checkin("updated"); } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException("Could not check in resource: " + resource.getPath() + " " + e.getMessage(), e); } } } } /** * Check authorization and if not authorized then authenticate. 
Return null if authorization granted, otherwise return 401 and appropriate response such that client can provide credentials for authentication */ private HttpServletResponse doAccessControl(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // Get usecase Usecase usecase = getUsecase(request); // Get identity, realm, path Identity identity; Realm realm; String path; try { identity = getIdentity(request); realm = map.getRealm(request.getServletPath()); path = map.getPath(realm, request.getServletPath()); } catch (Exception e) { log.error(e, e); throw new ServletException(e.getMessage()); } // Check Authorization boolean authorized = false; try { if (log.isDebugEnabled()) log.debug("Check authorization: realm: " + realm + ", path: " + path + ", identity: " + identity + ", Usecase: " + usecase.getName()); authorized = realm.getPolicyManager().authorize(path, identity, usecase); if (log.isDebugEnabled()) log.debug("Check authorization result: " + authorized); } catch (Exception e) { log.error(e, e); throw new ServletException(e.getMessage(), e); } if(!authorized) { // TODO: Implement HTTP BASIC/DIGEST response (see above) log.info("Access denied: " + getRequestURLQS(request, null, false)); if(!request.isSecure()) { if(sslPort != null) { log.info("Redirect to SSL ..."); try { URL url = new URL(getRequestURLQS(request, null, false).toString()); url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile()); if (realm.isProxySet()) { if (realm.getProxySSLPort() >= 0) { log.debug("Use configured port: " + realm.getProxySSLPort()); url = new URL(url.getProtocol(), url.getHost(), new Integer(realm.getProxySSLPort()).intValue(), url.getFile()); } else { log.debug("Use default port: " + url.getDefaultPort()); // NOTE: getDefaultPort depends on the Protocol (e.g. https is 443) url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile()); } } log.info("Redirect to SSL: " + url); response.setHeader("Location", url.toString()); // TODO: Yulup has a bug re TEMPORARY_REDIRECT //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); return response; } catch (Exception e) { log.error(e); } } else { log.warn("SSL does not seem to be configured!"); } } if(doAuthenticate(request, response) != null) { log.info("Return response of web authenticator."); /* NOTE: Such a response can have different reasons: - Either no credentials provided yet and web authenticator is generating a response to fetch credentials - Or authentication failed and web authenticator is resending response to fetch again credentials"); - Or authentication was successful and web authenticator sends a redirect */ return response; } else { try { log.warn("Authentication was successful for user: " + getIdentity(request).getUsername()); } catch (Exception e) { log.error(e, e); } URL url = new URL(getRequestURLQS(request, null, false).toString()); if (sslPort != null) { url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile()); } log.warn("Redirect to original request: " + url); //response.sendRedirect(url.toString()); // 302 // TODO: Yulup has a bug re TEMPORARY_REDIRECT (or is the problem that the load balancer is rewritting 302 reponses?!) 
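                // At this point authentication succeeded: send the client back to the originally requested
                // URL (rewritten to https above when an ssl-port is configured). A 301 is used instead of a
                // 302 because of the client/load-balancer issue noted in the TODO above.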
response.setHeader("Location", url.toString()); response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); // 301 //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); // 302 return response; } } else { log.info("Access granted: " + getRequestURLQS(request, null, false)); return null; } } /** * Patch request with proxy settings re realm configuration */ private String getRequestURLQS(HttpServletRequest request, String addQS, boolean xml) { try { Realm realm = map.getRealm(request.getServletPath()); // TODO: Handle this exception more gracefully! if (realm == null) log.error("No realm found for path " +request.getServletPath()); String proxyHostName = realm.getProxyHostName(); int proxyPort = realm.getProxyPort(); String proxyPrefix = realm.getProxyPrefix(); URL url = null; url = new URL(request.getRequestURL().toString()); //if(proxyHostName != null || proxyPort >= null || proxyPrefix != null) { if(realm.isProxySet()) { if (proxyHostName != null) { url = new URL(url.getProtocol(), proxyHostName, url.getPort(), url.getFile()); } if (proxyPort >= 0) { url = new URL(url.getProtocol(), url.getHost(), proxyPort, url.getFile()); } else { url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile()); } if (proxyPrefix != null) { url = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getFile().substring(proxyPrefix.length())); } //log.debug("Proxy enabled for this realm resp. request: " + realm + ", " + url); } else { //log.debug("No proxy set for this realm resp. request: " + realm + ", " + url); } String urlQS = url.toString(); if (request.getQueryString() != null) { urlQS = urlQS + "?" + request.getQueryString(); if (addQS != null) urlQS = urlQS + "&" + addQS; } else { if (addQS != null) urlQS = urlQS + "?" + addQS; } if (xml) urlQS = urlQS.replaceAll("&", "&amp;"); if(log.isDebugEnabled()) log.debug("Request: " + urlQS); return urlQS; } catch (Exception e) { log.error(e); return null; } } /** * Also see https://svn.apache.org/repos/asf/tomcat/container/branches/tc5.0.x/catalina/src/share/org/apache/catalina/servlets/WebdavServlet.java * Also maybe interesting http://sourceforge.net/projects/openharmonise */ public void doPropfind(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { Resource resource = getResource(request, response); //Node node = resource.getRealm().getSitetree().getNode(resource.getPath()); Node node = sitetree.getNode(resource.getRealm(),resource.getPath()); String depth = request.getHeader("Depth"); StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>"); sb.append("<multistatus xmlns=\"DAV:\">"); if (depth.equals("0")) { if (node.isCollection()) { sb.append(" <response>"); sb.append(" <href>"+request.getRequestURI()+"</href>"); sb.append(" <propstat>"); sb.append(" <prop>"); sb.append(" <resourcetype><collection/></resourcetype>"); sb.append(" <getcontenttype>httpd/unix-directory</getcontenttype>"); sb.append(" </prop>"); sb.append(" <status>HTTP/1.1 200 OK</status>"); sb.append(" </propstat>"); sb.append(" </response>"); } else if (node.isResource()) { sb.append(" <response>"); sb.append(" <href>"+request.getRequestURI()+"</href>"); sb.append(" <propstat>"); sb.append(" <prop>"); sb.append(" <resourcetype/>"); // TODO: Set mime type of node! sb.append(" <getcontenttype>application/octet-stream</getcontenttype>"); // TODO: Set content length and last modified! 
sb.append(" <getcontentlength>0</getcontentlength>"); sb.append(" <getlastmodified>1969.02.16</getlastmodified>"); // See http://www.webdav.org/specs/rfc2518.html#PROPERTY_source, http://wiki.zope.org/HiperDom/RoundtripEditingDiscussion sb.append(" <source>\n"); sb.append(" <link>\n"); sb.append(" <src>" + request.getRequestURI() + "</src>\n"); sb.append(" <dst>" + request.getRequestURI() + "?yanel.resource.modifiable.source</dst>\n"); sb.append(" </link>\n"); sb.append(" </source>\n"); sb.append(" </prop>"); sb.append(" <status>HTTP/1.1 200 OK</status>"); sb.append(" </propstat>"); sb.append(" </response>"); } else { log.error("Neither collection nor resource!"); } } else if (depth.equals("1")) { // TODO: Shouldn't one check with isCollection() first?! Node[] children = node.getChildren(); if (children != null) { for (int i = 0; i < children.length; i++) { if (children[i].isCollection()) { sb.append(" <response>\n"); sb.append(" <href>" + request.getRequestURI() + "/" + children[i].getName() + "/</href>\n"); sb.append(" <propstat>\n"); sb.append(" <prop>\n"); sb.append(" <displayname>" + children[i].getName() + "</displayname>\n"); sb.append(" <resourcetype><collection/></resourcetype>\n"); sb.append(" <getcontenttype>httpd/unix-directory</getcontenttype>\n"); sb.append(" </prop>\n"); sb.append(" <status>HTTP/1.1 200 OK</status>\n"); sb.append(" </propstat>\n"); sb.append(" </response>\n"); } else if(children[i].isResource()) { sb.append(" <response>\n"); sb.append(" <href>" + request.getRequestURI() + "/" + children[i].getName() + "?yanel.webdav=propfind1</href>\n"); sb.append(" <propstat>\n"); sb.append(" <prop>\n"); sb.append(" <displayname>" + children[i].getName() + "</displayname>\n"); sb.append(" <resourcetype/>\n"); // TODO: Set mime type of node! sb.append(" <getcontenttype>application/octet-stream</getcontenttype>\n"); // TODO: Set content length and last modified! sb.append(" <getcontentlength>0</getcontentlength>"); sb.append(" <getlastmodified>1969.02.16</getlastmodified>"); // See http://www.webdav.org/specs/rfc2518.html#PROPERTY_source, http://wiki.zope.org/HiperDom/RoundtripEditingDiscussion sb.append(" <source>\n"); sb.append(" <link>\n"); sb.append(" <src>" + request.getRequestURI() + "/" + children[i].getName() + "</src>\n"); sb.append(" <dst>" + request.getRequestURI() + "/" + children[i].getName() + "?yanel.resource.modifiable.source</dst>\n"); sb.append(" </link>\n"); sb.append(" </source>\n"); sb.append(" </prop>\n"); sb.append(" <status>HTTP/1.1 200 OK</status>\n"); sb.append(" </propstat>\n"); sb.append(" </response>\n"); } else { log.error("Neither collection nor resource: " + children[i].getPath()); } } } else { log.warn("No children!"); } } else if (depth.equals("infinity")) { log.warn("TODO: List children and their children and their children ..."); } else { log.error("No such depth: " + depth); } sb.append("</multistatus>"); //response.setStatus(javax.servlet.http.HttpServletResponse.SC_MULTI_STATUS); response.setStatus(207, "Multi-Status"); PrintWriter w = response.getWriter(); w.print(sb); } /** * */ public void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setHeader("DAV", "1"); // TODO: Is there anything else to do?! 
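        // A more complete WebDAV OPTIONS reply would typically also advertise the supported methods,
        // e.g. response.setHeader("Allow", "OPTIONS, GET, HEAD, POST, PUT, DELETE, PROPFIND");
        // (illustration only, not part of the original behaviour). The "DAV: 1" header above announces
        // class 1 compliance as defined by RFC 2518.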
} /** * Authentication * @return null when authentication successful or has already been authenticated, otherwise return response generated by web authenticator */ public HttpServletResponse doAuthenticate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { // TODO/TBD: In the case of HTTP-BASIC/DIGEST one needs to check authentication with every request // TODO: enhance API with flag, e.g. session-based="true/false" // WARNING: One needs to separate doAuthenticate from the login screen generation! //if (getIdentity(request) != null) return null; WebAuthenticator wa = map.getRealm(request.getServletPath()).getWebAuthenticator(); return wa.doAuthenticate(request, response, map, reservedPrefix, xsltLoginScreenDefault, servletContextRealPath, sslPort); } catch (Exception e) { log.error(e.getMessage(), e); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return response; } } /** * Escapes all reserved xml characters (&amp; &lt; &gt; &apos; &quot;) in a string. * @param s input string * @return string with escaped characters */ public static String encodeXML(String s) { s = s.replaceAll("&", "&amp;"); s = s.replaceAll("<", "&lt;"); s = s.replaceAll(">", "&gt;"); s = s.replaceAll("'", "&apos;"); s = s.replaceAll("\"", "&quot;"); return s; } /** * Do logout * @return null for a regular logout and a Neutron response if auth scheme is Neutron */ public HttpServletResponse doLogout(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { if (isToolbarEnabled(request)) { // TODO: Check if WORLD has access to the toolbar //if (getRealm().getPolicyManager().authorize(path, new Identity(), new Usecase(TOOLBAR_USECASE))) { disableToolbar(request); //} } HttpSession session = request.getSession(true); // TODO: should we logout only from the current realm, or from all realms? // -> logout only from the current realm Realm realm = map.getRealm(request.getServletPath()); IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY); if (identityMap != null && identityMap.containsKey(realm.getID())) { log.info("Logout from realm: " + realm.getID()); identityMap.remove(realm.getID()); } String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate"); if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) { // TODO: send some XML content, e.g. 
<logout-successful/> response.setContentType("text/plain; charset=" + DEFAULT_ENCODING); response.setStatus(response.SC_OK); PrintWriter writer = response.getWriter(); writer.print("Neutron Logout Successful!"); return response; } if (log.isDebugEnabled()) log.debug("Regular Logout Successful!"); //return null; URL url = new URL(getRequestURLQS(request, null, false).toString()); String urlWithoutLogoutQS = url.toString().substring(0, url.toString().lastIndexOf("?")); log.warn("Redirect to original request: " + urlWithoutLogoutQS); //response.sendRedirect(url.toString()); // 302 response.setHeader("Location", urlWithoutLogoutQS.toString()); //response.setHeader("Location", url.toString()); response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); // 301 //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); // 302 return response; } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } /** * Do create a new resource */ public HttpServletResponse doCreate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { log.error("Not implemented yet!"); return null; } /** * Patches the mimetype of the Content-Type response field because * Microsoft Internet Explorer does not understand application/xhtml+xml * See http://en.wikipedia.org/wiki/Criticisms_of_Internet_Explorer#XHTML */ static public String patchMimeType(String mimeType, HttpServletRequest request) throws ServletException, IOException { String httpAcceptMediaTypes = request.getHeader("Accept"); if (mimeType != null && mimeType.equals("application/xhtml+xml") && httpAcceptMediaTypes != null && httpAcceptMediaTypes.indexOf("application/xhtml+xml") < 0) { log.info("Patch contentType with text/html because client (" + request.getHeader("User-Agent") + ") does not seem to understand application/xhtml+xml"); return "text/html"; } return mimeType; } /** * Intercept InputStream and log content ... */ public InputStream intercept(InputStream in) throws IOException { java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream(); byte[] buf = new byte[8192]; int bytesR; while ((bytesR = in.read(buf)) != -1) { baos.write(buf, 0, bytesR); } // Buffer within memory (TODO: Maybe replace with File-buffering ...) // http://www-128.ibm.com/developerworks/java/library/j-io1/ byte[] memBuffer = baos.toByteArray(); log.error("DEBUG: InputStream: " + baos); return new java.io.ByteArrayInputStream(memBuffer); } /** * Generate a "Yanel" response (page information, 404, internal server error, ...) 
*/ private void setYanelOutput(HttpServletRequest request, HttpServletResponse response, Document doc) throws ServletException { String path = getResource(request, response).getPath(); String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(path); try { String yanelFormat = request.getParameter("yanel.format"); if(yanelFormat != null && yanelFormat.equals("xml")) { response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); OutputStream out = response.getOutputStream(); javax.xml.transform.TransformerFactory.newInstance().newTransformer().transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(out)); out.close(); } else { String mimeType = patchMimeType("application/xhtml+xml", request); response.setContentType(mimeType + "; charset=" + DEFAULT_ENCODING); // create identity transformer which serves as a dom-to-sax transformer TransformerIdentityImpl transformer = new TransformerIdentityImpl(); // create xslt transformer: SAXTransformerFactory saxTransformerFactory = (SAXTransformerFactory)SAXTransformerFactory.newInstance(); TransformerHandler xsltTransformer = saxTransformerFactory.newTransformerHandler(new StreamSource(xsltInfoAndException)); xsltTransformer.getTransformer().setParameter("yanel.back2realm", backToRealm); xsltTransformer.getTransformer().setParameter("yanel.reservedPrefix", reservedPrefix); // create i18n transformer: I18nTransformer2 i18nTransformer = new I18nTransformer2("global", getLanguage(request),yanel.getMap().getRealm(request.getServletPath()).getDefaultLanguage()); CatalogResolver catalogResolver = new CatalogResolver(); i18nTransformer.setEntityResolver(new CatalogResolver()); // create serializer: Serializer serializer = SerializerFactory.getSerializer(SerializerFactory.XHTML_STRICT); // chain everything together (create a pipeline): xsltTransformer.setResult(new SAXResult(i18nTransformer)); i18nTransformer.setResult(new SAXResult(serializer.asContentHandler())); serializer.setOutputStream(response.getOutputStream()); // execute pipeline: transformer.transform(new DOMSource(doc), new SAXResult(xsltTransformer)); } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage()); } } /** * Get language with the following priorization: 1) yanel.meta.language query string parameter, 2) Accept-Language header, 3) Default en */ private String getLanguage(HttpServletRequest request) throws Exception { // TODO: Shouldn't this be replaced by Resource.getRequestedLanguage() or Resource.getContentLanguage() ?! String language = request.getParameter("yanel.meta.language"); if (language == null) { language = request.getHeader("Accept-Language"); if (language != null) { int commaIndex = language.indexOf(","); if (commaIndex > 0) { language = language.substring(0, commaIndex); } int dashIndex = language.indexOf("-"); if (dashIndex > 0) { language = language.substring(0, dashIndex); } } } if(language != null && language.length() > 0) return language; return yanel.getMap().getRealm(request.getServletPath()).getDefaultLanguage(); } /** * Write to output stream of modifiable resource */ private void write(InputStream in, OutputStream out, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { if (out != null) { log.debug("Content-Type: " + request.getContentType()); // TODO: Compare mime-type from response with mime-type of resource //if (contentType.equals("text/xml")) { ... 
} byte[] buffer = new byte[8192]; int bytesRead; while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } out.flush(); out.close(); StringBuffer sb = new StringBuffer(); sb.append("<?xml version=\"1.0\"?>"); sb.append("<html>"); sb.append("<body>"); sb.append("<p>Data has been saved ...</p>"); sb.append("</body>"); sb.append("</html>"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); response.setContentType("application/xhtml+xml; charset=" + DEFAULT_ENCODING); PrintWriter w = response.getWriter(); w.print(sb); log.info("Data has been saved ..."); return; } else { log.error("OutputStream is null!"); StringBuffer sb = new StringBuffer(); sb.append("<?xml version=\"1.0\"?>"); sb.append("<html>"); sb.append("<body>"); sb.append("<p>Exception: OutputStream is null!</p>"); sb.append("</body>"); sb.append("</html>"); PrintWriter w = response.getWriter(); w.print(sb); response.setContentType("application/xhtml+xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return; } } /** * Get toolbar menus */ private String getToolbarMenus(Resource resource, HttpServletRequest request) throws ServletException, IOException, Exception { org.wyona.yanel.servlet.menu.Menu menu = null; String menuRealmClass = resource.getRealm().getMenuClass(); if (menuRealmClass != null) { menu = (org.wyona.yanel.servlet.menu.Menu) Class.forName(menuRealmClass).newInstance(); // TODO: Check resource configuration ... //} else if (RESOURCE) { } else { menu = new org.wyona.yanel.servlet.menu.impl.DefaultMenu(); } return menu.getAllMenus(resource, request, map, reservedPrefix); } /** * Gets the part of the toolbar which has to be inserted into the html header. * @param resource * @param request * @return * @throws Exception */ private String getToolbarHeader(Resource resource, HttpServletRequest request) throws Exception { String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(resource.getPath()); StringBuffer sb= new StringBuffer(); sb.append("<link type=\"text/css\" href=\"" + backToRealm + reservedPrefix + "/toolbar.css\" rel=\"stylesheet\"/>"); sb.append(System.getProperty("line.separator")); sb.append("<style type=\"text/css\" media=\"screen\">"); sb.append(System.getProperty("line.separator")); sb.append("#yaneltoolbar_menu li li.haschild{ background: lightgrey url(" + backToRealm + reservedPrefix + "/yanel-img/submenu.gif) no-repeat 98% 50%;}"); sb.append(System.getProperty("line.separator")); sb.append("#yaneltoolbar_menu li li.haschild:hover{ background: lightsteelblue url(" + backToRealm + reservedPrefix + "/yanel-img/submenu.gif) no-repeat 98% 50%;}"); sb.append("</style>"); sb.append(System.getProperty("line.separator")); // If browser is Mozilla (gecko engine rv:1.7) if (request.getHeader("User-Agent").indexOf("rv:1.7") >= 0) { sb.append("<link type=\"text/css\" href=\"" + backToRealm + reservedPrefix + "/toolbarMozilla.css\" rel=\"stylesheet\"/>"); sb.append(System.getProperty("line.separator")); } // If browser is IE if (request.getHeader("User-Agent").indexOf("compatible; MSIE") >= 0 && request.getHeader("User-Agent").indexOf("Windows") >= 0 ) { sb.append("<link type=\"text/css\" href=\"" + backToRealm + reservedPrefix + "/toolbarIE.css\" rel=\"stylesheet\"/>"); sb.append(System.getProperty("line.separator")); sb.append("<style type=\"text/css\" media=\"screen\">"); sb.append(" body{behavior:url(" + backToRealm + reservedPrefix + "/csshover.htc);font-size:100%;}"); sb.append("</style>"); } // 
If browser is IE6 if (request.getHeader("User-Agent").indexOf("compatible; MSIE 6") >= 0 && request.getHeader("User-Agent").indexOf("Windows") >= 0 ) { sb.append("<link type=\"text/css\" href=\"" + backToRealm + reservedPrefix + "/toolbarIE6.css\" rel=\"stylesheet\"/>"); sb.append(System.getProperty("line.separator")); } return sb.toString(); } /** * Gets the part of the toolbar which has to be inserted into the html body * right after the opening body tag. * @param resource * @param request * @return * @throws Exception */ private String getToolbarBodyStart(Resource resource, HttpServletRequest request) throws Exception { String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(resource.getPath()); StringBuffer buf = new StringBuffer(); buf.append("<div id=\"yaneltoolbar_headerwrap\">"); buf.append("<div id=\"yaneltoolbar_menu\">"); buf.append(getToolbarMenus(resource, request)); buf.append("</div>"); buf.append("<span id=\"yaneltoolbar_info\">"); //buf.append("Version: " + yanel.getVersion() + "-r" + yanel.getRevision() + "&#160;&#160;"); buf.append("Realm: <b>" + resource.getRealm().getName() + "</b>&#160;&#160;"); Identity identity = getIdentity(request); if (identity != null && !identity.isWorld()) { buf.append("User: <b>" + identity.getUsername() + "</b>"); } else { buf.append("User: <b>Not signed in!</b>"); } buf.append("</span>"); buf.append("<span id=\"yaneltoolbar_logo\">"); buf.append("<img src=\"" + backToRealm + reservedPrefix + "/yanel_toolbar_logo.png\"/>"); buf.append("</span>"); buf.append("</div>"); buf.append("<div id=\"yaneltoolbar_middlewrap\">"); return buf.toString(); } /** * Gets the part of the toolbar which has to be inserted into the html body * right before the closing body tag. * @param resource * @param request * @return * @throws Exception */ private String getToolbarBodyEnd(Resource resource, HttpServletRequest request) throws Exception { return "</div>"; } /** * Merges the toolbar and the page content. This will parse the html stream and add * the toolbar. 
* @param request * @param response * @param resource * @param view * @throws Exception */ private void mergeToolbarWithContent(HttpServletRequest request, HttpServletResponse response, Resource resource, View view) throws Exception { String encoding = view.getEncoding(); if (encoding == null) { encoding = "UTF-8"; } InputStreamReader reader = new InputStreamReader(view.getInputStream(), encoding); OutputStreamWriter writer = new OutputStreamWriter(response.getOutputStream(), encoding); int c; int state = OUTSIDE_TAG; StringBuffer tagBuf = null; int headcount = 0; int bodycount = 0; while ((c = reader.read()) != -1) { switch (state) { case OUTSIDE_TAG: if (c == '<') { tagBuf = new StringBuffer("<"); state = INSIDE_TAG; } else { writer.write(c); } break; case INSIDE_TAG: //writer.write(c); if (c == '>') { state = OUTSIDE_TAG; tagBuf.append((char)c); String tag = tagBuf.toString(); if (tag.startsWith("<head")) { if (headcount == 0) { writer.write(tag, 0, tag.length()); String toolbarString = getToolbarHeader(resource, request); writer.write(toolbarString, 0, toolbarString.length()); } else { writer.write(tag, 0, tag.length()); } headcount++; } else if (tag.startsWith("<body")) { if (bodycount == 0) { writer.write(tag, 0, tag.length()); String toolbarString = getToolbarBodyStart(resource, request); writer.write(toolbarString, 0, toolbarString.length()); } else { writer.write(tag, 0, tag.length()); } bodycount++; } else if (tag.equals("</body>")) { bodycount--; if (bodycount == 0) { String toolbarString = getToolbarBodyEnd(resource, request); writer.write(toolbarString, 0, toolbarString.length()); writer.write(tag, 0, tag.length()); } else { writer.write(tag, 0, tag.length()); } } else { writer.write(tag, 0, tag.length()); } } else { tagBuf.append((char)c); } break; } } writer.flush(); writer.close(); reader.close(); } /** * Gets the identity from the session associated with the given request. 
* @param request * @return identity or null if there is no identity in the session for the current realm or if there is no session at all */ private Identity getIdentity(HttpServletRequest request) throws Exception { Realm realm = map.getRealm(request.getServletPath()); HttpSession session = request.getSession(false); if (session != null) { IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY); if (identityMap != null) { Identity identity = (Identity)identityMap.get(realm.getID()); if (identity != null) { return identity; } } } // HTTP BASIC Authentication (For clients such as for instance Sunbird, OpenOffice or cadaver) // IMPORT NOTE: BASIC Authentication needs to be checked on every request, because clients often do not support session handling String authorizationHeader = request.getHeader("Authorization"); if (log.isDebugEnabled()) log.debug("Checking for Authorization Header: " + authorizationHeader); if (authorizationHeader != null) { if (authorizationHeader.toUpperCase().startsWith("BASIC")) { log.warn("Using BASIC authorization ..."); // Get encoded user and password, comes after "BASIC " String userpassEncoded = authorizationHeader.substring(6); // Decode it, using any base 64 decoder sun.misc.BASE64Decoder dec = new sun.misc.BASE64Decoder(); String userpassDecoded = new String(dec.decodeBuffer(userpassEncoded)); log.debug("Username and Password Decoded: " + userpassDecoded); String[] up = userpassDecoded.split(":"); String username = up[0]; String password = up[1]; log.debug("username: " + username + ", password: " + password); try { User user = realm.getIdentityManager().getUserManager().getUser(username); if (user != null && user.authenticate(password)) { return new Identity(user); } else { log.warn("HTTP BASIC Authentication failed for " + username + "!"); /* response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\""); response.sendError(response.SC_UNAUTHORIZED); PrintWriter writer = response.getWriter(); writer.print("BASIC Authentication Failed!"); return response; */ } } catch (Exception e) { log.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } } else if (authorizationHeader.toUpperCase().startsWith("DIGEST")) { log.error("DIGEST is not implemented"); /* authorized = false; response.sendError(response.SC_UNAUTHORIZED); response.setHeader("WWW-Authenticate", "DIGEST realm=\"" + realm.getName() + "\""); PrintWriter writer = response.getWriter(); writer.print("DIGEST is not implemented!"); */ } else { log.warn("No such authorization type implemented: " + authorizationHeader); } } if(log.isDebugEnabled()) log.debug("No identity yet (Neither session nor header based! Identity is set to WORLD!)"); // TBD: Should add world identity to the session? 
return new Identity(); } /** * Create a DOM Document */ static public Document getDocument(String namespace, String localname) throws Exception { javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder(); org.w3c.dom.DOMImplementation impl = parser.getDOMImplementation(); org.w3c.dom.DocumentType doctype = null; Document doc = impl.createDocument(namespace, localname, doctype); if (namespace != null) { doc.getDocumentElement().setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns", namespace); } return doc; } /** * Get global data located below reserved prefix */ public void getGlobalData(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { Resource resource = getResource(request, response); String path = resource.getPath(); String viewId = request.getParameter(VIEW_ID_PARAM_NAME); if (path.startsWith("/" + reservedPrefix + "/users/")) { String userName = path.substring(reservedPrefix.length() + 8); userName = userName.substring(0, userName.lastIndexOf(".html")); try { java.util.Map properties = new HashMap(); properties.put("user", userName); ResourceConfiguration rc = new ResourceConfiguration("yanel-user", "http://www.wyona.org/yanel/resource/1.0", properties); Realm realm = yanel.getMap().getRealm(request.getServletPath()); Resource yanelUserResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, rc); View view = ((ViewableV2) yanelUserResource).getView(viewId); if (view != null) { if (generateResponse(view, yanelUserResource, request, response, getDocument(NAMESPACE, "yanel"), -1, -1) != null) return; } } catch (Exception e) { throw new ServletException(e); } response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); return; } else if (path.indexOf("user-mgmt/list-users.html") >= 0) { log.warn("TODO: Implementation not finished yet!"); } else if (path.indexOf("about.html") >= 0) { response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); StringBuffer sb = new StringBuffer("<html>"); sb.append("<head><title>About Yanel</title></head>"); sb.append("<body><h1>About Yanel</h1><p>Version " + yanel.getVersion() + "-r" + yanel.getRevision() + "</p><p>Copyright &#169; 2005 - 2008 Wyona. 
All rights reserved.</p></body>"); sb.append("</html>"); PrintWriter w = response.getWriter(); w.print(sb); return; } else if (path.indexOf("data-repository-sitetree.html") >= 0) { try { Realm realm = yanel.getMap().getRealm(request.getServletPath()); File drsResConfigFile = getGlobalResourceConfiguration("data-repo-sitetree_yanel-rc.xml", realm); ResourceConfiguration rc = new ResourceConfiguration(new java.io.FileInputStream(drsResConfigFile)); Resource sitetreeResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, rc); View view = ((ViewableV2) sitetreeResource).getView(viewId); if (view != null) { if (generateResponse(view, sitetreeResource, request, response, getDocument(NAMESPACE, "yanel"), -1, -1) != null) return; } } catch (Exception e) { throw new ServletException(e); } } else if (path.indexOf("resource-types") >= 0) { //log.debug("Resource path: " + resource.getPath()); String[] pathPart1 = path.split("/resource-types/"); String[] pathPart2 = pathPart1[1].split("::"); String[] pathPart3 = pathPart2[1].split("/"); String name = pathPart3[0]; // The request (see resource.getPath()) seems to replace 'http://' or 'http%3a%2f%2f' by 'http:/', so let's change this back String namespace = pathPart2[0].replaceAll("http:/", "http://"); try { java.util.Map properties = new HashMap(); Realm realm = yanel.getMap().getRealm(request.getServletPath()); ResourceConfiguration rc = new ResourceConfiguration(name, namespace, properties); Resource resourceOfPrefix = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, rc); String htdocsPath; if (pathPart2[1].indexOf("/" + reservedPrefix + "/") >= 0) { htdocsPath = "rtyanelhtdocs:" + path.split("::" + name)[1].split("/" + reservedPrefix)[1].replace('/', File.separatorChar); } else { htdocsPath = "rthtdocs:" + path.split("::" + name)[1].replace('/', File.separatorChar); } SourceResolver resolver = new SourceResolver(resourceOfPrefix); Source source = resolver.resolve(htdocsPath, null); InputStream htdocIn = ((StreamSource) source).getInputStream(); if (htdocIn != null) { log.debug("Resource-Type specific data: " + htdocsPath); // TODO: Set HTTP header (mime-type, size, etc.) String mimeType = guessMimeType(FilenameUtils.getExtension(FilenameUtils.getName(htdocsPath))); response.setHeader("Content-Type", mimeType); byte buffer[] = new byte[8192]; int bytesRead; OutputStream out = response.getOutputStream(); while ((bytesRead = htdocIn.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } htdocIn.close(); // allow client-side caching: if (cacheExpires != 0) { setExpiresHeader(response, cacheExpires); } return; } else { log.error("No such file or directory: " + htdocsPath); response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); return; } } catch (Exception e) { throw new ServletException(e); } } else { File globalFile = org.wyona.commons.io.FileUtil.file(servletContextRealPath, "htdocs" + File.separator + path.substring(reservedPrefix.length() + 2)); if (globalFile.exists()) { log.debug("Global data: " + globalFile); // TODO: Set HTTP header (mime-type, size, etc.) 
String mimeType = guessMimeType(FilenameUtils.getExtension(globalFile.getName())); response.setHeader("Content-Type", mimeType); byte buffer[] = new byte[8192]; int bytesRead; InputStream in = new java.io.FileInputStream(globalFile); OutputStream out = response.getOutputStream(); while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } in.close(); // allow client-side caching: if (cacheExpires != 0) { setExpiresHeader(response, cacheExpires); } return; } else { log.error("No such file or directory: " + globalFile); response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); return; } } } private void setExpiresHeader(HttpServletResponse response, int hours) { Calendar calendar = Calendar.getInstance(); calendar.add(Calendar.HOUR_OF_DAY, hours); String expires = DateUtil.formatRFC822GMT(calendar.getTime()); response.setHeader("Expires", expires); } /** * Generate response from a resource view */ private HttpServletResponse generateResponse(View view, Resource res, HttpServletRequest request, HttpServletResponse response, Document doc, long size, long lastModified) throws ServletException, IOException { // Check if the view contains the response, otherwise assume that the resource wrote the response, and just return. // TODO: There seem like no header fields are being set (e.g. Content-Length, ...). Please see below ... // Check if viewable resource has already created a response if (!view.isResponse()) return response; // Set encoding if (view.getEncoding() != null) { response.setContentType(patchMimeType(view.getMimeType(), request) + "; charset=" + view.getEncoding()); } else if (res.getConfiguration() != null && res.getConfiguration().getEncoding() != null) { response.setContentType(patchMimeType(view.getMimeType(), request) + "; charset=" + res.getConfiguration().getEncoding()); } else { // try to guess if we have to set the default encoding String mimeType = view.getMimeType(); if (mimeType != null && mimeType.startsWith("text") || mimeType.equals("application/xml") || mimeType.equals("application/xhtml+xml") || mimeType.equals("application/atom+xml") || mimeType.equals("application/x-javascript")) { response.setContentType(patchMimeType(mimeType, request) + "; charset=" + DEFAULT_ENCODING); } else { // probably binary mime-type, don't set encoding response.setContentType(patchMimeType(mimeType, request)); } } // Set HTTP headers: HashMap headers = view.getHttpHeaders(); Iterator iter = headers.keySet().iterator(); while (iter.hasNext()) { String name = (String)iter.next(); String value = (String)headers.get(name); if (log.isDebugEnabled()) { log.debug("set http header: " + name + ": " + value); } response.setHeader(name, value); } // Possibly embed toolbar: // TODO: Check if user is authorized to actually see toolbar (Current flaw: Enabled Toolbar, Login, Toolbar is enabled, Logout, Toolbar is still visible!) if (isToolbarEnabled(request)) { String mimeType = view.getMimeType(); if (mimeType != null && mimeType.indexOf("html") > 0) { // TODO: What about other query strings or frames or TinyMCE? 
if (request.getParameter("yanel.resource.usecase") == null) { if (toolbarMasterSwitch.equals("on")) { OutputStream os = response.getOutputStream(); try { mergeToolbarWithContent(request, response, res, view); } catch (Exception e) { log.error(e, e); String message = "Error merging toolbar into content: " + e.toString(); Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return response; } return response; } else { log.info("Toolbar has been disabled. Please check web.xml!"); } } else { log.error("DEBUG: Exception to the rule: " + request.getParameter("yanel.resource.usecase")); } } else { log.debug("No HTML related mime type: " + mimeType); } } else { log.debug("Toolbar is turned off."); } InputStream is = view.getInputStream(); if (is != null) { // Write actual content into response byte buffer[] = new byte[8192]; int bytesRead; bytesRead = is.read(buffer); // TODO: Compare If-Modified-Since with lastModified and return 304 without content resp. check on ETag String ifModifiedSince = request.getHeader("If-Modified-Since"); if (ifModifiedSince != null) { if (log.isDebugEnabled()) log.debug("TODO: Implement 304 ..."); } if(lastModified >= 0) response.setDateHeader("Last-Modified", lastModified); if(size > 0) { if (log.isDebugEnabled()) log.debug("Size of " + request.getRequestURI() + ": " + size); response.setContentLength((int) size); } else { if (log.isDebugEnabled()) log.debug("No size for " + request.getRequestURI() + ": " + size); } // Check if InputStream is empty if (bytesRead != -1) { java.io.OutputStream os = response.getOutputStream(); os.write(buffer, 0, bytesRead); while ((bytesRead = is.read(buffer)) != -1) { os.write(buffer, 0, bytesRead); } os.close(); } else { log.warn("Returned content size of request '" + request.getRequestURI() + "' is 0"); } is.close(); return response; } else { String message = "Returned InputStream of request '" + request.getRequestURI() + "' is null!"; Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); is.close(); return response; } } /** * */ public void destroy() { super.destroy(); yanel.destroy(); log.warn("Yanel webapp has been shut down."); } /** * */ private Usecase getUsecase(HttpServletRequest request) { Usecase usecase = null; // TODO: Replace hardcoded roles by mapping between roles amd query strings ... 
String value = request.getParameter("yanel.resource.usecase"); String yanelUsecaseValue = request.getParameter("yanel.usecase"); String workflowTransitionValue = request.getParameter("yanel.resource.workflow.transition"); String contentType = request.getContentType(); String method = request.getMethod(); if (value != null && value.equals("save")) { log.debug("Save data ..."); usecase = new Usecase("write"); } else if (value != null && value.equals("checkin")) { log.debug("Checkin data ..."); usecase = new Usecase("write"); } else if (yanelUsecaseValue != null && yanelUsecaseValue.equals("create")) { log.debug("Create new resource ..."); usecase = new Usecase("resource.create"); } else if (value != null && value.equals("introspection")) { if(log.isDebugEnabled()) log.debug("Dynamically generated introspection ..."); usecase = new Usecase("introspection"); } else if (value != null && value.equals("checkout")) { log.debug("Checkout data ..."); usecase = new Usecase("open"); } else if (contentType != null && contentType.indexOf("application/atom+xml") >= 0 && (method.equals(METHOD_PUT) || method.equals(METHOD_POST))) { // TODO: Is posting atom entries different from a general post (see below)?! log.error("DEBUG: Write/Checkin Atom entry ..."); usecase = new Usecase("write"); // TODO: METHOD_POST is not generally protected, but save, checkin, application/atom+xml are being protected. See doPost(.... } else if (method.equals(METHOD_PUT)) { log.error("DEBUG: Upload data ..."); usecase = new Usecase("write"); } else if (method.equals(METHOD_DELETE)) { log.error("DEBUG: Delete resource (HTTP method delete)"); usecase = new Usecase("delete"); } else if (value != null && value.equals("delete")) { log.info("Delete resource (yanel resource usecase delete)"); usecase = new Usecase("delete"); } else if (workflowTransitionValue != null) { // TODO: How shall we protect workflow transitions?! log.error("DEBUG: Workflow transition ..."); usecase = new Usecase("view"); } else { usecase = new Usecase("view"); } value = request.getParameter("yanel.toolbar"); if (value != null && value.equals("on")) { log.debug("Turn on toolbar ..."); usecase = new Usecase(TOOLBAR_USECASE); } value = request.getParameter("yanel.policy"); if (value != null) { if (value.equals("create")) { usecase = new Usecase("policy.create"); } else if (value.equals("read")) { usecase = new Usecase("policy.read"); } else if (value.equals("update")) { usecase = new Usecase("policy.update"); } else if (value.equals("delete")) { usecase = new Usecase("policy.delete"); } else { log.warn("No such policy usecase: " + value); } } return usecase; } /** * Handle access policy requests (CRUD, whereas delete is not implemented yet!) 
*/ private void doAccessPolicyRequest(HttpServletRequest request, HttpServletResponse response, String usecase) throws ServletException, IOException { try { String viewId = request.getParameter(VIEW_ID_PARAM_NAME); Realm realm = map.getRealm(request.getServletPath()); String path = map.getPath(realm, request.getServletPath()); File pmrcGlobalFile = getGlobalResourceConfiguration("policy-manager_yanel-rc.xml", realm); Resource policyManagerResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, new ResourceConfiguration(new java.io.FileInputStream(pmrcGlobalFile))); View view = ((ViewableV2) policyManagerResource).getView(viewId); if (view != null) { if (generateResponse(view, policyManagerResource, request, response, getDocument(NAMESPACE, "yanel"), -1, -1) != null) return; } log.error("Something went terribly wrong!"); response.getWriter().print("Something went terribly wrong!"); return; } catch(Exception e) { log.error(e, e); throw new ServletException(e.getMessage()); } } /** * */ private void enableToolbar(HttpServletRequest request) { request.getSession(true).setAttribute(TOOLBAR_KEY, "on"); } /** * */ private void disableToolbar(HttpServletRequest request) { request.getSession(true).setAttribute(TOOLBAR_KEY, "off"); } /** * */ private boolean isToolbarEnabled(HttpServletRequest request) { String toolbarStatus = (String) request.getSession(true).getAttribute(TOOLBAR_KEY); if (toolbarStatus != null && toolbarStatus.equals("on")) { String yanelToolbar = request.getParameter("yanel.toolbar"); if(yanelToolbar != null && request.getParameter("yanel.toolbar").equals("suppress")) { return false; } else { return true; } } return false; } /** * Handle delete usecase */ private void handleDeleteUsecase(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String confirmed = request.getParameter("confirmed"); if (confirmed != null) { String path = getResource(request, response).getPath(); log.warn("Really delete " + path); doDelete(request, response); return; } else { log.warn("Delete has not been confirmed by client yet!"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); StringBuffer sb = new StringBuffer("<html><body>Do you really want to delete this page? <a href=\"?yanel.resource.usecase=delete&confirmed\">YES</a>, <a href=\"\">no</a></body></html>"); PrintWriter w = response.getWriter(); w.print(sb); return; } } /** * */ private File getGlobalResourceConfiguration(String resConfigName, Realm realm) { // TODO: Introduce a repository for the Yanel webapp File realmDir = new File(realm.getConfigFile().getParent()); File globalResConfigFile = org.wyona.commons.io.FileUtil.file(realmDir.getAbsolutePath(), "src" + File.separator + "webapp" + File.separator + "global-resource-configs/" + resConfigName); if (!globalResConfigFile.isFile()) { // Fallback to global configuration globalResConfigFile = org.wyona.commons.io.FileUtil.file(servletContextRealPath, "global-resource-configs/" + resConfigName); } return globalResConfigFile; } /** * */ private String getStackTrace(Exception e) { java.io.StringWriter sw = new java.io.StringWriter(); e.printStackTrace(new java.io.PrintWriter(sw)); return sw.toString(); } /** * */ private void do404(HttpServletRequest request, HttpServletResponse response, Document doc, String exceptionMessage) throws ServletException { // TODO: Log all 404 within a dedicated file (with client info attached) such that an admin can react to it ... 
String message = "No such node/resource exception: " + exceptionMessage; log.warn(message); /* Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); exceptionElement.setAttributeNS(NAMESPACE, "status", "404"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); setYanelOutput(request, response, doc); return; */ // TODO: Finish the XML (as it used to be before)! response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); try { Realm realm = yanel.getMap().getRealm(request.getServletPath()); File pnfResConfigFile = getGlobalResourceConfiguration("404_yanel-rc.xml", realm); ResourceConfiguration rc = new ResourceConfiguration(new java.io.FileInputStream(pnfResConfigFile)); String path = getResource(request, response).getPath(); Resource pageNotFoundResource = yanel.getResourceManager().getResource(getEnvironment(request, response), realm, path, rc); String viewId = request.getParameter(VIEW_ID_PARAM_NAME); if (request.getParameter("yanel.format") != null) { // backwards compatible viewId = request.getParameter("yanel.format"); } View view = ((ViewableV2) pageNotFoundResource).getView(viewId); if (view != null) { if (generateResponse(view, pageNotFoundResource, request, response, getDocument(NAMESPACE, "yanel"), -1, -1) != null) return; } log.error("404 seems to be broken!"); return; } catch (Exception e) { log.error(e, e); return; } } }
TODO added re doLogout
src/webapp/src/java/org/wyona/yanel/servlet/YanelServlet.java
TODO added re doLogout
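The getIdentity method in the servlet source above decodes HTTP BASIC credentials from the Authorization header with the non-public sun.misc.BASE64Decoder class and then splits the decoded string on ':'. Below is a minimal, self-contained sketch of that same header parsing, assuming java.util.Base64 (Java 8+) in place of the sun.misc class; the class name BasicAuthHeaderSketch and the sample header value are illustrative only and are not part of the Yanel code.

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class BasicAuthHeaderSketch {

    /** Returns {username, password}, or null if the header does not carry BASIC credentials. */
    static String[] parseBasicAuth(String authorizationHeader) {
        if (authorizationHeader == null || !authorizationHeader.toUpperCase().startsWith("BASIC")) {
            return null;
        }
        // Credentials follow "Basic " and are base64("username:password")
        String encoded = authorizationHeader.substring(6).trim();
        String decoded = new String(Base64.getDecoder().decode(encoded), StandardCharsets.UTF_8);
        // Split on the first ':' only, so passwords containing ':' survive
        // (the servlet above uses String.split(":"), which would truncate them)
        int colon = decoded.indexOf(':');
        if (colon < 0) {
            return null;
        }
        return new String[] { decoded.substring(0, colon), decoded.substring(colon + 1) };
    }

    public static void main(String[] args) {
        String header = "Basic YWxpY2U6c2VjcmV0"; // base64("alice:secret"), sample value only
        String[] up = parseBasicAuth(header);
        System.out.println(up[0] + " / " + up[1]); // prints: alice / secret
    }
}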
Java
apache-2.0
1857c85a80c9177bf9882693ddd2ad9d4ca9458c
0
murat8505/Youtubemysongs,ajindal2/Youtubemysongs
package com.android.youtubemysongs; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.provider.MediaStore; import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.ListView; import android.widget.TextView; import android.widget.AdapterView.OnItemClickListener; import android.widget.Toast; import org.apache.commons.io.IOUtils; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.json.JSONArray; import org.json.JSONObject; import com.android.youtubemysongs.R; public class Sample extends Activity { ListView musiclist; Cursor musiccursor; int music_column_index; int count; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.sample); init_phone_music_grid(); } @SuppressWarnings("deprecation") private void init_phone_music_grid() { System.gc(); String[] proj = { MediaStore.Audio.Media._ID,MediaStore.Audio.Media.DISPLAY_NAME,MediaStore.Audio.Media.ARTIST, MediaStore.Audio.Media.TITLE,MediaStore.Audio.Media.ALBUM }; musiccursor = managedQuery(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI,proj, null, null, null); count = musiccursor.getCount(); musiclist = (ListView) findViewById(R.id.PhoneMusicList); musiclist.setAdapter(new MusicAdapter(getApplicationContext())); musiclist.setOnItemClickListener(musicgridlistener); } private OnItemClickListener musicgridlistener = new OnItemClickListener() { public void onItemClick(AdapterView parent, View v, int position,long id) { String id0=null; int i=0; try { System.gc(); String lInfoStr = "fail"; music_column_index = musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.TITLE); int col1=musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.ARTIST); int col2=musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.ALBUM); musiccursor.moveToPosition(position); String title = musiccursor.getString(music_column_index); String artist = musiccursor.getString(col1); String album = musiccursor.getString(col2); if(!artist.contains("<unknown>"))title=(title+" "+artist).replace(" ", "%20"); else if(!album.contains("<unknown>")) title=(title+" "+album).replace(" ", "%20"); else title=(title).replace(" ", "%20"); String url="http://gdata.youtube.com/feeds/api/videos?q="+title+"&max-results=5&v=2&format=5&alt=jsonc"; URL jsonURL = new URL(url); URLConnection jc = jsonURL.openConnection(); InputStream is = jc.getInputStream(); String jsonTxt = IOUtils.toString( is ); JSONObject jj = new JSONObject(jsonTxt); JSONObject jdata = jj.getJSONObject("data"); int totalItems = Math.min(10,jdata.getInt("totalItems")); JSONArray aitems = null; if (totalItems > 0) aitems = jdata.getJSONArray("items"); while(lInfoStr.contains("fail")&&i<totalItems){ JSONObject item0 = aitems.getJSONObject(i); id0 = item0.getString("id"); HttpClient lClient = new DefaultHttpClient(); HttpGet lGetMethod = new HttpGet(YouTubemysongs.YOUTUBE_VIDEO_INFORMATION_URL + id0); HttpResponse
lResp = null; lResp = lClient.execute(lGetMethod); ByteArrayOutputStream lBOS = new ByteArrayOutputStream(); lResp.getEntity().writeTo(lBOS); lInfoStr = new String(lBOS.toString("UTF-8")); i++; } if(i>=totalItems){ Toast.makeText(getApplicationContext(), "Sorry! No video found :(", Toast.LENGTH_SHORT).show(); } else{ Intent lVideoIntent = new Intent(null, Uri.parse("ytv://"+id0), Sample.this, YouTubemysongs.class); startActivity(lVideoIntent); } } catch (Exception e) {e.printStackTrace();} } }; public class MusicAdapter extends BaseAdapter { private Context mContext; public MusicAdapter(Context c) { mContext = c; } public int getCount() { return count; } public Object getItem(int position) { return position; } public long getItemId(int position) { return position; } public View getView(int position, View convertView, ViewGroup parent) { System.gc(); String id = null; TextView tv; if (convertView == null) { tv = new TextView(mContext.getApplicationContext()); } else{ tv = (TextView) convertView; } musiccursor.moveToPosition(position); music_column_index = musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.TITLE); id = musiccursor.getString(music_column_index); music_column_index = musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.ARTIST); id += " - " + musiccursor.getString(music_column_index); tv.setText(id); return tv; } } }
OpenYouTubeActivity/src/com/android/youtubemysongs/Sample.java
package com.android.youtubemysongs; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.provider.MediaStore; import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.ListView; import android.widget.TextView; import android.widget.AdapterView.OnItemClickListener; import android.widget.Toast; import org.apache.commons.io.IOUtils; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.json.JSONArray; import org.json.JSONObject; import com.android.youtubemysongs.R; public class Sample extends Activity { ListView musiclist; Cursor musiccursor; int music_column_index; int count; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.sample); init_phone_music_grid(); } @SuppressWarnings("deprecation") private void init_phone_music_grid() { System.gc(); String[] proj = { MediaStore.Audio.Media._ID,MediaStore.Audio.Media.DISPLAY_NAME,MediaStore.Audio.Media.ARTIST, MediaStore.Audio.Media.TITLE,MediaStore.Audio.Media.ALBUM }; musiccursor = managedQuery(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI,proj, null, null, null); count = musiccursor.getCount(); musiclist = (ListView) findViewById(R.id.PhoneMusicList); musiclist.setAdapter(new MusicAdapter(getApplicationContext())); musiclist.setOnItemClickListener(musicgridlistener); } private OnItemClickListener musicgridlistener = new OnItemClickListener() { public void onItemClick(AdapterView parent, View v, int position,long id) { String id0=null; int i=0; try { System.gc(); String lInfoStr = "fail"; music_column_index = musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.TITLE); int col1=musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.ARTIST); int col2=musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.ALBUM); musiccursor.moveToPosition(position); String title = musiccursor.getString(music_column_index); String artist = musiccursor.getString(col1); String album = musiccursor.getString(col2); if(!artist.contains("<unknown>"))title=(title+" "+artist).replace(" ", "%20"); else if(!album.contains("<unknown>")) title=(title+" "+album).replace(" ", "%20"); else title=(title).replace(" ", "%20"); String url="http://gdata.youtube.com/feeds/api/videos?q="+title+"&max-results=10&v=2&format=5&alt=jsonc"; URL jsonURL = new URL(url); URLConnection jc = jsonURL.openConnection(); InputStream is = jc.getInputStream(); String jsonTxt = IOUtils.toString( is ); JSONObject jj = new JSONObject(jsonTxt); JSONObject jdata = jj.getJSONObject("data"); JSONArray aitems = jdata.getJSONArray("items"); while(lInfoStr.contains("fail")&&i<10){ JSONObject item0 = aitems.getJSONObject(i); id0 = item0.getString("id"); HttpClient lClient = new DefaultHttpClient(); HttpGet lGetMethod = new HttpGet(YouTubemysongs.YOUTUBE_VIDEO_INFORMATION_URL + id0); HttpResponse lResp = null; lResp = lClient.execute(lGetMethod); ByteArrayOutputStream lBOS = new ByteArrayOutputStream(); lResp.getEntity().writeTo(lBOS); lInfoStr = new 
String(lBOS.toString("UTF-8")); //Log.v("AANCHAL",lInfoStr); i++; } if(i>=10){ Log.v("AANCHAL", "here"); Toast.makeText(getApplicationContext(), "YOUR TEXT", Toast.LENGTH_LONG).show(); //Toast.makeText(getApplicationContext(), "Cannot play this video due to Youtube permissions policy", Toast.LENGTH_LONG).show();} } else{ Intent lVideoIntent = new Intent(null, Uri.parse("ytv://"+id0), Sample.this, YouTubemysongs.class); startActivity(lVideoIntent); } } catch (Exception e) {e.printStackTrace();} } }; public class MusicAdapter extends BaseAdapter { private Context mContext; public MusicAdapter(Context c) { mContext = c; } public int getCount() { return count; } public Object getItem(int position) { return position; } public long getItemId(int position) { return position; } public View getView(int position, View convertView, ViewGroup parent) { System.gc(); String id = null; TextView tv; if (convertView == null) { tv = new TextView(mContext.getApplicationContext()); } else{ tv = (TextView) convertView; } musiccursor.moveToPosition(position); music_column_index = musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.TITLE); id = musiccursor.getString(music_column_index); music_column_index = musiccursor.getColumnIndexOrThrow(MediaStore.Audio.Media.ARTIST); id += " - " + musiccursor.getString(music_column_index); tv.setText(id); return tv; } } }
title,album,artist search Merge branch 'master' of https://github.com/ajindal2/Youtubemysongs Conflicts: OpenYouTubeActivity/src/com/android/youtubemysongs/Sample.java
OpenYouTubeActivity/src/com/android/youtubemysongs/Sample.java
title,album,artist search Merge branch 'master' of https://github.com/ajindal2/Youtubemysongs
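The onItemClick handler in the Sample activity above builds a GData search URL (alt=jsonc) and walks the response with org.json: data -> totalItems -> items[i] -> id. The sketch below exercises that same parsing path against a canned response string so it runs offline; the class name JsoncParseSketch and the literal JSON are illustrative, not real API output, and org.json is assumed to be on the classpath (the activity already imports it).

import org.json.JSONArray;
import org.json.JSONObject;

public class JsoncParseSketch {
    public static void main(String[] args) {
        // Shape of a GData "alt=jsonc" search response (illustrative sample, not real API output):
        // {"data":{"totalItems":N,"items":[{"id":"..."}, ...]}}
        String jsonTxt = "{\"data\":{\"totalItems\":2,\"items\":[{\"id\":\"abc123\"},{\"id\":\"def456\"}]}}";

        JSONObject jj = new JSONObject(jsonTxt);
        JSONObject jdata = jj.getJSONObject("data");

        // Same cap as the activity, additionally bounded by the array length,
        // since totalItems can exceed the number of items returned in one page.
        int totalItems = Math.min(10, jdata.getInt("totalItems"));
        if (totalItems > 0) {
            JSONArray aitems = jdata.getJSONArray("items");
            int limit = Math.min(totalItems, aitems.length());
            for (int i = 0; i < limit; i++) {
                String videoId = aitems.getJSONObject(i).getString("id");
                System.out.println("candidate video id: " + videoId); // prints abc123, then def456
            }
        } else {
            System.out.println("no results");
        }
    }
}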
Java
apache-2.0
995c0daef27640c73a9cb256bb9bff0d8ba64e80
0
MichaelNedzelsky/intellij-community,caot/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,izonder/intellij-community,fnouama/intellij-community,fitermay/intellij-community,fnouama/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,ernestp/consulo,pwoodworth/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,apixandru/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,signed/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,holmes/intellij-community,diorcety/intellij-community,adedayo/intellij-community,hurricup/intellij-community,jagguli/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,allotria/intellij-community,slisson/intellij-community,fnouama/intellij-community,holmes/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,supersven/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,adedayo/intellij-community,semonte/intellij-community,Lekanich/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,consulo/consulo,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,allotria/intellij-community,signed/intellij-community,slisson/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,samthor/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,robovm/robovm-studio,vladmm/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,petteyg/intellij-community,diorcety/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,samthor/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,izonder/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,joewalnes/idea-community,apixandru/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,supersven/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,caot/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,diorcety/intellij-community,asedunov/intellij-community,fnouama/intellij-commu
nity,blademainer/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,hurricup/intellij-community,robovm/robovm-studio,xfournet/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,caot/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,semonte/intellij-community,apixandru/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,consulo/consulo,diorcety/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,ibinti/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,vladmm/intellij-community,hurricup/intellij-community,blademainer/intellij-community,allotria/intellij-community,da1z/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,signed/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,amith01994/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,TangHao1987/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,caot/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,semonte/intellij-community,caot/intellij-community
,MER-GROUP/intellij-community,nicolargo/intellij-community,holmes/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,FHannes/intellij-community,dslomov/intellij-community,slisson/intellij-community,tmpgit/intellij-community,supersven/intellij-community,retomerz/intellij-community,holmes/intellij-community,Lekanich/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,joewalnes/idea-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,consulo/consulo,supersven/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,joewalnes/idea-community,izonder/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,da1z/intellij-community,joewalnes/idea-community,ryano144/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,semonte/intellij-community,caot/intellij-community,izonder/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,signed/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,izonder/intellij-community,dslomov/intellij-community,asedunov/intellij-community,ibinti/intellij-community,izonder/intellij-community,blademainer/intellij-community,kool79/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,petteyg/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,consulo/consulo,apixandru/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,amith01994/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,kool79/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,kool79/intellij-community,ivan-fe
dorov/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,kdwink/intellij-community,caot/intellij-community,ryano144/intellij-community,retomerz/intellij-community,allotria/intellij-community,clumsy/intellij-community,apixandru/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,dslomov/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,FHannes/intellij-community,xfournet/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,da1z/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,allotria/intellij-community,gnuhub/intellij-community,holmes/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,slisson/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,ryano144/intellij-community,dslomov/intellij-community,vladmm/intellij-community,supersven/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,fitermay/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,vladmm/intellij-community,petteyg/intellij-community,izonder/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,semonte/intellij-community,youdonghai/intellij-community,izonder/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,robovm/robovm-studio,petteyg/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,caot/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,semonte/intellij-community,robovm/robovm-studio,ibinti/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,xfournet/intellij-community,fitermay/intellij-community,holmes/intellij-community,samthor/intellij-c
ommunity,vladmm/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,da1z/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,joewalnes/idea-community,salguarnieri/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,signed/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,retomerz/intellij-community,da1z/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,ibinti/intellij-community,dslomov/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,allotria/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,clumsy/intellij-community,kool79/intellij-community,kool79/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,vladmm/intellij-community,caot/intellij-community,ernestp/consulo,vvv1559/intellij-community,samthor/intellij-community,consulo/consulo,akosyakov/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,kool79/intellij-community,Distrotech/intellij-community,supersven/intellij-community,blademainer/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,apixandru/intellij-community,supersven/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,amith01994/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,clumsy/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,clumsy/intellij-community,consulo/consulo,nicolargo/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,suncycheng/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,ernestp/consulo,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,joewalnes/idea-community,orekyuu/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-comm
unity,blademainer/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,holmes/intellij-community,signed/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,samthor/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,amith01994/intellij-community,kool79/intellij-community,semonte/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,clumsy/intellij-community,caot/intellij-community,asedunov/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,holmes/intellij-community,robovm/robovm-studio,robovm/robovm-studio,diorcety/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,joewalnes/idea-community,robovm/robovm-studio,holmes/intellij-community,diorcety/intellij-community,xfournet/intellij-community,kdwink/intellij-community,semonte/intellij-community,retomerz/intellij-community,amith01994/intellij-community,robovm/robovm-studio,ernestp/consulo,slisson/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,kool79/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,dslomov/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,slisson/intellij-community,xfournet/intellij-community,hurricup/intellij-community,da1z/intellij-community,kool79/intellij-community,youdonghai/intellij-community,da1z/intellij-community,ernestp/consulo,blademainer/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,ibinti/intellij-community,ryano144/intellij-community,holmes/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,joewalnes/idea-community,nicolargo/intellij-community,kool79/intellij-community,slisson/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,kdwink/int
ellij-community,salguarnieri/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,Distrotech/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,ernestp/consulo
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.vcs; import com.intellij.openapi.project.Project; import com.intellij.openapi.vcs.changes.VcsDirtyScopeManager; import com.intellij.openapi.vfs.*; import com.intellij.util.messages.MessageBusConnection; import java.io.File; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; /** * <p> * Listens to the external changes of the given files under vcs root * and marks everything under this root dirty if this file changes. * This is intended to listen to repository service files (like .git/index for Git VCS or .hg/dirindex for Mercurial) * and update IDEA's information about file statuses in case of external operations with repository. * For example, when user commits from the command line IDEA should notice that and update the Changes View. * </p> * <p> * To avoid marking everything dirty in case of commit from IDEA (or other action), there are methods * {@link #internalOperationStarted()} and {@link #internalOperationEnded()}: the VCS plugin should call them before and after * each operation that would change files which are listened. Thus the change (for example, commit from IDEA) will be treated * as internal and files won't be marked dirty (the Changes View is updated after IDEA commit anyway). * </p> * <p> * Use the constructor to create the listener, and {@link #activate()}/{@link #dispose()} to subscribe/unsubscribe to events. * </p> * @author Kirill Likhodedov */ public class RepositoryChangeListener extends VirtualFileAdapter implements VcsListener { private final Project myProject; private final String[] myRelativePathsToListen; private final AtomicBoolean myInternalChangeInProgress = new AtomicBoolean(); private final AtomicLong myInternalChangeEndTime = new AtomicLong(); private MessageBusConnection myConnection; /** * @param relativePathsToListen Paths to files (directories are not supported yet) which are to be listened. * Paths are relative to vcs roots. */ public RepositoryChangeListener(Project project, String... 
relativePathsToListen) { myProject = project; myRelativePathsToListen = relativePathsToListen; } public void activate() { loadRepoFilesForAllMappings(myRelativePathsToListen); myConnection = myProject.getMessageBus().connect(); myConnection.subscribe(ProjectLevelVcsManager.VCS_CONFIGURATION_CHANGED, this); VirtualFileManager.getInstance().addVirtualFileListener(this); } public void dispose() { myConnection.disconnect(); VirtualFileManager.getInstance().removeVirtualFileListener(this); } @Override public void directoryMappingChanged() { loadRepoFilesForAllMappings(myRelativePathsToListen); } @Override public void contentsChanged(VirtualFileEvent event) { final String path = event.getFile().getPath(); for (String relativePath : myRelativePathsToListen) { if (path.endsWith(relativePath)) { if (!myInternalChangeInProgress.get() && !internalChangeHappenedRecently()) { // identify the vcs root for this file VirtualFile vcsRoot = null; for (VcsRoot root : ProjectLevelVcsManager.getInstance(myProject).getAllVcsRoots()) { if ((root.path.getPath() + "/" + relativePath).equals(path)) { vcsRoot = root.path; break; } } if (vcsRoot != null) { VcsDirtyScopeManager.getInstance(myProject).dirDirtyRecursively(vcsRoot); } } break; } } } /** * Notifies the listener that IDEA is going to change index file right away. * For example, at the beginning of the commit process. */ public void internalOperationStarted() { myInternalChangeInProgress.set(true); } /** * Notifies the listener that IDEA has finished changing index file. */ public void internalOperationEnded() { // no synchronization here, because it's not dangerous if index change time is a bit different. myInternalChangeInProgress.set(false); myInternalChangeEndTime.set(System.currentTimeMillis()); } /** * @return true if last index change via IDEA happened less than a second ago. */ private boolean internalChangeHappenedRecently() { return System.currentTimeMillis() - myInternalChangeEndTime.get() < 1000; } // load repository files for all repositories (and thus subscribe to changes in them) private void loadRepoFilesForAllMappings(String[] relativePathsToListen) { for (VcsRoot root : ProjectLevelVcsManager.getInstance(myProject).getAllVcsRoots()) { for (String path : relativePathsToListen) { loadRepoFile(root.path, path); } } } /** * Loads the file for the given repository root, * so that the correspondent VirtualFile is created and thus changes to this file will be fired to the listener. */ private void loadRepoFile(VirtualFile vcsRoot, String relativePath) { if (vcsRoot != null) { LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(vcsRoot.getPath(), relativePath)); } } }
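The RepositoryChangeListener above is passive until a concrete VCS plugin wires it in. A minimal usage sketch follows, assuming the class above is on the classpath; the GitIntegration wrapper, its runGitCommit() helper and the shutdown() method are hypothetical, while ".git/index" is the service file named in the class Javadoc.

import com.intellij.openapi.project.Project;
import com.intellij.openapi.vcs.RepositoryChangeListener;

public class GitIntegration {
  private final RepositoryChangeListener myIndexListener;

  public GitIntegration(Project project) {
    // Listen to the Git service file mentioned in the Javadoc above.
    myIndexListener = new RepositoryChangeListener(project, ".git/index");
    myIndexListener.activate(); // subscribe to VFS events and mapping changes
  }

  public void commit() {
    // Bracket IDEA-driven index changes so they are not treated as external ones.
    myIndexListener.internalOperationStarted();
    try {
      runGitCommit(); // hypothetical helper that performs the actual commit
    }
    finally {
      myIndexListener.internalOperationEnded();
    }
  }

  public void shutdown() {
    myIndexListener.dispose(); // unsubscribe from the message bus and VFS
  }

  private void runGitCommit() {
    // hypothetical: invoke the commit machinery here
  }
}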
platform/vcs-impl/src/com/intellij/openapi/vcs/RepositoryChangeListener.java
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.vcs; import com.intellij.openapi.project.Project; import com.intellij.openapi.vcs.changes.VcsDirtyScopeManager; import com.intellij.openapi.vfs.*; import com.intellij.util.messages.MessageBusConnection; import java.io.File; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; /** * <p> * Listens to the external changes of the given files under vcs root * and marks everything under this root dirty if this file changes. * This is intended to listen to repository service files (like .git/index for Git VCS or .hg/dirindex for Mercurial) * and update IDEA's information about file statuses in case of external operations with repository. * For example, when user commits from the command line IDEA should notice that and update the Changes View. * </p> * <p> * To avoid marking everything dirty in case of commit from IDEA (or other action), there are methods * {@link #internalOperationStarted()} and {@link #internalOperationEnded()}: the VCS plugin should call them before and after * each operation that would change files which are listened. Thus the change (for example, commit from IDEA) will be treated * as internal and files won't be marked dirty (the Changes View is updated after IDEA commit anyway). * </p> * <p> * Use the constructor to create the listener, and {@link #activate()}/{@link #dispose()} to subscribe/unsubscribe to events. * </p> * @author Kirill Likhodedov */ public class RepositoryChangeListener extends VirtualFileAdapter implements VcsListener { private final Project myProject; private final String[] myRelativePathsToListen; private final AtomicBoolean myInternalChangeInProgress = new AtomicBoolean(); private final AtomicLong myInternalChangeEndTime = new AtomicLong(); private final MessageBusConnection myConnection; /** * @param relativePathsToListen Paths to files (directories are not supported yet) which are to be listened. * Paths are relative to vcs roots. */ public RepositoryChangeListener(Project project, String... 
relativePathsToListen) { myProject = project; myRelativePathsToListen = relativePathsToListen; myConnection = myProject.getMessageBus().connect(); } public void activate() { loadRepoFilesForAllMappings(myRelativePathsToListen); myConnection.subscribe(ProjectLevelVcsManager.VCS_CONFIGURATION_CHANGED, this); VirtualFileManager.getInstance().addVirtualFileListener(this); } public void dispose() { myConnection.disconnect(); VirtualFileManager.getInstance().removeVirtualFileListener(this); } @Override public void directoryMappingChanged() { loadRepoFilesForAllMappings(myRelativePathsToListen); } @Override public void contentsChanged(VirtualFileEvent event) { final String path = event.getFile().getPath(); for (String relativePath : myRelativePathsToListen) { if (path.endsWith(relativePath)) { if (!myInternalChangeInProgress.get() && !internalChangeHappenedRecently()) { // identify the vcs root for this file VirtualFile vcsRoot = null; for (VcsRoot root : ProjectLevelVcsManager.getInstance(myProject).getAllVcsRoots()) { if ((root.path.getPath() + "/" + relativePath).equals(path)) { vcsRoot = root.path; break; } } if (vcsRoot != null) { VcsDirtyScopeManager.getInstance(myProject).dirDirtyRecursively(vcsRoot); } } break; } } } /** * Notifies the listener that IDEA is going to change index file right away. * For example, at the beginning of the commit process. */ public void internalOperationStarted() { myInternalChangeInProgress.set(true); } /** * Notifies the listener that IDEA has finished changing index file. */ public void internalOperationEnded() { // no synchronization here, because it's not dangerous if index change time is a bit different. myInternalChangeInProgress.set(false); myInternalChangeEndTime.set(System.currentTimeMillis()); } /** * @return true if last index change via IDEA happened less than a second ago. */ private boolean internalChangeHappenedRecently() { return System.currentTimeMillis() - myInternalChangeEndTime.get() < 1000; } // load repository files for all repositories (and thus subscribe to changes in them) private void loadRepoFilesForAllMappings(String[] relativePathsToListen) { for (VcsRoot root : ProjectLevelVcsManager.getInstance(myProject).getAllVcsRoots()) { for (String path : relativePathsToListen) { loadRepoFile(root.path, path); } } } /** * Loads the file for the given repository root, * so that the correspondent VirtualFile is created and thus changes to this file will be fired to the listener. */ private void loadRepoFile(VirtualFile vcsRoot, String relativePath) { if (vcsRoot != null) { LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(vcsRoot.getPath(), relativePath)); } } }
IDEA-62386 switching vcs mapping error [review=kirill.safonov]
platform/vcs-impl/src/com/intellij/openapi/vcs/RepositoryChangeListener.java
IDEA-62386 switching vcs mapping error [review=kirill.safonov]
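Read against the old contents of this record, the IDEA-62386 fix itself is a one-line move: the message-bus connection is no longer opened eagerly in the constructor (where the field was final), but lazily in activate(), presumably so that a listener that is constructed but not yet activated holds no bus connection and activate()/dispose() form a symmetric pair. The two relevant excerpts from the versions above:

// old version: connection opened in the constructor
public RepositoryChangeListener(Project project, String... relativePathsToListen) {
  myProject = project;
  myRelativePathsToListen = relativePathsToListen;
  myConnection = myProject.getMessageBus().connect();
}

// new version: connection opened only when the listener is activated
public void activate() {
  loadRepoFilesForAllMappings(myRelativePathsToListen);
  myConnection = myProject.getMessageBus().connect();
  myConnection.subscribe(ProjectLevelVcsManager.VCS_CONFIGURATION_CHANGED, this);
  VirtualFileManager.getInstance().addVirtualFileListener(this);
}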
Java
apache-2.0
7c81fb09efa200eaab68849b790f80a0c7793061
0
sedulam/CASSANDRA-12201,sedulam/CASSANDRA-12201,sedulam/CASSANDRA-12201
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.compaction; import java.time.LocalTime; import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import com.google.common.collect.ImmutableSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.DecoratedKey; import org.apache.cassandra.db.lifecycle.LifecycleTransaction; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.io.sstable.KeyIterator; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.schema.CompactionParams; import sun.reflect.generics.reflectiveObjects.NotImplementedException; /** * This strategy tries to take advantage of periods of the day where there's less I/O. * Full description can be found at CASSANDRA-12201. */ public class BurstHourCompactionStrategy extends AbstractCompactionStrategy { //TODO minThreshold is the minimum number of occurrences to trigger the compaction of the key references //TODO maxThreshold is the maximum of tables that we're compacting each time private volatile int estimatedRemainingTasks; private final Set<SSTableReader> sstables = new HashSet<>(); //TODO add logging private static final Logger logger = LoggerFactory.getLogger(BurstHourCompactionStrategy.class); private final BurstHourCompactionStrategyOptions bhcsOptions; public BurstHourCompactionStrategy(ColumnFamilyStore cfs, Map<String, String> options) { super(cfs, options); estimatedRemainingTasks = 0; bhcsOptions = new BurstHourCompactionStrategyOptions(options); } /** * Check every key of every table, until we've hit the threshold for SSTables with key repetitions. 
* * @return a set of tables that share a set of the same keys */ private Set<SSTableReader> getKeyReferences() { logger.info("Starting Burst Compaction analysis in CFS <" + cfs.getTableName() + ">."); Iterable<SSTableReader> candidates = filterSuspectSSTables(sstables); int minThreshold = cfs.getMinimumCompactionThreshold(); int maxThreshold = cfs.getMaximumCompactionThreshold(); Set<SSTableReader> ssTablesToCompact = new HashSet<>(); // Get all the keys and the corresponding SSTables in which they exist ExecutorService executor = Executors.newCachedThreadPool(); Set<Future<Set<SSTableReader>>> threads = new HashSet<>(); Set<Future> finishedThreads = new HashSet<>(); /* because candidates is an iterable, we don't know its size, which is required to calculate the number of remaining compaction tasks, hence we use this maxThresholdReached and numberOfCandidates variables to finish the counting of the set */ boolean maxThresholdReached = false; int numberOfCandidates = 0; for (SSTableReader ssTableReader : candidates) { if (!maxThresholdReached) { logger.info("Searching table " + ssTableReader.getFilename()); KeyIterator keyIterator = new KeyIterator(ssTableReader.descriptor, cfs.metadata()); Callable<Set<SSTableReader>> callable = new SSTableReferencesSearcher(candidates, keyIterator, maxThreshold, ssTablesToCompact); Future<Set<SSTableReader>> future = executor.submit(callable); threads.add(future); checkFinishedThreads(threads, finishedThreads, ssTablesToCompact, minThreshold, maxThreshold); if (ssTablesToCompact.size() >= maxThreshold) { maxThresholdReached = true; } } numberOfCandidates++; } while (ssTablesToCompact.size() < maxThreshold) { boolean allThreadsDone = checkFinishedThreads(threads, finishedThreads, ssTablesToCompact, minThreshold, maxThreshold); if (allThreadsDone || ssTablesToCompact.size() >= maxThreshold) { break; } } estimatedRemainingTasks = numberOfCandidates / cfs.getMaximumCompactionThreshold(); logger.info("Number of remaining compaction tasks for CFS <" + cfs.getTableName() + ">: " + estimatedRemainingTasks); return ssTablesToCompact; } private static boolean checkFinishedThreads(Set<Future<Set<SSTableReader>>> threads, Set<Future> finishedThreads, Set<SSTableReader> tablesWithRepeatedKeys, int minThreshold, int maxThreshold) { for (Future<Set<SSTableReader>> thread : threads) { if (thread.isDone() && !finishedThreads.contains(thread)) { finishedThreads.add(thread); try { Set<SSTableReader> references = thread.get(); if (references.size() >= minThreshold) { logger.info("Finished searching one of the candidates tables. 
Cross references with " + references.size() + " tables found."); tablesWithRepeatedKeys.addAll(references); if (tablesWithRepeatedKeys.size() >= maxThreshold) { terminateRemainingSearchThreads(threads, finishedThreads); return true; } } } catch (InterruptedException | ExecutionException e) { logger.error("One of the threads responsible for finding key references terminated unexpectadly", e); } } } if ((threads.size() - finishedThreads.size()) > 0) { logger.info("Still need to finish " + (threads.size() - finishedThreads.size()) + " threads."); return false; } else { return true; } } private static void terminateRemainingSearchThreads(Set<Future<Set<SSTableReader>>> threads, Set<Future> finishedThreads) { for (Future thread : threads) { if (!finishedThreads.contains(thread)) { boolean threadTerminated = thread.cancel(true); if (threadTerminated) { logger.info("Thread " + thread.toString() + " terminated."); } } } } /** * @param gcBefore throw away tombstones older than this * @return the next background/minor compaction task to run; null if nothing to do. * <p> * TODO does the following line still applies? If not, change the superclass doc. Repeat for other methods. * Is responsible for marking its sstables as compaction-pending. */ public AbstractCompactionTask getNextBackgroundTask(int gcBefore) { LocalTime now = LocalTime.now(); boolean isBurstHour = now.isAfter(bhcsOptions.startTime) && now.isBefore(bhcsOptions.endTime); if (!isBurstHour) { return null; } Set<SSTableReader> ssTablesToCompact = getKeyReferences(); return createBhcsCompactionTask(ssTablesToCompact, gcBefore); } /** * Creates the compaction task object. * * @param tables the tables we want to compact * @param gcBefore throw away tombstones older than this * @return a compaction task object which will be later used to run the compaction per se */ private AbstractCompactionTask createBhcsCompactionTask(Collection<SSTableReader> tables, int gcBefore) { if (tables.size() == 0) { return null; } else { LifecycleTransaction transaction = cfs.getTracker().tryModify(tables, OperationType.COMPACTION); return new CompactionTask(cfs, transaction, gcBefore); } } /** * @param gcBefore throw away tombstones older than this * @param splitOutput TODO * @return a compaction task that should be run to compact this columnfamilystore * as much as possible. Null if nothing to do. * <p> * Is responsible for marking its sstables as compaction-pending. */ public Collection<AbstractCompactionTask> getMaximalTask(int gcBefore, boolean splitOutput) { if (splitOutput) { //TODO throw new NotImplementedException(); } else { Set<SSTableReader> ssTablesToCompact = getKeyReferences(); if (ssTablesToCompact.size() > 0) { Set<AbstractCompactionTask> tasks = new HashSet<>(1); tasks.add(createBhcsCompactionTask(ssTablesToCompact, gcBefore)); return tasks; } else { return null; } } } /** * @param sstables SSTables to compact. Must be marked as compacting. * @param gcBefore throw away tombstones older than this * @return a compaction task corresponding to the requested sstables. * Will not be null. (Will throw if user requests an invalid compaction.) * <p> * Is responsible for marking its sstables as compaction-pending. * TODO DTS, STCS and now BHCS all do basically the same thing in this method. Wouldn't it be better to define this in the superclass and LCS would override it? 
*/ public AbstractCompactionTask getUserDefinedTask(Collection<SSTableReader> sstables, int gcBefore) { return createBhcsCompactionTask(sstables, gcBefore); } /** * @return the number of background tasks estimated to still be needed for this columnfamilystore */ public int getEstimatedRemainingTasks() { return estimatedRemainingTasks; } /** * @return size in bytes of the largest sstables for this strategy */ public long getMaxSSTableBytes() { //TODO why is every strategy, except for LCS, returing this value? return Long.MAX_VALUE; } public void addSSTable(SSTableReader added) { sstables.add(added); } public void removeSSTable(SSTableReader sstable) { sstables.remove(sstable); } /** * Returns the sstables managed by this strategy instance */ protected Set<SSTableReader> getSSTables() { return ImmutableSet.copyOf(sstables); } public static Map<String, String> validateOptions(Map<String, String> options) throws ConfigurationException { Map<String, String> uncheckedOptions = AbstractCompactionStrategy.validateOptions(options); uncheckedOptions = BurstHourCompactionStrategyOptions.validateOptions(options, uncheckedOptions); uncheckedOptions.remove(CompactionParams.Option.MIN_THRESHOLD.toString()); uncheckedOptions.remove(CompactionParams.Option.MAX_THRESHOLD.toString()); return uncheckedOptions; } private static class SSTableReferencesSearcher implements Callable<Set<SSTableReader>> { private final Iterable<SSTableReader> uncompactingSsTables; private final int maxThreshold; private final Set<SSTableReader> ssTablesWithReferences; private final KeyIterator keyIterator; private SSTableReferencesSearcher(Iterable<SSTableReader> uncompactingSsTables, KeyIterator keyIterator, int maxThreshold, Set<SSTableReader> ssTablesWithReferences) { this.keyIterator = keyIterator; this.uncompactingSsTables = uncompactingSsTables; this.maxThreshold = maxThreshold; this.ssTablesWithReferences = ssTablesWithReferences; } /** * Computes a result, or throws an exception if unable to do so. * * @return computed result * @throws Exception if unable to compute a result */ public Set<SSTableReader> call() throws Exception { while (keyIterator.hasNext()) { DecoratedKey key = keyIterator.next(); logger.debug("Starting scan for key " + key.toString()); for (SSTableReader ssTable : uncompactingSsTables) { // check if the key actually exists in this sstable, without updating cache and stats if (ssTable.getPosition(key, SSTableReader.Operator.EQ, false) != null) { ssTablesWithReferences.add(ssTable); } } logger.debug("Key " + key.toString() + " is referenced by " + ssTablesWithReferences.size() + " tables."); if (ssTablesWithReferences.size() >= maxThreshold) { break; } } return ssTablesWithReferences; } } }
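getKeyReferences() above fans the key scan out over a cached thread pool: one SSTableReferencesSearcher per candidate table, with futures polled until enough cross-referenced tables have been collected and the remaining searches cancelled. Below is a self-contained sketch of that submit/poll/cancel pattern; plain strings stand in for SSTableReader, the search body is a toy, and only the ExecutorService/Future mechanics mirror the code above.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ThresholdedSearchSketch {
    public static void main(String[] args) throws Exception {
        List<String> candidates = Arrays.asList("table-1", "table-2", "table-3", "table-4", "table-5");
        int maxThreshold = 3; // stop once this many tables are selected

        ExecutorService executor = Executors.newCachedThreadPool();
        List<Future<Set<String>>> futures = new ArrayList<>();

        for (String candidate : candidates) {
            Callable<Set<String>> searcher = () -> {
                // toy stand-in for scanning one table's keys against the others
                Set<String> references = new HashSet<>();
                references.add(candidate);
                return references;
            };
            futures.add(executor.submit(searcher));
        }

        // Poll the futures, merge partial results, and cancel what is left once the
        // threshold is reached (the role of terminateRemainingSearchThreads()).
        Set<String> selected = new HashSet<>();
        for (Future<Set<String>> future : futures) {
            if (selected.size() >= maxThreshold) {
                future.cancel(true);
                continue;
            }
            selected.addAll(future.get());
        }
        executor.shutdownNow();

        System.out.println("Tables selected for compaction: " + selected);
    }
}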
src/java/org/apache/cassandra/db/compaction/BurstHourCompactionStrategy.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.compaction; import java.time.LocalTime; import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import com.google.common.collect.ImmutableSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.DecoratedKey; import org.apache.cassandra.db.lifecycle.LifecycleTransaction; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.io.sstable.KeyIterator; import org.apache.cassandra.io.sstable.SSTable; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.schema.CompactionParams; import org.apache.cassandra.schema.TableMetadata; import sun.reflect.generics.reflectiveObjects.NotImplementedException; /** * This strategy tries to take advantage of periods of the day where there's less I/O. * Full description can be found at CASSANDRA-12201. */ // public class BurstHourCompactionStrategy extends AbstractCompactionStrategy { //TODO minThreshold is the minimum number of occurrences to trigger the compaction of the key references //TODO maxThreshold is the maximum of tables that we're compacting each time private volatile int estimatedRemainingTasks; //TODO do we really need this variable? private final Set<SSTableReader> sstables = new HashSet<>(); //TODO add logging private static final Logger logger = LoggerFactory.getLogger(BurstHourCompactionStrategy.class); private final BurstHourCompactionStrategyOptions bhcsOptions; public BurstHourCompactionStrategy(ColumnFamilyStore cfs, Map<String, String> options) { super(cfs, options); estimatedRemainingTasks = 0; bhcsOptions = new BurstHourCompactionStrategyOptions(options); } /** * Check every key of every table, until we've hit the threshold for SSTables with key repetitions. 
* * @return a set of tables that share a set of the same keys */ private Set<SSTableReader> getKeyReferences() { Iterable<SSTableReader> candidates = filterSuspectSSTables(cfs.getUncompactingSSTables()); int minThreshold = cfs.getMinimumCompactionThreshold(); int maxThreshold = cfs.getMaximumCompactionThreshold(); Set<SSTableReader> ssTablesToCompact = new HashSet<>(); // Get all the keys and the corresponding SSTables in which they exist ExecutorService executor = Executors.newCachedThreadPool(); Set<Future> threads = new HashSet<>(); Set<Future> finishedThreads = new HashSet<>(); /* because candidates is an iterable, we don't know its size, which is required to calculate the number of remaining compaction tasks, hence we use this maxThresholdReached and numberOfCandidates variables to finish the counting of the set */ boolean maxThresholdReached = false; int numberOfCandidates = 0; for (SSTableReader ssTableReader : candidates) { if (!maxThresholdReached) { System.out.println("Searching table " + ssTableReader.getFilename()); KeyIterator keyIterator = new KeyIterator(ssTableReader.descriptor, cfs.metadata()); Callable callable = new SSTableReferencesSearcher(candidates, keyIterator, maxThreshold, ssTablesToCompact); Future<Set<SSTableReader>> future = executor.submit(callable); threads.add(future); checkFinishedThreads(threads, finishedThreads, ssTablesToCompact, minThreshold, maxThreshold); if (ssTablesToCompact.size() >= maxThreshold) { maxThresholdReached = true; } } numberOfCandidates++; } while (ssTablesToCompact.size() < maxThreshold) { boolean allThreadsDone = checkFinishedThreads(threads, finishedThreads, ssTablesToCompact, minThreshold, maxThreshold); if (allThreadsDone || ssTablesToCompact.size() >= maxThreshold) { break; } } estimatedRemainingTasks = numberOfCandidates / cfs.getMaximumCompactionThreshold(); //TODO shall I add trimming of ssTablesToCompact until the reaches the maximum threshold? return ssTablesToCompact; } private static boolean checkFinishedThreads(Set<Future> threads, Set<Future> finishedThreads, Set<SSTableReader> tablesWithRepeatedKeys, int minThreshold, int maxThreshold) { for (Future<Set<SSTableReader>> thread : threads) { if (thread.isDone() && !finishedThreads.contains(thread)) { finishedThreads.add(thread); try { Set<SSTableReader> references = thread.get(); if (references.size() >= minThreshold) { tablesWithRepeatedKeys.addAll(references); if (tablesWithRepeatedKeys.size() >= maxThreshold) { terminateRemainingSearchThreads(threads, finishedThreads); return true; } } } catch (InterruptedException | ExecutionException e) { logger.error("One of the threads responsible for finding key references terminated unexpectadly", e); } } } if ((threads.size() - finishedThreads.size()) > 0) { System.out.println("Still need to finish " + (threads.size() - finishedThreads.size()) + " threads."); return false; } else { return true; } } private static void terminateRemainingSearchThreads(Set<Future> threads, Set<Future> finishedThreads) { for (Future thread : threads) { if (!finishedThreads.contains(thread)) { thread.cancel(true); System.out.println("Thread " + thread.toString() + " terminated."); } } } /** * @param gcBefore throw away tombstones older than this * @return the next background/minor compaction task to run; null if nothing to do. * <p> * TODO does the following line still applies? If not, change the superclass doc. Repeat for other methods. * Is responsible for marking its sstables as compaction-pending. 
*/ public AbstractCompactionTask getNextBackgroundTask(int gcBefore) { LocalTime now = LocalTime.now(); boolean isBurstHour = now.isAfter(bhcsOptions.startTime) && now.isBefore(bhcsOptions.endTime); if (!isBurstHour) { return null; } Set<SSTableReader> ssTablesToCompact = getKeyReferences(); return createBhcsCompactionTask(ssTablesToCompact, gcBefore); } /** * Creates the compaction task object. * * @param tables the tables we want to compact * @param gcBefore throw away tombstones older than this * @return a compaction task object which will be later used to run the compaction per se */ private AbstractCompactionTask createBhcsCompactionTask(Collection<SSTableReader> tables, int gcBefore) { if (tables.size() == 0) { return null; } else { LifecycleTransaction transaction = cfs.getTracker().tryModify(tables, OperationType.COMPACTION); return new CompactionTask(cfs, transaction, gcBefore); } } /** * @param gcBefore throw away tombstones older than this * @param splitOutput TODO * @return a compaction task that should be run to compact this columnfamilystore * as much as possible. Null if nothing to do. * <p> * Is responsible for marking its sstables as compaction-pending. */ public Collection<AbstractCompactionTask> getMaximalTask(int gcBefore, boolean splitOutput) { Iterable keyReferences = cfs.getUncompactingSSTables(); LifecycleTransaction txn = cfs.getTracker().tryModify(keyReferences, OperationType.COMPACTION); if (splitOutput) { //TODO throw new NotImplementedException(); } else { Set<AbstractCompactionTask> tasks = new HashSet<>(1); tasks.add(new CompactionTask(cfs, txn, gcBefore)); return tasks; } } /** * @param sstables SSTables to compact. Must be marked as compacting. * @param gcBefore throw away tombstones older than this * @return a compaction task corresponding to the requested sstables. * Will not be null. (Will throw if user requests an invalid compaction.) * <p> * Is responsible for marking its sstables as compaction-pending. * TODO DTS, STCS and now BHCS all do basically the same thing in this method. Wouldn't it be better to define this in the superclass and LCS would override it? */ public AbstractCompactionTask getUserDefinedTask(Collection<SSTableReader> sstables, int gcBefore) { return createBhcsCompactionTask(sstables, gcBefore); } /** * @return the number of background tasks estimated to still be needed for this columnfamilystore */ public int getEstimatedRemainingTasks() { return estimatedRemainingTasks; } /** * @return size in bytes of the largest sstables for this strategy */ public long getMaxSSTableBytes() { //TODO why is every strategy, except for LCS, returing this value? 
return Long.MAX_VALUE; } public void addSSTable(SSTableReader added) { sstables.add(added); } public void removeSSTable(SSTableReader sstable) { sstables.remove(sstable); } /** * Returns the sstables managed by this strategy instance */ protected Set<SSTableReader> getSSTables() { return ImmutableSet.copyOf(sstables); } public static Map<String, String> validateOptions(Map<String, String> options) throws ConfigurationException { Map<String, String> uncheckedOptions = AbstractCompactionStrategy.validateOptions(options); uncheckedOptions = BurstHourCompactionStrategyOptions.validateOptions(options, uncheckedOptions); uncheckedOptions.remove(CompactionParams.Option.MIN_THRESHOLD.toString()); uncheckedOptions.remove(CompactionParams.Option.MAX_THRESHOLD.toString()); return uncheckedOptions; } private class SSTableReferencesSearcher implements Callable { private final Iterable<SSTableReader> uncompactingSsTables; private final int maxThreshold; private final Set ssTablesWithReferences; private final KeyIterator keyIterator; private SSTableReferencesSearcher(Iterable<SSTableReader> uncompactingSsTables, KeyIterator keyIterator, int maxThreshold, Set ssTablesWithReferences) { this.keyIterator = keyIterator; this.uncompactingSsTables = uncompactingSsTables; this.maxThreshold = maxThreshold; this.ssTablesWithReferences = ssTablesWithReferences; } /** * Computes a result, or throws an exception if unable to do so. * * @return computed result * @throws Exception if unable to compute a result */ public Set<SSTableReader> call() throws Exception { while (keyIterator.hasNext()) { DecoratedKey key = keyIterator.next(); System.out.println("Starting scan for key " + key.toString()); for (SSTableReader ssTable : uncompactingSsTables) { // check if the key actually exists in this sstable, without updating cache and stats if (ssTable.getPosition(key, SSTableReader.Operator.EQ, false) != null) { ssTablesWithReferences.add(ssTable); } } System.out.println("Key " + key.toString() + " is referenced by " + ssTablesWithReferences.size() + " tables."); if (ssTablesWithReferences.size() >= maxThreshold) { break; } } return ssTablesWithReferences; } } }
Started using instance variable sstables.
src/java/org/apache/cassandra/db/compaction/BurstHourCompactionStrategy.java
Started using instance variable sstables.
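Comparing the two versions of BurstHourCompactionStrategy, the change named in the commit message is the candidate source in getKeyReferences(): filterSuspectSSTables(cfs.getUncompactingSSTables()) becomes filterSuspectSSTables(sstables), so candidates now come from the set maintained by addSSTable()/removeSSTable(). The rest of the diff is cleanup: System.out.println calls replaced by the slf4j logger, raw Callable/Future/Set types parameterized, SSTableReferencesSearcher made a static nested class, the result of thread.cancel(true) checked before logging, and getMaximalTask() rebuilt on getKeyReferences()/createBhcsCompactionTask() instead of opening one transaction over every uncompacting sstable.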
Java
apache-2.0
581033c0d4831505ceb9bb753447d12bc682d501
0
uchida/selendroid,frolovs/selendroid,ghrud92/selendroid,rasata/selendroid,asm89/selendroid,frolovs/selendroid,ghrud92/selendroid,uchida/selendroid,vishals79/appium-uiautomator2-driver,masbog/selendroid,rasata/selendroid,ghrud92/selendroid,masbog/selendroid,lukeis/selendroid,SpencerMalone/selendroid,SpencerMalone/selendroid,vishals79/appium-uiautomator2-driver,VikingDen/selendroid,sri096/selendroid,lukeis/selendroid,sri096/selendroid,koichirok/selendroid,smiklosovic/selendroid,selendroid/selendroid,masbog/selendroid,appium/selendroid,selendroid/selendroid,PrakashGoulla/selendroid,smiklosovic/selendroid,anandsadu/selendroid,mach6/selendroid,uchida/selendroid,anandsadu/selendroid,lukeis/selendroid,sri096/selendroid,SpencerMalone/selendroid,frolovs/selendroid,anandsadu/selendroid,christiannolte/selendroid,smiklosovic/selendroid,vishals79/appium-uiautomator2-driver,asm89/selendroid,SpencerMalone/selendroid,sri096/selendroid,VikingDen/selendroid,VikingDen/selendroid,PrakashGoulla/selendroid,koichirok/selendroid,rasata/selendroid,PrakashGoulla/selendroid,koichirok/selendroid,asm89/selendroid,vishals79/appium-uiautomator2-driver,ghrud92/selendroid,PrakashGoulla/selendroid,appium/selendroid,mach6/selendroid,christiannolte/selendroid,luohaoyu/selendroid,selendroid/selendroid,ghrud92/selendroid,0x1mason/selendroid,selendroid/selendroid,smiklosovic/selendroid,selendroid/selendroid,0x1mason/selendroid,mach6/selendroid,VikingDen/selendroid,mach6/selendroid,masbog/selendroid,luohaoyu/selendroid,rasata/selendroid,luohaoyu/selendroid,uchida/selendroid,luohaoyu/selendroid,frolovs/selendroid,anandsadu/selendroid,uchida/selendroid,DominikDary/selendroid,vishals79/appium-uiautomator2-driver,0x1mason/selendroid,luohaoyu/selendroid,SpencerMalone/selendroid,asm89/selendroid,christiannolte/selendroid,koichirok/selendroid,appium/selendroid,christiannolte/selendroid,DominikDary/selendroid,koichirok/selendroid,mach6/selendroid,christiannolte/selendroid,rasata/selendroid,smiklosovic/selendroid,lukeis/selendroid,masbog/selendroid,PrakashGoulla/selendroid,frolovs/selendroid,DominikDary/selendroid,asm89/selendroid,appium/selendroid,lukeis/selendroid,appium/selendroid,sri096/selendroid,0x1mason/selendroid,anandsadu/selendroid
/* * Copyright 2012-2013 eBay Software Foundation and selendroid committers. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.selendroid.server.model; import io.selendroid.ServerInstrumentation; import io.selendroid.android.ViewHierarchyAnalyzer; import io.selendroid.android.internal.DomWindow; import io.selendroid.exceptions.SelendroidException; import io.selendroid.exceptions.StaleElementReferenceException; import io.selendroid.server.model.js.AndroidAtoms; import io.selendroid.util.SelendroidLogger; import java.util.List; import java.util.Map; import java.util.Set; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.webkit.JsResult; import android.webkit.WebChromeClient; import android.webkit.WebSettings; import android.webkit.WebView; public class SelendroidWebDriver { private static final String ELEMENT_KEY = "ELEMENT"; private static final long FOCUS_TIMEOUT = 1000L; private static final long LOADING_TIMEOUT = 30000L; private static final long POLLING_INTERVAL = 50L; private static final long START_LOADING_TIMEOUT = 700L; static final long UI_TIMEOUT = 3000L; private volatile boolean pageDoneLoading; private volatile boolean pageStartedLoading; private volatile String result; private volatile WebView webview = null; private static final String WINDOW_KEY = "WINDOW"; private volatile boolean editAreaHasFocus; private final Object syncObject = new Object(); private boolean done = false; private ServerInstrumentation serverInstrumentation = null; private SessionCookieManager sm = new SessionCookieManager(); private SelendroidWebChromeClient chromeClient = null; public SelendroidWebDriver(ServerInstrumentation serverInstrumentation, String handle) { this.serverInstrumentation = serverInstrumentation; init(handle); } private static String escapeAndQuote(final String toWrap) { StringBuilder toReturn = new StringBuilder("\""); for (int i = 0; i < toWrap.length(); i++) { char c = toWrap.charAt(i); if (c == '\"') { toReturn.append("\\\""); } else if (c == '\\') { toReturn.append("\\\\"); } else { toReturn.append(c); } } toReturn.append("\""); return toReturn.toString(); } @SuppressWarnings("unchecked") private String convertToJsArgs(JSONArray args) throws JSONException { StringBuilder toReturn = new StringBuilder(); int length = args.length(); for (int i = 0; i < length; i++) { toReturn.append((i > 0) ? "," : ""); toReturn.append(convertToJsArgs(args.get(i))); } SelendroidLogger.log("convertToJsArgs: " + toReturn.toString()); return toReturn.toString(); } private String convertToJsArgs(Object obj) { StringBuilder toReturn = new StringBuilder(); if (obj instanceof List<?>) { toReturn.append("["); List<Object> aList = (List<Object>) obj; for (int j = 0; j < aList.size(); j++) { String comma = ((j == 0) ? 
"" : ","); toReturn.append(comma + convertToJsArgs(aList.get(j))); } toReturn.append("]"); } else if (obj instanceof Map<?, ?>) { Map<Object, Object> aMap = (Map<Object, Object>) obj; String toAdd = "{"; for (Object key : aMap.keySet()) { toAdd += key + ":" + convertToJsArgs(aMap.get(key)) + ","; } toReturn.append(toAdd.substring(0, toAdd.length() - 1) + "}"); } else if (obj instanceof AndroidWebElement) { // A WebElement is represented in JavaScript by an Object as // follow: {"ELEMENT":"id"} where "id" refers to the id // of the HTML element in the javascript cache that can // be accessed throught bot.inject.cache.getCache_() toReturn.append("{\"" + ELEMENT_KEY + "\":\"" + ((AndroidWebElement) obj).getId() + "\"}"); } else if (obj instanceof DomWindow) { // A DomWindow is represented in JavaScript by an Object as // follow {"WINDOW":"id"} where "id" refers to the id of the // DOM window in the cache. toReturn.append("{\"" + WINDOW_KEY + "\":\"" + ((DomWindow) obj).getKey() + "\"}"); } else if (obj instanceof Number || obj instanceof Boolean) { toReturn.append(String.valueOf(obj)); } else if (obj instanceof String) { toReturn.append(escapeAndQuote((String) obj)); } SelendroidLogger.log("convertToJsArgs: " + toReturn.toString()); return toReturn.toString(); } public Object executeAtom(AndroidAtoms atom, Object... args) { JSONArray array = new JSONArray(); for (int i = 0; i < args.length; i++) { array.put(args[i]); } try { return executeAtom(atom, array); } catch (JSONException je) { je.printStackTrace(); throw new RuntimeException(je); } } public Object executeAtom(AndroidAtoms atom, JSONArray args) throws JSONException { final String myScript = atom.getValue(); String scriptInWindow = "(function(){ " + " var win; try{win=window;}catch(e){win=window;}" + "with(win){return (" + myScript + ")(" + convertToJsArgs(args) + ")}})()"; String jsResult = executeJavascriptInWebView("alert('selendroid:'+" + scriptInWindow + ")"); SelendroidLogger.log("jsResult: " + jsResult); if (jsResult == null || "undefined".equals(jsResult)) { return null; } try { JSONObject json = new JSONObject(jsResult); if (0 != json.optInt("status")) { Object value = json.get("value"); if ((value instanceof String && value.equals("Element does not exist in cache")) || (value instanceof JSONObject && ((JSONObject) value).getString("message").equals( "Element does not exist in cache"))) { throw new StaleElementReferenceException(json.optString("value")); } throw new SelendroidException(json.optString("value")); } if (json.isNull("value")) { return null; } else { return json.get("value"); } } catch (JSONException e) { throw new SelendroidException(e); } } private String executeJavascriptInWebView(final String script) { result = null; ServerInstrumentation.getInstance().runOnUiThread(new Runnable() { public void run() { if (webview.getUrl() == null) { return; } //seems to be needed webview.setWebChromeClient(chromeClient); webview.loadUrl("javascript:" + script); } }); long timeout = System.currentTimeMillis() + 60000; /* * how long to wait to allow the script to * run? This could be arbitrarily high for * some users... 
setting extremely high for * now (1 min) */ synchronized (syncObject) { while (result == null && (System.currentTimeMillis() < timeout)) { try { syncObject.wait(2000); } catch (InterruptedException e) { throw new SelendroidException(e); } } return result; } } public Object executeScript(String script) { try { return injectJavascript(script, false, new JSONArray()); } catch (JSONException je) { je.printStackTrace(); throw new RuntimeException(je); } } public Object executeScript(String script, JSONArray args) { try { return injectJavascript(script, false, args); } catch (JSONException je) { je.printStackTrace(); throw new RuntimeException(je); } } public Object executeScript(String script, Object args) { try { return injectJavascript(script, false, args); } catch (JSONException je) { je.printStackTrace(); throw new RuntimeException(je); } } public String getCurrentUrl() { if (webview == null) { throw new SelendroidException("No open web view."); } long end = System.currentTimeMillis() + UI_TIMEOUT; final String[] url = new String[1]; done = false; Runnable r = new Runnable() { public void run() { url[0] = webview.getUrl(); synchronized (this) { this.notify(); } } }; runSynchronously(r, UI_TIMEOUT); return url[0]; } public void get(final String url) { serverInstrumentation.runOnUiThread(new Runnable() { public void run() { webview.loadUrl(url); } }); } public Object getWindowSource() throws JSONException { JSONObject source = new JSONObject( (String) executeScript("return (new XMLSerializer()).serializeToString(document.documentElement);")); return source.getString("value"); } protected void init(String handle) { System.out.println("Selendroid webdriver init"); long start = System.currentTimeMillis(); List<WebView> webviews = ViewHierarchyAnalyzer.getDefaultInstance().findWebViews(); while (webviews == null && (System.currentTimeMillis() - start <= ServerInstrumentation.getInstance() .getAndroidWait().getTimeoutInMillis())) { DefaultSelendroidDriver.sleepQuietly(500); webviews = ViewHierarchyAnalyzer.getDefaultInstance().findWebViews(); } if (handle.contains("_")) { int index = Integer.valueOf(handle.split("_")[1]); webview = webviews.get(index); } else { webview = webviews.get(0); } if (webview == null) { throw new SelendroidException("No webview found on current activity."); } configureWebView(webview); } private void configureWebView(final WebView view) { ServerInstrumentation.getInstance().runOnUiThread(new Runnable() { @Override public void run() { try { view.clearCache(true); view.clearFormData(); view.clearHistory(); view.setFocusable(true); view.setFocusableInTouchMode(true); view.setNetworkAvailable(true); chromeClient = new SelendroidWebChromeClient(); view.setWebChromeClient(chromeClient); WebSettings settings = view.getSettings(); settings.setJavaScriptCanOpenWindowsAutomatically(true); settings.setSupportMultipleWindows(true); settings.setBuiltInZoomControls(true); settings.setJavaScriptEnabled(true); settings.setAppCacheEnabled(true); settings.setAppCacheMaxSize(10 * 1024 * 1024); settings.setAppCachePath(""); settings.setDatabaseEnabled(true); settings.setDomStorageEnabled(true); settings.setGeolocationEnabled(true); settings.setSaveFormData(false); settings.setSavePassword(false); settings.setRenderPriority(WebSettings.RenderPriority.HIGH); // Flash settings settings.setPluginState(WebSettings.PluginState.ON); // Geo location settings settings.setGeolocationEnabled(true); settings.setGeolocationDatabasePath("/data/data/selendroid"); } catch (Exception e) { 
SelendroidLogger.log("An error occured while configuring the web view", e); } } }); } Object injectJavascript(String toExecute, boolean isAsync, Object args) throws JSONException { String executeScript = AndroidAtoms.EXECUTE_SCRIPT.getValue(); String window = "window;"; toExecute = "var win_context; try{win_context= " + window + "}catch(e){" + "win_context=window;}with(win_context){" + toExecute + "}"; String wrappedScript = "(function(){" + "var win; try{win=" + window + "}catch(e){win=window}" + "with(win){return (" + executeScript + ")(" + escapeAndQuote(toExecute) + ", ["; if (args instanceof JSONArray) { wrappedScript += convertToJsArgs((JSONArray) args); } else { wrappedScript += convertToJsArgs(args); } wrappedScript += "], true)}})()"; return executeJavascriptInWebView("alert('selendroid:'+" + wrappedScript + ")"); } void resetPageIsLoading() { pageStartedLoading = false; pageDoneLoading = false; } void setEditAreaHasFocus(boolean focused) { editAreaHasFocus = focused; } void waitForPageToLoad() { synchronized (syncObject) { long timeout = System.currentTimeMillis() + START_LOADING_TIMEOUT; while (!pageStartedLoading && (System.currentTimeMillis() < timeout)) { try { syncObject.wait(POLLING_INTERVAL); } catch (InterruptedException e) { throw new RuntimeException(); } } long end = System.currentTimeMillis() + LOADING_TIMEOUT; while (!pageDoneLoading && pageStartedLoading && (System.currentTimeMillis() < end)) { try { syncObject.wait(LOADING_TIMEOUT); } catch (InterruptedException e) { throw new RuntimeException(e); } } } } void waitUntilEditAreaHasFocus() { long timeout = System.currentTimeMillis() + FOCUS_TIMEOUT; while (!editAreaHasFocus && (System.currentTimeMillis() < timeout)) { try { Thread.sleep(POLLING_INTERVAL); } catch (InterruptedException e) { throw new RuntimeException(e); } } } public class SelendroidWebChromeClient extends WebChromeClient { /** * Unconventional way of adding a Javascript interface but the main reason why I took this way * is that it is working stable compared to the webview.addJavascriptInterface way. 
*/ @Override public boolean onJsAlert(WebView view, String url, String message, JsResult jsResult) { if (message != null && message.startsWith("selendroid:")) { jsResult.confirm(); synchronized (syncObject) { result = message.replaceFirst("selendroid:", ""); syncObject.notify(); } return true; } else { return super.onJsAlert(view, url, message, jsResult); } } } public String getTitle() { if (webview == null) { throw new SelendroidException("No open web view."); } long end = System.currentTimeMillis() + UI_TIMEOUT; final String[] title = new String[1]; done = false; serverInstrumentation.runOnUiThread(new Runnable() { public void run() { synchronized (syncObject) { title[0] = webview.getTitle(); done = true; syncObject.notify(); } } }); waitForDone(end, UI_TIMEOUT, "Failed to get title"); return title[0]; } private void waitForDone(long end, long timeout, String error) { synchronized (syncObject) { while (!done && System.currentTimeMillis() < end) { try { syncObject.wait(timeout); } catch (InterruptedException e) { throw new SelendroidException(error, e); } } } } private void runSynchronously(Runnable r, long timeout) { synchronized (r) { serverInstrumentation.getCurrentActivity().runOnUiThread(r); try { r.wait(timeout); } catch (InterruptedException e) { e.printStackTrace(); } } } public WebView getWebview() { return webview; } public Set<Cookie> getCookies(String url) { return sm.getAllCookies(url); } public void removeAllCookie(String url) { sm.removeAllCookies(url); } public void remove(String url, String name) { sm.remove(url, name); } public void setCookies(String url, Cookie cookie) { sm.addCookie(url, cookie); } }
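The mechanism that ties executeJavascriptInWebView() to SelendroidWebChromeClient.onJsAlert() above is a monitor handshake: the injected script is wrapped in alert('selendroid:' + ...), the chrome client strips the prefix, stores the payload and notifies, and the caller waits on syncObject until the result arrives or the timeout expires. A self-contained sketch of just that handshake, with a plain thread standing in for the WebView/chrome client; the class name and the fake payload are illustrative, only the wait/notify logic is taken from the code above.

public class AlertBridgeSketch {
  private final Object syncObject = new Object();
  private volatile String result;

  // Plays the role of SelendroidWebChromeClient.onJsAlert(...).
  void onFakeAlert(String message) {
    if (message != null && message.startsWith("selendroid:")) {
      synchronized (syncObject) {
        result = message.replaceFirst("selendroid:", "");
        syncObject.notify();
      }
    }
  }

  // Plays the role of executeJavascriptInWebView(...): wait until the callback delivers a value.
  String execute() throws InterruptedException {
    result = null;
    // A plain thread stands in for the WebView evaluating "javascript:alert('selendroid:'+...)".
    new Thread(() -> onFakeAlert("selendroid:{\"status\":0,\"value\":42}")).start();
    long timeout = System.currentTimeMillis() + 60000;
    synchronized (syncObject) {
      while (result == null && System.currentTimeMillis() < timeout) {
        syncObject.wait(2000);
      }
      return result;
    }
  }

  public static void main(String[] args) throws InterruptedException {
    System.out.println(new AlertBridgeSketch().execute()); // prints {"status":0,"value":42}
  }
}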
selendroid-server/src/main/java/io/selendroid/server/model/SelendroidWebDriver.java
/* * Copyright 2012-2013 eBay Software Foundation and selendroid committers. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.selendroid.server.model; import io.selendroid.ServerInstrumentation; import io.selendroid.android.ViewHierarchyAnalyzer; import io.selendroid.android.internal.DomWindow; import io.selendroid.exceptions.SelendroidException; import io.selendroid.exceptions.StaleElementReferenceException; import io.selendroid.server.model.js.AndroidAtoms; import io.selendroid.util.SelendroidLogger; import java.util.List; import java.util.Map; import java.util.Set; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.webkit.JsResult; import android.webkit.WebChromeClient; import android.webkit.WebSettings; import android.webkit.WebView; public class SelendroidWebDriver { private static final String ELEMENT_KEY = "ELEMENT"; private static final long FOCUS_TIMEOUT = 1000L; private static final long LOADING_TIMEOUT = 30000L; private static final long POLLING_INTERVAL = 50L; private static final long START_LOADING_TIMEOUT = 700L; static final long UI_TIMEOUT = 3000L; private volatile boolean pageDoneLoading; private volatile boolean pageStartedLoading; private volatile String result; private volatile WebView webview = null; private static final String WINDOW_KEY = "WINDOW"; private volatile boolean editAreaHasFocus; private final Object syncObject = new Object(); private boolean done = false; private ServerInstrumentation serverInstrumentation = null; private SessionCookieManager sm = new SessionCookieManager(); public SelendroidWebDriver(ServerInstrumentation serverInstrumentation, String handle) { this.serverInstrumentation = serverInstrumentation; init(handle); } private static String escapeAndQuote(final String toWrap) { StringBuilder toReturn = new StringBuilder("\""); for (int i = 0; i < toWrap.length(); i++) { char c = toWrap.charAt(i); if (c == '\"') { toReturn.append("\\\""); } else if (c == '\\') { toReturn.append("\\\\"); } else { toReturn.append(c); } } toReturn.append("\""); return toReturn.toString(); } @SuppressWarnings("unchecked") private String convertToJsArgs(JSONArray args) throws JSONException { StringBuilder toReturn = new StringBuilder(); int length = args.length(); for (int i = 0; i < length; i++) { toReturn.append((i > 0) ? "," : ""); toReturn.append(convertToJsArgs(args.get(i))); } SelendroidLogger.log("convertToJsArgs: " + toReturn.toString()); return toReturn.toString(); } private String convertToJsArgs(Object obj) { StringBuilder toReturn = new StringBuilder(); if (obj instanceof List<?>) { toReturn.append("["); List<Object> aList = (List<Object>) obj; for (int j = 0; j < aList.size(); j++) { String comma = ((j == 0) ? 
"" : ","); toReturn.append(comma + convertToJsArgs(aList.get(j))); } toReturn.append("]"); } else if (obj instanceof Map<?, ?>) { Map<Object, Object> aMap = (Map<Object, Object>) obj; String toAdd = "{"; for (Object key : aMap.keySet()) { toAdd += key + ":" + convertToJsArgs(aMap.get(key)) + ","; } toReturn.append(toAdd.substring(0, toAdd.length() - 1) + "}"); } else if (obj instanceof AndroidWebElement) { // A WebElement is represented in JavaScript by an Object as // follow: {"ELEMENT":"id"} where "id" refers to the id // of the HTML element in the javascript cache that can // be accessed throught bot.inject.cache.getCache_() toReturn.append("{\"" + ELEMENT_KEY + "\":\"" + ((AndroidWebElement) obj).getId() + "\"}"); } else if (obj instanceof DomWindow) { // A DomWindow is represented in JavaScript by an Object as // follow {"WINDOW":"id"} where "id" refers to the id of the // DOM window in the cache. toReturn.append("{\"" + WINDOW_KEY + "\":\"" + ((DomWindow) obj).getKey() + "\"}"); } else if (obj instanceof Number || obj instanceof Boolean) { toReturn.append(String.valueOf(obj)); } else if (obj instanceof String) { toReturn.append(escapeAndQuote((String) obj)); } SelendroidLogger.log("convertToJsArgs: " + toReturn.toString()); return toReturn.toString(); } public Object executeAtom(AndroidAtoms atom, Object... args) { JSONArray array = new JSONArray(); for (int i = 0; i < args.length; i++) { array.put(args[i]); } try { return executeAtom(atom, array); } catch (JSONException je) { je.printStackTrace(); throw new RuntimeException(je); } } public Object executeAtom(AndroidAtoms atom, JSONArray args) throws JSONException { final String myScript = atom.getValue(); String scriptInWindow = "(function(){ " + " var win; try{win=window;}catch(e){win=window;}" + "with(win){return (" + myScript + ")(" + convertToJsArgs(args) + ")}})()"; String jsResult = executeJavascriptInWebView("alert('selendroid:'+" + scriptInWindow + ")"); SelendroidLogger.log("jsResult: " + jsResult); if (jsResult == null || "undefined".equals(jsResult)) { return null; } try { JSONObject json = new JSONObject(jsResult); if (0 != json.optInt("status")) { Object value = json.get("value"); if ((value instanceof String && value.equals("Element does not exist in cache")) || (value instanceof JSONObject && ((JSONObject) value).getString("message").equals( "Element does not exist in cache"))) { throw new StaleElementReferenceException(json.optString("value")); } throw new SelendroidException(json.optString("value")); } if (json.isNull("value")) { return null; } else { return json.get("value"); } } catch (JSONException e) { throw new SelendroidException(e); } } private String executeJavascriptInWebView(final String script) { result = null; ServerInstrumentation.getInstance().runOnUiThread(new Runnable() { public void run() { if (webview.getUrl() == null) { return; } webview.loadUrl("javascript:" + script); } }); long timeout = System.currentTimeMillis() + 60000; /* * how long to wait to allow the script to * run? This could be arbitrarily high for * some users... 
setting extremely high for * now (1 min) */ synchronized (syncObject) { while (result == null && (System.currentTimeMillis() < timeout)) { try { syncObject.wait(2000); } catch (InterruptedException e) { throw new SelendroidException(e); } } return result; } } public Object executeScript(String script) { try { return injectJavascript(script, false, new JSONArray()); } catch (JSONException je) { je.printStackTrace(); throw new RuntimeException(je); } } public Object executeScript(String script, JSONArray args) { try { return injectJavascript(script, false, args); } catch (JSONException je) { je.printStackTrace(); throw new RuntimeException(je); } } public Object executeScript(String script, Object args) { try { return injectJavascript(script, false, args); } catch (JSONException je) { je.printStackTrace(); throw new RuntimeException(je); } } public String getCurrentUrl() { if (webview == null) { throw new SelendroidException("No open web view."); } long end = System.currentTimeMillis() + UI_TIMEOUT; final String[] url = new String[1]; done = false; Runnable r = new Runnable() { public void run() { url[0] = webview.getUrl(); synchronized (this) { this.notify(); } } }; runSynchronously(r, UI_TIMEOUT); return url[0]; } public void get(final String url) { serverInstrumentation.runOnUiThread(new Runnable() { public void run() { webview.loadUrl(url); } }); } public Object getWindowSource() throws JSONException { JSONObject source = new JSONObject( (String) executeScript("return (new XMLSerializer()).serializeToString(document.documentElement);")); return source.getString("value"); } protected void init(String handle) { System.out.println("Selendroid webdriver init"); long start = System.currentTimeMillis(); List<WebView> webviews = ViewHierarchyAnalyzer.getDefaultInstance().findWebViews(); while (webviews == null && (System.currentTimeMillis() - start <= ServerInstrumentation.getInstance() .getAndroidWait().getTimeoutInMillis())) { DefaultSelendroidDriver.sleepQuietly(500); webviews = ViewHierarchyAnalyzer.getDefaultInstance().findWebViews(); } if (handle.contains("_")) { int index = Integer.valueOf(handle.split("_")[1]); webview = webviews.get(index); } else { webview = webviews.get(0); } if (webview == null) { throw new SelendroidException("No webview found on current activity."); } configureWebView(webview); } private void configureWebView(final WebView view) { ServerInstrumentation.getInstance().runOnUiThread(new Runnable() { @Override public void run() { try { view.clearCache(true); view.clearFormData(); view.clearHistory(); view.setFocusable(true); view.setFocusableInTouchMode(true); view.setNetworkAvailable(true); view.setWebChromeClient(new MyWebChromeClient()); WebSettings settings = view.getSettings(); settings.setJavaScriptCanOpenWindowsAutomatically(true); settings.setSupportMultipleWindows(true); settings.setBuiltInZoomControls(true); settings.setJavaScriptEnabled(true); settings.setAppCacheEnabled(true); settings.setAppCacheMaxSize(10 * 1024 * 1024); settings.setAppCachePath(""); settings.setDatabaseEnabled(true); settings.setDomStorageEnabled(true); settings.setGeolocationEnabled(true); settings.setSaveFormData(false); settings.setSavePassword(false); settings.setRenderPriority(WebSettings.RenderPriority.HIGH); // Flash settings settings.setPluginState(WebSettings.PluginState.ON); // Geo location settings settings.setGeolocationEnabled(true); settings.setGeolocationDatabasePath("/data/data/selendroid"); } catch (Exception e) { SelendroidLogger.log("An error occured while configuring 
the web view", e); } } }); } Object injectJavascript(String toExecute, boolean isAsync, Object args) throws JSONException { String executeScript = AndroidAtoms.EXECUTE_SCRIPT.getValue(); String window = "window;"; toExecute = "var win_context; try{win_context= " + window + "}catch(e){" + "win_context=window;}with(win_context){" + toExecute + "}"; String wrappedScript = "(function(){" + "var win; try{win=" + window + "}catch(e){win=window}" + "with(win){return (" + executeScript + ")(" + escapeAndQuote(toExecute) + ", ["; if (args instanceof JSONArray) { wrappedScript += convertToJsArgs((JSONArray) args); } else { wrappedScript += convertToJsArgs(args); } wrappedScript += "], true)}})()"; return executeJavascriptInWebView("alert('selendroid:'+" + wrappedScript + ")"); } void resetPageIsLoading() { pageStartedLoading = false; pageDoneLoading = false; } void setEditAreaHasFocus(boolean focused) { editAreaHasFocus = focused; } void waitForPageToLoad() { synchronized (syncObject) { long timeout = System.currentTimeMillis() + START_LOADING_TIMEOUT; while (!pageStartedLoading && (System.currentTimeMillis() < timeout)) { try { syncObject.wait(POLLING_INTERVAL); } catch (InterruptedException e) { throw new RuntimeException(); } } long end = System.currentTimeMillis() + LOADING_TIMEOUT; while (!pageDoneLoading && pageStartedLoading && (System.currentTimeMillis() < end)) { try { syncObject.wait(LOADING_TIMEOUT); } catch (InterruptedException e) { throw new RuntimeException(e); } } } } void waitUntilEditAreaHasFocus() { long timeout = System.currentTimeMillis() + FOCUS_TIMEOUT; while (!editAreaHasFocus && (System.currentTimeMillis() < timeout)) { try { Thread.sleep(POLLING_INTERVAL); } catch (InterruptedException e) { throw new RuntimeException(e); } } } public class MyWebChromeClient extends WebChromeClient { /** * Unconventional way of adding a Javascript interface but the main reason why I took this way * is that it is working stable compared to the webview.addJavascriptInterface way. 
*/ @Override public boolean onJsAlert(WebView view, String url, String message, JsResult jsResult) { if (message != null && message.startsWith("selendroid:")) { jsResult.confirm(); synchronized (syncObject) { result = message.replaceFirst("selendroid:", ""); syncObject.notify(); } return true; } else { return super.onJsAlert(view, url, message, jsResult); } } } public String getTitle() { if (webview == null) { throw new SelendroidException("No open web view."); } long end = System.currentTimeMillis() + UI_TIMEOUT; final String[] title = new String[1]; done = false; serverInstrumentation.runOnUiThread(new Runnable() { public void run() { synchronized (syncObject) { title[0] = webview.getTitle(); done = true; syncObject.notify(); } } }); waitForDone(end, UI_TIMEOUT, "Failed to get title"); return title[0]; } private void waitForDone(long end, long timeout, String error) { synchronized (syncObject) { while (!done && System.currentTimeMillis() < end) { try { syncObject.wait(timeout); } catch (InterruptedException e) { throw new SelendroidException(error, e); } } } } private void runSynchronously(Runnable r, long timeout) { synchronized (r) { serverInstrumentation.getCurrentActivity().runOnUiThread(r); try { r.wait(timeout); } catch (InterruptedException e) { e.printStackTrace(); } } } public WebView getWebview() { return webview; } public Set<Cookie> getCookies(String url) { return sm.getAllCookies(url); } public void removeAllCookie(String url) { sm.removeAllCookies(url); } public void remove(String url, String name) { sm.remove(url, name); } public void setCookies(String url, Cookie cookie) { sm.addCookie(url, cookie); } }
Add a WebChromeClient to the WebView by default, in case it has been replaced.
selendroid-server/src/main/java/io/selendroid/server/model/SelendroidWebDriver.java
Add a WebChromeClient to the WebView by default, in case it has been replaced.
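The commit message above concerns the alert-based JavaScript bridge that MyWebChromeClient implements in this record: results are passed out of the page via alert('selendroid:'+...) and intercepted in onJsAlert, so if the app under test installs its own WebChromeClient the driver would never see the result and executeJavascriptInWebView would simply time out. A minimal, self-contained sketch of such a bridge, assuming the Android WebView API; the class and helper names (JsResultBridge, awaitResult) are illustrative, not part of the record.

import android.webkit.JsResult;
import android.webkit.WebChromeClient;
import android.webkit.WebView;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

// Intercepts alerts of the form "selendroid:<payload>" and hands the payload
// to a waiting caller instead of showing a dialog.
class JsResultBridge extends WebChromeClient {
    private final AtomicReference<String> result = new AtomicReference<String>();
    private final CountDownLatch latch = new CountDownLatch(1);

    @Override
    public boolean onJsAlert(WebView view, String url, String message, JsResult jsResult) {
        if (message != null && message.startsWith("selendroid:")) {
            jsResult.confirm();                                    // suppress the dialog
            result.set(message.substring("selendroid:".length())); // strip the marker prefix
            latch.countDown();                                     // wake the waiting thread
            return true;                                           // alert handled here
        }
        return super.onJsAlert(view, url, message, jsResult);
    }

    // Waits up to timeoutMillis for a result; returns null on timeout.
    String awaitResult(long timeoutMillis) throws InterruptedException {
        return latch.await(timeoutMillis, TimeUnit.MILLISECONDS) ? result.get() : null;
    }
}

Installing a client like this on the WebView by default (and keeping it installed even when the app swaps in its own) is what keeps the selendroid: alerts reaching the driver.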
Java
apache-2.0
e869ceab44894f2e8f231355aa8e016b3ac6202c
0
sitespeedio/browsertime,yesman82/browsertime,sitespeedio/browsertime,sitespeedio/browsertime,sitespeedio/browsertime,tobli/browsertime
/******************************************************************************************************************************* * It's Browser Time! * * * Copyright (C) 2013 by Tobias Lidskog (https://twitter.com/tobiaslidskog) & Peter Hedenskog (http://peterhedenskog.com) * ******************************************************************************************************************************** * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. * ******************************************************************************************************************************** */ package net.browsertime.tool.datacollector; import net.browsertime.tool.timings.TimingMark; import net.browsertime.tool.timings.TimingRun; import org.openqa.selenium.JavascriptExecutor; /** * http://msdn.microsoft.com/en-us/library/ie/ff974719(v=vs.85).aspx */ public class InternetExplorerDataCollector extends TimingDataCollector { @Override public void collectTimingData(JavascriptExecutor js, TimingRun results) { collectMarks(js, results); collectMeasurements(results); } private void collectMarks(JavascriptExecutor js, TimingRun results) { Double time = doubleFromJs(js, "return window.performance.timing.msFirstPaint"); results.addMark(new TimingMark("msFirstPaint", time)); } private void collectMeasurements(TimingRun results) { MarkInterval interval = new MarkInterval("firstPaintTime", "navigationStart", "msFirstPaint"); interval.collectMeasurement(results); } }
src/main/java/net/browsertime/tool/datacollector/InternetExplorerDataCollector.java
/******************************************************************************************************************************* * It's Browser Time! * * * Copyright (C) 2013 by Tobias Lidskog (https://twitter.com/tobiaslidskog) & Peter Hedenskog (http://peterhedenskog.com) * ******************************************************************************************************************************** * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. * ******************************************************************************************************************************** */ package net.browsertime.tool.datacollector; import net.browsertime.tool.timings.TimingMark; import net.browsertime.tool.timings.TimingRun; import org.openqa.selenium.JavascriptExecutor; /** * http://msdn.microsoft.com/en-us/library/ie/ff974719(v=vs.85).aspx */ public class InternetExplorerDataCollector extends TimingDataCollector { @Override public void collectTimingData(JavascriptExecutor js, TimingRun results) { collectMarks(js, results); collectMeasurements(results); } private void collectMarks(JavascriptExecutor js, TimingRun results) { Long time = longFromJs(js, "return window.performance.timing.msFirstPaint"); results.addMark(new TimingMark("msFirstPaint", time)); } private void collectMeasurements(TimingRun results) { MarkInterval interval = new MarkInterval("firstPaintTime", "navigationStart", "msFirstPaint"); interval.collectMeasurement(results); } }
Changed type to Double for firstPaint
src/main/java/net/browsertime/tool/datacollector/InternetExplorerDataCollector.java
Changed type to Double for firstPaint
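The diff in this record swaps longFromJs for doubleFromJs when reading window.performance.timing.msFirstPaint. A plausible motivation: Selenium's JavascriptExecutor returns a Long for whole numbers and a Double for fractional ones, so reading timing values as a Double avoids a failed cast when a driver reports a fractional value. A defensive coercion along those lines is sketched below; the class and method names are illustrative, not the project's API.

import org.openqa.selenium.JavascriptExecutor;

final class JsNumbers {
    private JsNumbers() {}

    // Runs the script and coerces whatever Number the driver hands back to a Double,
    // returning null when the metric is missing (e.g. on non-IE browsers).
    static Double numberFromJs(JavascriptExecutor js, String script) {
        Object raw = js.executeScript(script);
        return (raw instanceof Number) ? ((Number) raw).doubleValue() : null;
    }
}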
Java
apache-2.0
c33789f55a5c18952aba0fe2555b27b0d2dad467
0
apache/jena,apache/jena,apache/jena,apache/jena,apache/jena,apache/jena,apache/jena,apache/jena
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. */ package org.seaborne.dboe.trans.bplustree; import static org.seaborne.dboe.sys.SystemIndex.SizeOfPointer ; import org.apache.jena.atlas.logging.Log ; import org.seaborne.dboe.DBOpEnvException ; import org.seaborne.dboe.base.file.MetaFile ; import org.seaborne.dboe.base.record.RecordFactory ; import org.seaborne.dboe.sys.Names ; import org.seaborne.dboe.sys.SystemIndex ; import org.slf4j.Logger ; /** Configuration for a B+Tree */ final public class BPlusTreeParams { // Global settings public static boolean CheckingTree = SystemIndex.Checking ; // Check on exit of B+Tree modifiying operations public static boolean CheckingNode = false ; // Check within BPTreeNode public static boolean CheckingConcurrency = SystemIndex.Checking ; // Check on exit of B+Tree modifiying operations // Metadata //public static final String NS = BPlusTreeParams.class.getName() ; public static final String NS = Names.keyNSBPlusTree ; public static final String ParamOrder = NS+".order" ; public static final String ParamKeyLength = NS+".keyLength" ; public static final String ParamValueLength = NS+".valueLength" ; public static final String ParamBlockSize = NS+".blockSize" ; public static void checkAll() { CheckingTree = true ; CheckingNode = true ; } public static boolean DumpTree = false ; // Dump the tree during top level logging public static boolean Logging = false ; // Turn on/off logging the hard way public static void infoAll() { DumpTree = true ; Logging = true ; } /* The gap is extra space in a node - some books have node size as 2*N * (often for the classic insertion algorithm where it's easier to implement * by inserting then splitting). */ private static final int Gap = 0 ; public static final int RootId = 0 ; public static final int RootParent = -2 ; public static final int NoParent = -99 ; // Used when getting a block and we don't need/want/know the parent. public static final int UnsetParent = -98 ; // Per instance settings /** Order of the BTree */ final int order ; /** Record factory */ final RecordFactory recordFactory ; /** Factory for key-only records */ final RecordFactory keyFactory ; // ---- Derived constants. /** Maximum number of keys per non-leaf block */ final int MaxRec ; /** Maximum number of pointers per block per non-leaf block */ final int MaxPtr ; /** Minimum number of keys per non-leaf block */ final int MinRec ; /** Minimum number of pointers per block */ final int MinPtr ; /** Index of the split point */ final int SplitIndex ; /** High index of keys array */ final int HighRec ; /** High index of pointers array */ final int HighPtr ; /** Space in a block needed for extra information - the count * The parent is not stored on-disk because a block is always created by fetching from it's parent. 
*/ static int BlockHeaderSize = 4 ; static final boolean logging(Logger log) { return Logging && log.isDebugEnabled() ; } @Override public String toString() { return String.format("Order=%d : Records [key=%d, value=%d] : records=[%d,%d] : pointers=[%d,%d] : split=%d", order, keyFactory.keyLength() , recordFactory.valueLength() , MinRec, MaxRec, MinPtr, MaxPtr, SplitIndex ) ; } public static BPlusTreeParams readMeta(MetaFile mf) { try { int pOrder = mf.getPropertyAsInteger(ParamOrder) ; int pKeyLen = mf.getPropertyAsInteger(ParamKeyLength) ; int pRecLen = mf.getPropertyAsInteger(ParamValueLength) ; return new BPlusTreeParams(pOrder, pKeyLen, pRecLen) ; } catch (NumberFormatException ex) { Log.fatal(BPlusTreeParams.class, "Badly formed metadata for B+Tree") ; throw new DBOpEnvException("Failed to read metadata") ; } } public void addToMetaData(MetaFile mf) { mf.setProperty(ParamOrder, order) ; mf.setProperty(ParamKeyLength, recordFactory.keyLength()) ; mf.setProperty(ParamValueLength, recordFactory.valueLength()) ; mf.flush() ; } public BPlusTreeParams(int order, int keyLen, int valLen) { this(order, new RecordFactory(keyLen, valLen)) ; } public BPlusTreeParams(int order, RecordFactory factory) { // BTrees of order one aren't strictly BTrees // Order 1 => Min size = 0 and max size = 2*N-1 = 1. // If there is a gap, then the code may be defensive enough // and something will work. The B+Trees may have empty nodes // (i.e. no keys, single child). if ( order < 2 ) throw new IllegalArgumentException("BPTree: illegal order (min 2): "+order); this.order = order ; recordFactory = factory ; keyFactory = factory.keyFactory() ; // Derived constants. MaxRec = 2*order-1 + Gap ; MaxPtr = 2*order + Gap ; MinRec = order-1 ; MinPtr = order ; SplitIndex = order-1+Gap; HighPtr = MaxPtr - 1 ; HighRec = HighPtr-1 ; } public int getOrder() { return order ; } public int getPtrLength() { return SizeOfPointer ; } public int getRecordLength() { return recordFactory.recordLength() ; } public RecordFactory getRecordFactory(){ return recordFactory ; } public int getKeyLength() { return keyFactory.recordLength() ; } public RecordFactory getKeyFactory() { return keyFactory ; } public int getCalcBlockSize() { return calcBlockSize(order, recordFactory) ; } /** * Return the best fit for the blocksize and the record length. Knows about * block header space. */ public static int calcOrder(int blockSize, RecordFactory factory) { return calcOrder(blockSize, factory.recordLength()) ; } /** * Return the best fit for the blocksize and the record length. Knows about * block header space. 
*/ public static int calcOrder(int blockSize, int recordLength) { // Length = X*recordLength+(X+1)*PtrLength // => X = (Length-PtrLength)/(recordLength+PtrLength) // BTree order N // MaxRec = 2N-1+Gap = X // N = (X+1-Gap)/2 blockSize -= BlockHeaderSize ; int X = (blockSize-recordLength)/(recordLength+SizeOfPointer) ; int order = (X+1-Gap)/2 ; return order ; } /** return the size of a block */ public static int calcBlockSize(int bpTreeOrder, RecordFactory factory) { BPlusTreeParams p = new BPlusTreeParams(bpTreeOrder, factory) ; int x = p.getMaxRec() * factory.recordLength() + p.getMaxPtr() * SizeOfPointer ; x += BlockHeaderSize ; return x ; } public int getMaxRec() { return MaxRec ; } public int getMaxPtr() { return MaxPtr ; } public int getMinRec() { return MinRec ; } public int getMinPtr() { return MinPtr ; } // /** return the size of a block */ // public static int calcBlockSize(int bTreeOrder, int recordLength) { // BTreeParams p = new BTreeParams(bTreeOrder, recordLength, 0) ; // int x = p.getMaxRec()*recordLength + p.getMaxPtr()*PtrLength ; // x += BlockHeaderSize ; // return x ; // } }
dboe-trans-data/src/main/java/org/seaborne/dboe/trans/bplustree/BPlusTreeParams.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. */ package org.seaborne.dboe.trans.bplustree; import static org.seaborne.dboe.sys.SystemIndex.SizeOfPointer ; import org.apache.jena.atlas.logging.Log ; import org.seaborne.dboe.DBOpEnvException ; import org.seaborne.dboe.base.file.MetaFile ; import org.seaborne.dboe.base.record.RecordFactory ; import org.seaborne.dboe.sys.Names ; import org.seaborne.dboe.sys.SystemIndex ; import org.slf4j.Logger ; /** Configuration for a B+Tree */ final public class BPlusTreeParams { // Global settings public static boolean CheckingTree = SystemIndex.Checking ; // Check on exit of B+Tree modifiying operations public static boolean CheckingNode = false ; // Check within BPTreeNode public static boolean CheckingConcurrency = SystemIndex.Checking ; // Check on exit of B+Tree modifiying operations // Metadata //public static final String NS = BPlusTreeParams.class.getName() ; public static final String NS = Names.keyNSBPlusTree ; public static final String ParamOrder = NS+".order" ; public static final String ParamKeyLength = NS+".keyLength" ; public static final String ParamValueLength = NS+".valueLength" ; public static final String ParamBlockSize = NS+".blockSize" ; public static void checkAll() { CheckingTree = true ; CheckingNode = true ; } public static boolean DumpTree = false ; // Dump the tree during top level logging public static boolean Logging = false ; // Turn on/off logging the hard way public static void infoAll() { DumpTree = true ; Logging = true ; } /* The gap is extra space in a node - some books have node size as 2*N * (often for the classic insertion algorithm where it's easier to implement * by inserting then splitting). */ private static final int Gap = 0 ; public static final int RootId = 0 ; public static final int RootParent = -2 ; public static final int NoParent = -99 ; // Per instance settings /** Order of the BTree */ final int order ; /** Record factory */ final RecordFactory recordFactory ; /** Factory for key-only records */ final RecordFactory keyFactory ; // ---- Derived constants. /** Maximum number of keys per non-leaf block */ final int MaxRec ; /** Maximum number of pointers per block per non-leaf block */ final int MaxPtr ; /** Minimum number of keys per non-leaf block */ final int MinRec ; /** Minimum number of pointers per block */ final int MinPtr ; /** Index of the split point */ final int SplitIndex ; /** High index of keys array */ final int HighRec ; /** High index of pointers array */ final int HighPtr ; /** Space in a block needed for extra information - the count * The parent is not stored on-disk because a block is always created by fetching from it's parent. 
*/ static int BlockHeaderSize = 4 ; static final boolean logging(Logger log) { return Logging && log.isDebugEnabled() ; } @Override public String toString() { return String.format("Order=%d : Records [key=%d, value=%d] : records=[%d,%d] : pointers=[%d,%d] : split=%d", order, keyFactory.keyLength() , recordFactory.valueLength() , MinRec, MaxRec, MinPtr, MaxPtr, SplitIndex ) ; } public static BPlusTreeParams readMeta(MetaFile mf) { try { int pOrder = mf.getPropertyAsInteger(ParamOrder) ; int pKeyLen = mf.getPropertyAsInteger(ParamKeyLength) ; int pRecLen = mf.getPropertyAsInteger(ParamValueLength) ; return new BPlusTreeParams(pOrder, pKeyLen, pRecLen) ; } catch (NumberFormatException ex) { Log.fatal(BPlusTreeParams.class, "Badly formed metadata for B+Tree") ; throw new DBOpEnvException("Failed to read metadata") ; } } public void addToMetaData(MetaFile mf) { mf.setProperty(ParamOrder, order) ; mf.setProperty(ParamKeyLength, recordFactory.keyLength()) ; mf.setProperty(ParamValueLength, recordFactory.valueLength()) ; mf.flush() ; } public BPlusTreeParams(int order, int keyLen, int valLen) { this(order, new RecordFactory(keyLen, valLen)) ; } public BPlusTreeParams(int order, RecordFactory factory) { // BTrees of order one aren't strictly BTrees // Order 1 => Min size = 0 and max size = 2*N-1 = 1. // If there is a gap, then the code may be defensive enough // and something will work. The B+Trees may have empty nodes // (i.e. no keys, single child). if ( order < 2 ) throw new IllegalArgumentException("BPTree: illegal order (min 2): "+order); this.order = order ; recordFactory = factory ; keyFactory = factory.keyFactory() ; // Derived constants. MaxRec = 2*order-1 + Gap ; MaxPtr = 2*order + Gap ; MinRec = order-1 ; MinPtr = order ; SplitIndex = order-1+Gap; HighPtr = MaxPtr - 1 ; HighRec = HighPtr-1 ; } public int getOrder() { return order ; } public int getPtrLength() { return SizeOfPointer ; } public int getRecordLength() { return recordFactory.recordLength() ; } public RecordFactory getRecordFactory(){ return recordFactory ; } public int getKeyLength() { return keyFactory.recordLength() ; } public RecordFactory getKeyFactory() { return keyFactory ; } public int getCalcBlockSize() { return calcBlockSize(order, recordFactory) ; } /** * Return the best fit for the blocksize and the record length. Knows about * block header space. */ public static int calcOrder(int blockSize, RecordFactory factory) { return calcOrder(blockSize, factory.recordLength()) ; } /** * Return the best fit for the blocksize and the record length. Knows about * block header space. 
*/ public static int calcOrder(int blockSize, int recordLength) { // Length = X*recordLength+(X+1)*PtrLength // => X = (Length-PtrLength)/(recordLength+PtrLength) // BTree order N // MaxRec = 2N-1+Gap = X // N = (X+1-Gap)/2 blockSize -= BlockHeaderSize ; int X = (blockSize-recordLength)/(recordLength+SizeOfPointer) ; int order = (X+1-Gap)/2 ; return order ; } /** return the size of a block */ public static int calcBlockSize(int bpTreeOrder, RecordFactory factory) { BPlusTreeParams p = new BPlusTreeParams(bpTreeOrder, factory) ; int x = p.getMaxRec() * factory.recordLength() + p.getMaxPtr() * SizeOfPointer ; x += BlockHeaderSize ; return x ; } public int getMaxRec() { return MaxRec ; } public int getMaxPtr() { return MaxPtr ; } public int getMinRec() { return MinRec ; } public int getMinPtr() { return MinPtr ; } // /** return the size of a block */ // public static int calcBlockSize(int bTreeOrder, int recordLength) { // BTreeParams p = new BTreeParams(bTreeOrder, recordLength, 0) ; // int x = p.getMaxRec()*recordLength + p.getMaxPtr()*PtrLength ; // x += BlockHeaderSize ; // return x ; // } }
Value for "unset" parent.
dboe-trans-data/src/main/java/org/seaborne/dboe/trans/bplustree/BPlusTreeParams.java
Value for "unset" parent.
Java
apache-2.0
56ff7738a696ce5a7527aa3cd938287fcfe9b5d8
0
jnidzwetzki/bboxdb,jnidzwetzki/bboxdb,jnidzwetzki/scalephant,jnidzwetzki/bboxdb,jnidzwetzki/scalephant
package de.fernunihagen.dna.scalephant.distribution.membership; import java.net.InetSocketAddress; import de.fernunihagen.dna.scalephant.distribution.membership.event.DistributedInstanceState; public class DistributedInstance implements Comparable<DistributedInstance> { public final static String UNKOWN_VERSION = "unknown"; /** * The IP address of the instance */ protected final String ip; /** * The port of the instance */ protected final int port; /** * The version number of the instance */ protected String version = UNKOWN_VERSION; /** * The state of the instance */ protected DistributedInstanceState state = DistributedInstanceState.UNKNOWN; public DistributedInstance(final String connectionString, final String version, final DistributedInstanceState state) { this(connectionString); this.version = version; this.state = state; } public DistributedInstance(final String connectionString) { final String[] parts = connectionString.split(":"); if(parts.length != 2) { throw new IllegalArgumentException("Unable to parse:" + connectionString); } try { final Integer portInterger = Integer.parseInt(parts[1]); ip = parts[0]; port = portInterger; } catch(NumberFormatException e) { throw new IllegalArgumentException("Unable to parse: " + parts[1], e); } } public DistributedInstance(final String localIp, final Integer localPort, final String version) { this.ip = localIp; this.port = localPort; this.version = version; } public String getIp() { return ip; } public int getPort() { return port; } public String getVersion() { return version; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((ip == null) ? 0 : ip.hashCode()); result = prime * result + port; return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; DistributedInstance other = (DistributedInstance) obj; if (ip == null) { if (other.ip != null) return false; } else if (!ip.equals(other.ip)) return false; if (port != other.port) return false; return true; } /** * An alternative equals method that checks version and state information too * @param obj * @return */ public boolean fullEquals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; DistributedInstance other = (DistributedInstance) obj; if (ip == null) { if (other.ip != null) return false; } else if (!ip.equals(other.ip)) return false; if (port != other.port) return false; if (state != other.state) return false; if (version == null) { if (other.version != null) return false; } else if (!version.equals(other.version)) return false; return true; } public String toGUIString() { if(version == UNKOWN_VERSION) { return "DistributedInstance [ip=" + ip + ", port=" + port + ", state=" + state + "]"; } else { return "DistributedInstance [ip=" + ip + ", port=" + port + ", state=" + state + ", version=" + version + "]"; } } @Override public String toString() { return "DistributedInstance [ip=" + ip + ", port=" + port + ", version=" + version + ", state=" + state + "]"; } /** * Get the inet socket address from the instance * @return */ public InetSocketAddress getInetSocketAddress() { return new InetSocketAddress(ip, port); } /** * Convert the data back into a string * @return */ public String getStringValue() { return ip + ":" + port; } @Override public int compareTo(final DistributedInstance otherInstance) { return 
getStringValue().compareTo(otherInstance.getStringValue()); } /** * Get the state of the instance * @return */ public DistributedInstanceState getState() { return state; } /** * Set the state of the instance * @param state */ public void setState(final DistributedInstanceState state) { this.state = state; } }
src/main/java/de/fernunihagen/dna/scalephant/distribution/membership/DistributedInstance.java
package de.fernunihagen.dna.scalephant.distribution.membership; import java.net.InetSocketAddress; import de.fernunihagen.dna.scalephant.distribution.membership.event.DistributedInstanceState; public class DistributedInstance implements Comparable<DistributedInstance> { public final static String UNKOWN_VERSION = "unknown"; /** * The IP address of the instance */ protected final String ip; /** * The port of the instance */ protected final int port; /** * The version number of the instance */ protected String version = UNKOWN_VERSION; /** * The state of the instance */ protected DistributedInstanceState state = DistributedInstanceState.UNKNOWN; public DistributedInstance(final String connectionString, final String version) { this(connectionString); this.version = version; } public DistributedInstance(final String connectionString) { final String[] parts = connectionString.split(":"); if(parts.length != 2) { throw new IllegalArgumentException("Unable to parse:" + connectionString); } try { final Integer portInterger = Integer.parseInt(parts[1]); ip = parts[0]; port = portInterger; } catch(NumberFormatException e) { throw new IllegalArgumentException("Unable to parse: " + parts[1], e); } } public DistributedInstance(final String localIp, final Integer localPort, final String version) { this.ip = localIp; this.port = localPort; this.version = version; } public String getIp() { return ip; } public int getPort() { return port; } public String getVersion() { return version; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((ip == null) ? 0 : ip.hashCode()); result = prime * result + port; return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; DistributedInstance other = (DistributedInstance) obj; if (ip == null) { if (other.ip != null) return false; } else if (!ip.equals(other.ip)) return false; if (port != other.port) return false; return true; } public String toGUIString() { if(version == UNKOWN_VERSION) { return "DistributedInstance [ip=" + ip + ", port=" + port + ", state=" + state + "]"; } else { return "DistributedInstance [ip=" + ip + ", port=" + port + ", state=" + state + ", version=" + version + "]"; } } @Override public String toString() { return "DistributedInstance [ip=" + ip + ", port=" + port + ", version=" + version + ", state=" + state + "]"; } /** * Get the inet socket address from the instance * @return */ public InetSocketAddress getInetSocketAddress() { return new InetSocketAddress(ip, port); } /** * Convert the data back into a string * @return */ public String getStringValue() { return ip + ":" + port; } @Override public int compareTo(final DistributedInstance otherInstance) { return getStringValue().compareTo(otherInstance.getStringValue()); } /** * Get the state of the instance * @return */ public DistributedInstanceState getState() { return state; } /** * Set the state of the instance * @param state */ public void setState(final DistributedInstanceState state) { this.state = state; } }
Implemented full equals
src/main/java/de/fernunihagen/dna/scalephant/distribution/membership/DistributedInstance.java
Implemented full equals
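The commit in this record keeps equals()/hashCode() based on ip and port only and adds fullEquals(), which also compares version and state. A short illustrative usage sketch under that assumption (the demo class name and the sample host/port/version values are made up for the example):

import de.fernunihagen.dna.scalephant.distribution.membership.DistributedInstance;

public class FullEqualsDemo {
    public static void main(String[] args) {
        // Same ip:port, different reported version; state defaults to UNKNOWN in both.
        DistributedInstance a = new DistributedInstance("node1", 50505, "0.1");
        DistributedInstance b = new DistributedInstance("node1", 50505, "0.2");

        System.out.println(a.equals(b));      // true  - identity is ip and port only
        System.out.println(a.fullEquals(b));  // false - version (and state) also compared
    }
}

The split lets ip:port identity drive membership lookups and hashing, while fullEquals() can detect that an already-known instance changed its version or state.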
Java
apache-2.0
27c06540d16ad4249d4a0c39ea9a2689ef249fe8
0
bebo/jitsi,martin7890/jitsi,HelioGuilherme66/jitsi,ringdna/jitsi,marclaporte/jitsi,gpolitis/jitsi,level7systems/jitsi,martin7890/jitsi,mckayclarey/jitsi,mckayclarey/jitsi,bebo/jitsi,ringdna/jitsi,martin7890/jitsi,procandi/jitsi,marclaporte/jitsi,jibaro/jitsi,459below/jitsi,dkcreinoso/jitsi,HelioGuilherme66/jitsi,tuijldert/jitsi,damencho/jitsi,459below/jitsi,laborautonomo/jitsi,jibaro/jitsi,damencho/jitsi,mckayclarey/jitsi,bhatvv/jitsi,tuijldert/jitsi,cobratbq/jitsi,dkcreinoso/jitsi,procandi/jitsi,damencho/jitsi,ibauersachs/jitsi,iant-gmbh/jitsi,martin7890/jitsi,gpolitis/jitsi,damencho/jitsi,level7systems/jitsi,pplatek/jitsi,level7systems/jitsi,ibauersachs/jitsi,459below/jitsi,gpolitis/jitsi,gpolitis/jitsi,tuijldert/jitsi,ibauersachs/jitsi,ringdna/jitsi,pplatek/jitsi,procandi/jitsi,bebo/jitsi,marclaporte/jitsi,level7systems/jitsi,bhatvv/jitsi,jitsi/jitsi,bhatvv/jitsi,laborautonomo/jitsi,cobratbq/jitsi,jitsi/jitsi,dkcreinoso/jitsi,bebo/jitsi,ringdna/jitsi,cobratbq/jitsi,HelioGuilherme66/jitsi,damencho/jitsi,laborautonomo/jitsi,iant-gmbh/jitsi,iant-gmbh/jitsi,level7systems/jitsi,laborautonomo/jitsi,mckayclarey/jitsi,dkcreinoso/jitsi,gpolitis/jitsi,ibauersachs/jitsi,tuijldert/jitsi,ringdna/jitsi,mckayclarey/jitsi,HelioGuilherme66/jitsi,bhatvv/jitsi,jitsi/jitsi,iant-gmbh/jitsi,ibauersachs/jitsi,bebo/jitsi,HelioGuilherme66/jitsi,laborautonomo/jitsi,pplatek/jitsi,jitsi/jitsi,procandi/jitsi,martin7890/jitsi,iant-gmbh/jitsi,459below/jitsi,marclaporte/jitsi,bhatvv/jitsi,marclaporte/jitsi,procandi/jitsi,459below/jitsi,jitsi/jitsi,cobratbq/jitsi,dkcreinoso/jitsi,tuijldert/jitsi,pplatek/jitsi,cobratbq/jitsi,pplatek/jitsi,jibaro/jitsi,jibaro/jitsi,jibaro/jitsi
/* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Distributable under LGPL license. * See terms of license at gnu.org. */ package net.java.sip.communicator.impl.protocol.jabber.extensions.colibri; import java.util.*; import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.*; import org.jitsi.service.neomedia.*; import org.jivesoftware.smack.packet.*; /** * Implements the Jitsi Videobridge <tt>conference</tt> IQ within the * COnferencing with LIghtweight BRIdging. * * @author Lyubomir Marinov * @author Boris Grozev * @author George Politis */ public class ColibriConferenceIQ extends IQ { /** * The XML element name of the Jitsi Videobridge <tt>conference</tt> IQ. */ public static final String ELEMENT_NAME = "conference"; /** * The XML name of the <tt>id</tt> attribute of the Jitsi Videobridge * <tt>conference</tt> IQ which represents the value of the <tt>id</tt> * property of <tt>ColibriConferenceIQ</tt>. */ public static final String ID_ATTR_NAME = "id"; /** * The XML COnferencing with LIghtweight BRIdging namespace of the Jitsi * Videobridge <tt>conference</tt> IQ. */ public static final String NAMESPACE = "http://jitsi.org/protocol/colibri"; /** * An array of <tt>int</tt>s which represents the lack of any (RTP) SSRCs * seen/received on a <tt>Channel</tt>. Explicitly defined to reduce * unnecessary allocations. */ public static final int[] NO_SSRCS = new int[0]; /** * The list of {@link ChannelBundle}s included into this <tt>conference</tt> * IQ. */ private final List<ChannelBundle> channelBundles = new LinkedList<ChannelBundle>(); /** * The list of {@link Content}s included into this <tt>conference</tt> IQ. */ private final List<Content> contents = new LinkedList<Content>(); /** * The list of <tt>Endpoint</tt>s included into this <tt>conference</tt> IQ. */ private final List<Endpoint> endpoints = new LinkedList<Endpoint>(); /** * The ID of the conference represented by this IQ. */ private String id; /** * Media recording. */ private Recording recording; private RTCPTerminationStrategy rtcpTerminationStrategy; /** Initializes a new <tt>ColibriConferenceIQ</tt> instance. */ public ColibriConferenceIQ() { } /** * Adds a specific {@link Content} instance to the list of <tt>Content</tt> * instances included into this <tt>conference</tt> IQ. * @param the <tt>ChannelBundle</tt> to add. */ public boolean addChannelBundle(ChannelBundle channelBundle) { if (channelBundle == null) throw new NullPointerException("channelBundle"); return channelBundles.contains(channelBundles) ? false : channelBundles.add(channelBundle); } /** * Adds a specific {@link Content} instance to the list of <tt>Content</tt> * instances included into this <tt>conference</tt> IQ. * * @param content the <tt>Content</tt> instance to be added to this list of * <tt>Content</tt> instances included into this <tt>conference</tt> IQ * @return <tt>true</tt> if the list of <tt>Content</tt> instances included * into this <tt>conference</tt> IQ has been modified as a result of the * method call; otherwise, <tt>false</tt> * @throws NullPointerException if the specified <tt>content</tt> is * <tt>null</tt> */ public boolean addContent(Content content) { if (content == null) throw new NullPointerException("content"); return contents.contains(content) ? false : contents.add(content); } /** * Initializes a new {@link Content} instance with a specific name and adds * it to the list of <tt>Content</tt> instances included into this * <tt>conference</tt> IQ. 
* * @param contentName the name which which the new <tt>Content</tt> instance * is to be initialized * @return <tt>true</tt> if the list of <tt>Content</tt> instances included * into this <tt>conference</tt> IQ has been modified as a result of the * method call; otherwise, <tt>false</tt> */ public boolean addContent(String contentName) { return addContent(new Content(contentName)); } /** * Add an <tt>Endpoint</tt> to this <tt>ColibriConferenceIQ</tt>. * @param endpoint the <tt>Endpoint</tt> to add. */ public void addEndpoint(Endpoint endpoint) { endpoints.add(endpoint); } /** * Returns a list of the <tt>ChannelBundle</tt>s included into this * <tt>conference</tt> IQ. * * @return an unmodifiable <tt>List</tt> of the <tt>ChannelBundle</tt>s * included into this <tt>conference</tt> IQ. */ public List<ChannelBundle> getChannelBundles() { return Collections.unmodifiableList(channelBundles); } /** * Returns an XML <tt>String</tt> representation of this <tt>IQ</tt>. * * @return an XML <tt>String</tt> representation of this <tt>IQ</tt> */ @Override public String getChildElementXML() { StringBuilder xml = new StringBuilder(); xml.append('<').append(ELEMENT_NAME); xml.append(" xmlns='").append(NAMESPACE).append('\''); String id = getID(); if (id != null) xml.append(' ').append(ID_ATTR_NAME).append("='").append(id) .append('\''); List<Content> contents = getContents(); List<ChannelBundle> channelBundles = getChannelBundles(); boolean hasChildren = (recording != null) || (rtcpTerminationStrategy != null) || (contents.size() > 0) || (channelBundles.size() > 0); if (!hasChildren) { xml.append(" />"); } else { xml.append('>'); for (Content content : contents) content.toXML(xml); for (ChannelBundle channelBundle : channelBundles) channelBundle.toXML(xml); if (recording != null) recording.toXML(xml); if (rtcpTerminationStrategy != null) rtcpTerminationStrategy.toXML(xml); xml.append("</").append(ELEMENT_NAME).append('>'); } return xml.toString(); } /** * Returns a <tt>Content</tt> from the list of <tt>Content</tt>s of this * <tt>conference</tt> IQ which has a specific name. If no such * <tt>Content</tt> exists, returns <tt>null</tt>. * * @param contentName the name of the <tt>Content</tt> to be returned * @return a <tt>Content</tt> from the list of <tt>Content</tt>s of this * <tt>conference</tt> IQ which has the specified <tt>contentName</tt> if * such a <tt>Content</tt> exists; otherwise, <tt>null</tt> */ public Content getContent(String contentName) { for (Content content : getContents()) { if (contentName.equals(content.getName())) return content; } return null; } /** * Returns a list of the <tt>Content</tt>s included into this * <tt>conference</tt> IQ. * * @return an unmodifiable <tt>List</tt> of the <tt>Content</tt>s included * into this <tt>conference</tt> IQ */ public List<Content> getContents() { return Collections.unmodifiableList(contents); } /** * Returns the list of <tt>Endpoint</tt>s included in this * <tt>ColibriConferenceIQ</tt>. * @return the list of <tt>Endpoint</tt>s included in this * <tt>ColibriConferenceIQ</tt>. */ public List<Endpoint> getEndpoints() { return Collections.unmodifiableList(endpoints); } /** * Gets the ID of the conference represented by this IQ. * * @return the ID of the conference represented by this IQ */ public String getID() { return id; } /** * Returns a <tt>Content</tt> from the list of <tt>Content</tt>s of this * <tt>conference</tt> IQ which has a specific name. 
If no such * <tt>Content</tt> exists at the time of the invocation of the method, * initializes a new <tt>Content</tt> instance with the specified * <tt>contentName</tt> and includes it into this <tt>conference</tt> IQ. * * @param contentName the name of the <tt>Content</tt> to be returned * @return a <tt>Content</tt> from the list of <tt>Content</tt>s of this * <tt>conference</tt> IQ which has the specified <tt>contentName</tt> */ public Content getOrCreateContent(String contentName) { Content content = getContent(contentName); if (content == null) { content = new Content(contentName); addContent(content); } return content; } /** * Gets the value of the recording field. * @return the value of the recording field. */ public Recording getRecording() { return recording; } public RTCPTerminationStrategy getRTCPTerminationStrategy() { return rtcpTerminationStrategy; } /** * Removes a specific {@link Content} instance from the list of * <tt>Content</tt> instances included into this <tt>conference</tt> IQ. * * @param content the <tt>Content</tt> instance to be removed from the list * of <tt>Content</tt> instances included into this <tt>conference</tt> IQ * @return <tt>true</tt> if the list of <tt>Content</tt> instances included * into this <tt>conference</tt> IQ has been modified as a result of the * method call; otherwise, <tt>false</tt> */ public boolean removeContent(Content content) { return contents.remove(content); } /** * Sets the ID of the conference represented by this IQ. * * @param id the ID of the conference represented by this IQ */ public void setID(String id) { this.id = id; } /** * Sets the recording field. * @param recording the value to set. */ public void setRecording(Recording recording) { this.recording = recording; } public void setRTCPTerminationStrategy( RTCPTerminationStrategy rtcpTerminationStrategy) { this.rtcpTerminationStrategy = rtcpTerminationStrategy; } /** * Represents a <tt>channel</tt> included into a <tt>content</tt> of a Jitsi * Videobridge <tt>conference</tt> IQ. */ public static class Channel extends ChannelCommon { /** * The name of the XML attribute of a <tt>channel</tt> which represents * its direction. */ public static final String DIRECTION_ATTR_NAME = "direction"; /** * The XML element name of a <tt>channel</tt> of a <tt>content</tt> of a * Jitsi Videobridge <tt>conference</tt> IQ. */ public static final String ELEMENT_NAME = "channel"; /** * The XML name of the <tt>host</tt> attribute of a <tt>channel</tt> of * a <tt>content</tt> of a <tt>conference</tt> IQ which represents the * value of the <tt>host</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. * * @deprecated The attribute is supported for the purposes of * compatibility with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public static final String HOST_ATTR_NAME = "host"; /** * The XML name of the <tt>id</tt> attribute of a <tt>channel</tt> of a * <tt>content</tt> of a <tt>conference</tt> IQ which represents the * value of the <tt>id</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. */ public static final String ID_ATTR_NAME = "id"; /** * The XML name of the <tt>last-n</tt> attribute of a video * <tt>channel</tt> which specifies the maximum number of video RTP * streams to be sent from Jitsi Videobridge to the endpoint associated * with the video <tt>channel</tt>. The value of the <tt>last-n</tt> * attribute is a positive number. 
*/ public static final String LAST_N_ATTR_NAME = "last-n"; /** * The XML name of the <tt>receive-simulcast-layer</tt> attribute of a * video <tt>Channel</tt> which specifies the target quality of the * simulcast substreams to be sent from Jitsi Videobridge to the * endpoint associated with the video <tt>Channel</tt>. The value of the * <tt>receive-simulcast-layer</tt> attribute is an unsigned integer. * Typically used for debugging purposes. */ public static final String RECEIVING_SIMULCAST_LAYER = "receive-simulcast-layer"; /** * The XML name of the <tt>rtcpport</tt> attribute of a <tt>channel</tt> * of a <tt>content</tt> of a <tt>conference</tt> IQ which represents * the value of the <tt>rtcpPort</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. * * @deprecated The attribute is supported for the purposes of * compatibility with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public static final String RTCP_PORT_ATTR_NAME = "rtcpport"; public static final String RTP_LEVEL_RELAY_TYPE_ATTR_NAME = "rtp-level-relay-type"; /** * The XML name of the <tt>rtpport</tt> attribute of a <tt>channel</tt> * of a <tt>content</tt> of a <tt>conference</tt> IQ which represents * the value of the <tt>rtpPort</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. * * @deprecated The attribute is supported for the purposes of * compatibility with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public static final String RTP_PORT_ATTR_NAME = "rtpport"; /** * The name of the XML element which is a child of the &lt;channel&gt; * element and which identifies/specifies an (RTP) SSRC which has been * seen/received on the respective <tt>Channel</tt>. */ public static final String SSRC_ELEMENT_NAME = "ssrc"; /** * The direction of the <tt>channel</tt> represented by this instance. */ private MediaDirection direction; /** * The host of the <tt>channel</tt> represented by this instance. * * @deprecated The field is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated private String host; /** * The ID of the <tt>channel</tt> represented by this instance. */ private String id; /** * The maximum number of video RTP streams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt>. */ private Integer lastN; /** * The <tt>payload-type</tt> elements defined by XEP-0167: Jingle RTP * Sessions associated with this <tt>channel</tt>. */ private final List<PayloadTypePacketExtension> payloadTypes = new ArrayList<PayloadTypePacketExtension>(); /** * The target quality of the simulcast substreams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt>. */ private Integer receivingSimulcastLayer; /** * The RTCP port of the <tt>channel</tt> represented by this instance. * * @deprecated The field is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated private int rtcpPort; /** * The type of RTP-level relay (in the terms specified by RFC 3550 * &quot;RTP: A Transport Protocol for Real-Time Applications&quot; in * section 2.3 &quot;Mixers and Translators&quot;) used for this * <tt>Channel</tt>. */ private RTPLevelRelayType rtpLevelRelayType; /** * The RTP port of the <tt>channel</tt> represented by this instance. * * @deprecated The field is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. 
*/ @Deprecated private int rtpPort; /** * The <tt>SourceGroupPacketExtension</tt>s of this channel. */ private List<SourceGroupPacketExtension> sourceGroups; /** * The <tt>SourcePacketExtension</tt>s of this channel. */ private final List<SourcePacketExtension> sources = new LinkedList<SourcePacketExtension>(); /** * The list of (RTP) SSRCs which have been seen/received on this * <tt>Channel</tt> by now. These may exclude SSRCs which are no longer * active. Set by the Jitsi Videobridge server, not its clients. */ private int[] ssrcs = NO_SSRCS; /** Initializes a new <tt>Channel</tt> instance. */ public Channel() { super(Channel.ELEMENT_NAME); } /** * Adds a <tt>payload-type</tt> element defined by XEP-0167: Jingle RTP * Sessions to this <tt>channel</tt>. * * @param payloadType the <tt>payload-type</tt> element to be added to * this <tt>channel</tt> * @return <tt>true</tt> if the list of <tt>payload-type</tt> elements * associated with this <tt>channel</tt> has been modified as part of * the method call; otherwise, <tt>false</tt> * @throws NullPointerException if the specified <tt>payloadType</tt> is * <tt>null</tt> */ public boolean addPayloadType(PayloadTypePacketExtension payloadType) { if (payloadType == null) throw new NullPointerException("payloadType"); // Make sure that the COLIBRI namespace is used. payloadType.setNamespace(null); for (ParameterPacketExtension p : payloadType.getParameters()) p.setNamespace(null); return payloadTypes.contains(payloadType) ? false : payloadTypes.add(payloadType); } /** * Adds a <tt>SourcePacketExtension</tt> to the list of sources of this * channel. * * @param source the <tt>SourcePacketExtension</tt> to add to the list * of sources of this channel * @return <tt>true</tt> if the list of sources of this channel changed * as a result of the execution of the method; otherwise, <tt>false</tt> */ public synchronized boolean addSource(SourcePacketExtension source) { if (source == null) throw new NullPointerException("source"); return sources.contains(source) ? false : sources.add(source); } /** * Adds a <tt>SourceGroupPacketExtension</tt> to the list of source * groups of this channel. * * @param sourceGroup the <tt>SourcePacketExtension</tt> to add to the * list of sources of this channel * * @return <tt>true</tt> if the list of sources of this channel changed * as a result of the execution of the method; otherwise, <tt>false</tt> */ public synchronized boolean addSourceGroup( SourceGroupPacketExtension sourceGroup) { if (sourceGroup == null) throw new NullPointerException("sourceGroup"); if (sourceGroups == null) sourceGroups = new LinkedList<SourceGroupPacketExtension>(); return sourceGroups.contains(sourceGroup) ? false : sourceGroups.add(sourceGroup); } /** * Adds a specific (RTP) SSRC to the list of SSRCs seen/received on this * <tt>Channel</tt>. Invoked by the Jitsi Videobridge server, not its * clients. * * @param ssrc the (RTP) SSRC to be added to the list of SSRCs * seen/received on this <tt>Channel</tt> * @return <tt>true</tt> if the list of SSRCs seen/received on this * <tt>Channel</tt> has been modified as part of the method call; * otherwise, <tt>false</tt> */ public synchronized boolean addSSRC(int ssrc) { // contains for (int i = 0; i < ssrcs.length; i++) { if (ssrcs[i] == ssrc) return false; } // add int[] newSSRCs = new int[ssrcs.length + 1]; System.arraycopy(ssrcs, 0, newSSRCs, 0, ssrcs.length); newSSRCs[ssrcs.length] = ssrc; ssrcs = newSSRCs; return true; } /** * Gets the <tt>direction</tt> of this <tt>Channel</tt>. 
* * @return the <tt>direction</tt> of this <tt>Channel</tt>. */ public MediaDirection getDirection() { return (direction == null) ? MediaDirection.SENDRECV : direction; } /** * Gets the IP address (as a <tt>String</tt> value) of the host on which * the <tt>channel</tt> represented by this instance has been allocated. * * @return a <tt>String</tt> value which represents the IP address of * the host on which the <tt>channel</tt> represented by this instance * has been allocated * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public String getHost() { return host; } /** * Gets the ID of the <tt>channel</tt> represented by this instance. * * @return the ID of the <tt>channel</tt> represented by this instance */ public String getID() { return id; } /** * Gets the maximum number of video RTP streams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt>. * * @return the maximum number of video RTP streams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt> */ public Integer getLastN() { return lastN; } /** * Gets a list of <tt>payload-type</tt> elements defined by XEP-0167: * Jingle RTP Sessions added to this <tt>channel</tt>. * * @return an unmodifiable <tt>List</tt> of <tt>payload-type</tt> * elements defined by XEP-0167: Jingle RTP Sessions added to this * <tt>channel</tt> */ public List<PayloadTypePacketExtension> getPayloadTypes() { return Collections.unmodifiableList(payloadTypes); } /** * Gets the target quality of the simulcast substreams to be sent from * Jitsi Videobridge to the endpoint associated with this video * <tt>Channel</tt>. * * @return the target quality of the simulcast substreams to be sent * from Jitsi Videobridge to the endpoint associated with this video * <tt>Channel</tt>. */ public Integer getReceivingSimulcastLayer() { return receivingSimulcastLayer; } /** * Gets the port which has been allocated to this <tt>channel</tt> for * the purposes of transmitting RTCP packets. * * @return the port which has been allocated to this <tt>channel</tt> * for the purposes of transmitting RTCP packets * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public int getRTCPPort() { return rtcpPort; } /** * Gets the type of RTP-level relay (in the terms specified by RFC 3550 * &quot;RTP: A Transport Protocol for Real-Time Applications&quot; in * section 2.3 &quot;Mixers and Translators&quot;) used for this * <tt>Channel</tt>. * * @return the type of RTP-level relay used for this <tt>Channel</tt> */ public RTPLevelRelayType getRTPLevelRelayType() { return rtpLevelRelayType; } /** * Gets the port which has been allocated to this <tt>channel</tt> for * the purposes of transmitting RTP packets. * * @return the port which has been allocated to this <tt>channel</tt> * for the purposes of transmitting RTP packets * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public int getRTPPort() { return rtpPort; } /** * Gets the list of <tt>SourceGroupPacketExtensions</tt>s which * represent the source groups of this channel. 
* * @return a <tt>List</tt> of <tt>SourceGroupPacketExtension</tt>s which * represent the source groups of this channel */ public synchronized List<SourceGroupPacketExtension> getSourceGroups() { return (sourceGroups == null) ? null : new ArrayList<SourceGroupPacketExtension>(sourceGroups); } /** * Gets the list of <tt>SourcePacketExtensions</tt>s which represent the * sources of this channel. * * @return a <tt>List</tt> of <tt>SourcePacketExtension</tt>s which * represent the sources of this channel */ public synchronized List<SourcePacketExtension> getSources() { return new ArrayList<SourcePacketExtension>(sources); } /** * Gets (a copy of) the list of (RTP) SSRCs seen/received on this * <tt>Channel</tt>. * * @return an array of <tt>int</tt>s which represents (a copy of) the * list of (RTP) SSRCs seen/received on this <tt>Channel</tt> */ public synchronized int[] getSSRCs() { return (ssrcs.length == 0) ? NO_SSRCS : ssrcs.clone(); } @Override protected boolean hasContent() { List<PayloadTypePacketExtension> payloadTypes = getPayloadTypes(); if (!payloadTypes.isEmpty()) return true; List<SourcePacketExtension> sources = getSources(); if (!sources.isEmpty()) return true; int[] ssrcs = getSSRCs(); return (ssrcs.length != 0); } @Override protected void printAttributes(StringBuilder xml) { // direction MediaDirection direction = getDirection(); if ((direction != null) && (direction != MediaDirection.SENDRECV)) { xml.append(' ').append(DIRECTION_ATTR_NAME).append("='") .append(direction.toString()).append('\''); } // host String host = getHost(); if (host != null) { xml.append(' ').append(HOST_ATTR_NAME).append("='").append(host) .append('\''); } // id String id = getID(); if (id != null) { xml.append(' ').append(ID_ATTR_NAME).append("='").append(id) .append('\''); } // lastN Integer lastN = getLastN(); if (lastN != null) { xml.append(' ').append(LAST_N_ATTR_NAME).append("='") .append(lastN).append('\''); } // rtcpPort int rtcpPort = getRTCPPort(); if (rtcpPort > 0) { xml.append(' ').append(RTCP_PORT_ATTR_NAME).append("='") .append(rtcpPort).append('\''); } // rtpLevelRelayType RTPLevelRelayType rtpLevelRelayType = getRTPLevelRelayType(); if (rtpLevelRelayType != null) { xml.append(' ').append(RTP_LEVEL_RELAY_TYPE_ATTR_NAME) .append("='").append(rtpLevelRelayType).append('\''); } // rtpPort int rtpPort = getRTPPort(); if (rtpPort > 0) { xml.append(' ').append(RTP_PORT_ATTR_NAME).append("='") .append(rtpPort).append('\''); } } @Override protected void printContent(StringBuilder xml) { List<PayloadTypePacketExtension> payloadTypes = getPayloadTypes(); List<SourcePacketExtension> sources = getSources(); List<SourceGroupPacketExtension> souceGroups = getSourceGroups(); int[] ssrcs = getSSRCs(); for (PayloadTypePacketExtension payloadType : payloadTypes) xml.append(payloadType.toXML()); for (SourcePacketExtension source : sources) xml.append(source.toXML()); if (souceGroups != null && souceGroups.size() != 0) for (SourceGroupPacketExtension sourceGroup : souceGroups) xml.append(sourceGroup.toXML()); for (int i = 0; i < ssrcs.length; i++) { xml.append('<').append(SSRC_ELEMENT_NAME).append('>') .append(Long.toString(ssrcs[i] & 0xFFFFFFFFL)) .append("</").append(SSRC_ELEMENT_NAME) .append('>'); } } /** * Removes a <tt>payload-type</tt> element defined by XEP-0167: Jingle * RTP Sessions from this <tt>channel</tt>. 
* * @param payloadType the <tt>payload-type</tt> element to be removed * from this <tt>channel</tt> * @return <tt>true</tt> if the list of <tt>payload-type</tt> elements * associated with this <tt>channel</tt> has been modified as part of * the method call; otherwise, <tt>false</tt> */ public boolean removePayloadType(PayloadTypePacketExtension payloadType) { return payloadTypes.remove(payloadType); } /** * Removes a <tt>SourcePacketExtension</tt> from the list of sources of * this channel. * * @param source the <tt>SourcePacketExtension</tt> to remove from the * list of sources of this channel * @return <tt>true</tt> if the list of sources of this channel changed * as a result of the execution of the method; otherwise, <tt>false</tt> */ public synchronized boolean removeSource(SourcePacketExtension source) { return sources.remove(source); } /** * Removes a specific (RTP) SSRC from the list of SSRCs seen/received on * this <tt>Channel</tt>. Invoked by the Jitsi Videobridge server, not * its clients. * * @param ssrc the (RTP) SSRC to be removed from the list of SSRCs * seen/received on this <tt>Channel</tt> * @return <tt>true</tt> if the list of SSRCs seen/received on this * <tt>Channel</tt> has been modified as part of the method call; * otherwise, <tt>false</tt> */ public synchronized boolean removeSSRC(int ssrc) { if (ssrcs.length == 1) { if (ssrcs[0] == ssrc) { ssrcs = NO_SSRCS; return true; } else return false; } else { for (int i = 0; i < ssrcs.length; i++) { if (ssrcs[i] == ssrc) { int[] newSSRCs = new int[ssrcs.length - 1]; if (i != 0) System.arraycopy(ssrcs, 0, newSSRCs, 0, i); if (i != newSSRCs.length) { System.arraycopy( ssrcs, i + 1, newSSRCs, i, newSSRCs.length - i); } ssrcs = newSSRCs; return true; } } return false; } } /** * Sets the <tt>direction</tt> of this <tt>Channel</tt> * * @param direction the <tt>MediaDirection</tt> to set the * <tt>direction</tt> of this <tt>Channel</tt> to. */ public void setDirection(MediaDirection direction) { this.direction = direction; } /** * Sets the IP address (as a <tt>String</tt> value) of the host on which * the <tt>channel</tt> represented by this instance has been allocated. * * @param host a <tt>String</tt> value which represents the IP address * of the host on which the <tt>channel</tt> represented by this * instance has been allocated * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public void setHost(String host) { this.host = host; } /** * Sets the ID of the <tt>channel</tt> represented by this instance. * * @param id the ID of the <tt>channel</tt> represented by this instance */ public void setID(String id) { this.id = id; } /** * Sets the maximum number of video RTP streams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt>. * * @param lastN the maximum number of video RTP streams to be sent from * Jitsi Videobridge to the endpoint associated with this video * <tt>Channel</tt> */ public void setLastN(Integer lastN) { this.lastN = lastN; } /** * Sets the target quality of the simulcast substreams to be sent from * Jitsi Videobridge to the endpoint associated with this video * <tt>Channel</tt>. * * @param simulcastLayer the target quality of the simulcast substreams * to be sent from Jitsi Videobridge to the endpoint associated with * this video <tt>Channel</tt>. 
*/ public void setReceivingSimulcastLayer(Integer simulcastLayer) { this.receivingSimulcastLayer = simulcastLayer; } /** * Sets the port which has been allocated to this <tt>channel</tt> for * the purposes of transmitting RTCP packets. * * @param rtcpPort the port which has been allocated to this * <tt>channel</tt> for the purposes of transmitting RTCP packets * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public void setRTCPPort(int rtcpPort) { this.rtcpPort = rtcpPort; } /** * Sets the type of RTP-level relay (in the terms specified by RFC 3550 * &quot;RTP: A Transport Protocol for Real-Time Applications&quot; in * section 2.3 &quot;Mixers and Translators&quot;) used for this * <tt>Channel</tt>. * * @param rtpLevelRelayType the type of RTP-level relay used for * this <tt>Channel</tt> */ public void setRTPLevelRelayType(RTPLevelRelayType rtpLevelRelayType) { this.rtpLevelRelayType = rtpLevelRelayType; } /** * Sets the type of RTP-level relay (in the terms specified by RFC 3550 * &quot;RTP: A Transport Protocol for Real-Time Applications&quot; in * section 2.3 &quot;Mixers and Translators&quot;) used for this * <tt>Channel</tt>. * * @param s the type of RTP-level relay used for this <tt>Channel</tt> */ public void setRTPLevelRelayType(String s) { setRTPLevelRelayType(RTPLevelRelayType.parseRTPLevelRelayType(s)); } /** * Sets the port which has been allocated to this <tt>channel</tt> for * the purposes of transmitting RTP packets. * * @param rtpPort the port which has been allocated to this * <tt>channel</tt> for the purposes of transmitting RTP packets * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public void setRTPPort(int rtpPort) { this.rtpPort = rtpPort; } /** * Sets the list of (RTP) SSRCs seen/received on this <tt>Channel</tt>. * * @param ssrcs the list of (RTP) SSRCs to be set as seen/received on * this <tt>Channel</tt> */ public void setSSRCs(int[] ssrcs) { /* * TODO Make sure that the SSRCs set on this instance do not contain * duplicates. */ this.ssrcs = ((ssrcs == null) || (ssrcs.length == 0)) ? NO_SSRCS : ssrcs.clone(); } } /** * Represents a "channel-bundle" element. */ public static class ChannelBundle { /** * The name of the "channel-bundle" element. */ public static final String ELEMENT_NAME = "channel-bundle"; /** * The name of the "id" attribute. */ public static final String ID_ATTR_NAME = "id"; /** * The ID of this <tt>ChannelBundle</tt>. */ private String id; /** * The transport element of this <tt>ChannelBundle</tt>. */ private IceUdpTransportPacketExtension transport; /** * Initializes a new <tt>ChannelBundle</tt> with the given ID. * @param id the ID. */ public ChannelBundle(String id) { this.id = id; } /** * Returns the ID of this <tt>ChannelBundle</tt>. * @return the ID of this <tt>ChannelBundle</tt>. */ public String getId() { return id; } /** * Returns the transport element of this <tt>ChannelBundle</tt>. * @return the transport element of this <tt>ChannelBundle</tt>. */ public IceUdpTransportPacketExtension getTransport() { return transport; } /** * Sets the ID of this <tt>ChannelBundle</tt>. * @param id the ID to set. */ public void setId(String id) { this.id = id; } /** * Sets the transport element of this <tt>ChannelBundle</tt>. * @param transport the transport to set. 
*/ public void setTransport(IceUdpTransportPacketExtension transport) { this.transport = transport; } /** * Appends an XML representation of this <tt>ChannelBundle</tt> to * <tt>xml</tt>. * @param xml the <tt>StringBuilder</tt> to append to. */ public void toXML(StringBuilder xml) { xml.append('<').append(ELEMENT_NAME).append(' ') .append(ID_ATTR_NAME).append("='").append(id).append('\''); if (transport != null) { xml.append('>'); xml.append(transport.toXML()); xml.append("</").append(ELEMENT_NAME).append('>'); } else { xml.append(" />"); } } } /** * Class contains common code for both <tt>Channel</tt> and * <tt>SctpConnection</tt> IQ classes. * * @author Pawel Domas */ public static abstract class ChannelCommon { /** * The name of the "channel-bundle-id" attribute. */ public static final String CHANNEL_BUNDLE_ID_ATTR_NAME = "channel-bundle-id"; /** * The XML name of the <tt>endpoint</tt> attribute which specifies the * optional identifier of the endpoint of the conference participant * associated with a <tt>channel</tt>. The value of the * <tt>endpoint</tt> attribute is an opaque <tt>String</tt> from the * point of view of Jitsi Videobridge. */ public static final String ENDPOINT_ATTR_NAME = "endpoint"; /** * The XML name of the <tt>expire</tt> attribute of a <tt>channel</tt> * of a <tt>content</tt> of a <tt>conference</tt> IQ which represents * the value of the <tt>expire</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. */ public static final String EXPIRE_ATTR_NAME = "expire"; /** * The value of the <tt>expire</tt> property of * <tt>ColibriConferenceIQ.Channel</tt> which indicates that no actual * value has been specified for the property in question. */ public static final int EXPIRE_NOT_SPECIFIED = -1; /** * The XML name of the <tt>initiator</tt> attribute of a * <tt>channel</tt> of a <tt>content</tt> of a <tt>conference</tt> IQ * which represents the value of the <tt>initiator</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. */ public static final String INITIATOR_ATTR_NAME = "initiator"; /** * The channel-bundle-id attribute of this <tt>CommonChannel</tt>. */ private String channelBundleId = null; /** * XML element name. */ private String elementName; /** * The identifier of the endpoint of the conference participant * associated with this <tt>Channel</tt>. */ private String endpoint; /** * The number of seconds of inactivity after which the <tt>channel</tt> * represented by this instance expires. */ private int expire = EXPIRE_NOT_SPECIFIED; /** * The indicator which determines whether the conference focus is the * initiator/offerer (as opposed to the responder/answerer) of the media * negotiation associated with this instance. */ private Boolean initiator; private IceUdpTransportPacketExtension transport; /** * Initializes this class with given XML <tt>elementName</tt>. * @param elementName XML element name to be used for producing XML * representation of derived IQ class. */ protected ChannelCommon(String elementName) { this.elementName = elementName; } /** * Get the channel-bundle-id attribute of this <tt>CommonChannel</tt>. * @return the channel-bundle-id attribute of this * <tt>CommonChannel</tt>. */ public String getChannelBundleId() { return channelBundleId; } /** * Gets the identifier of the endpoint of the conference participant * associated with this <tt>Channel</tt>. 
         *
         * @return the identifier of the endpoint of the conference participant
         * associated with this <tt>Channel</tt>
         */
        public String getEndpoint()
        {
            return endpoint;
        }

        /**
         * Gets the number of seconds of inactivity after which the
         * <tt>channel</tt> represented by this instance expires.
         *
         * @return the number of seconds of inactivity after which the
         * <tt>channel</tt> represented by this instance expires
         */
        public int getExpire()
        {
            return expire;
        }

        /**
         * Gets the transport element of this instance.
         *
         * @return the <tt>IceUdpTransportPacketExtension</tt> of this instance
         */
        public IceUdpTransportPacketExtension getTransport()
        {
            return transport;
        }

        /**
         * Indicates whether there is some content that should be printed as
         * child elements of this IQ. If <tt>true</tt> is returned, the
         * {@link #printContent(StringBuilder)} method will be called when the
         * XML representation of this IQ is being constructed.
         *
         * @return <tt>true</tt> if there is content to be printed as child
         * elements of this IQ or <tt>false</tt> otherwise.
         */
        protected abstract boolean hasContent();

        /**
         * Gets the indicator which determines whether the conference focus is
         * the initiator/offerer (as opposed to the responder/answerer) of the
         * media negotiation associated with this instance.
         *
         * @return {@link Boolean#TRUE} if the conference focus is the
         * initiator/offerer of the media negotiation associated with this
         * instance, {@link Boolean#FALSE} if the conference focus is the
         * responder/answerer or <tt>null</tt> if the <tt>initiator</tt> state
         * is unspecified
         */
        public Boolean isInitiator()
        {
            return initiator;
        }

        /**
         * Implemented by derived classes in order to print additional
         * attributes to the main XML element.
         *
         * @param xml the <tt>StringBuilder</tt> to which the XML
         * <tt>String</tt> representation of this <tt>Channel</tt> is to be
         * appended
         */
        protected abstract void printAttributes(StringBuilder xml);

        /**
         * Implemented by derived classes in order to print content child
         * elements of this IQ using the given <tt>StringBuilder</tt>. Called
         * during the construction of the XML representation if
         * {@link #hasContent()} returns <tt>true</tt>.
         *
         * @param xml the <tt>StringBuilder</tt> to which the XML
         * <tt>String</tt> representation of this <tt>Channel</tt> is to be
         * appended
         */
        protected abstract void printContent(StringBuilder xml);

        /**
         * Sets the channel-bundle-id attribute of this <tt>CommonChannel</tt>.
         *
         * @param channelBundleId the value to set.
         */
        public void setChannelBundleId(String channelBundleId)
        {
            this.channelBundleId = channelBundleId;
        }

        /**
         * Sets the identifier of the endpoint of the conference participant
         * associated with this <tt>Channel</tt>.
         *
         * @param endpoint the identifier of the endpoint of the conference
         * participant associated with this <tt>Channel</tt>
         */
        public void setEndpoint(String endpoint)
        {
            this.endpoint = endpoint;
        }

        /**
         * Sets the number of seconds of inactivity after which the
         * <tt>channel</tt> represented by this instance expires.
         *
         * @param expire the number of seconds of inactivity after which the
         * <tt>channel</tt> represented by this instance expires
         * @throws IllegalArgumentException if the value of the specified
         * <tt>expire</tt> is other than {@link #EXPIRE_NOT_SPECIFIED} and
         * negative
         */
        public void setExpire(int expire)
        {
            if ((expire != EXPIRE_NOT_SPECIFIED) && (expire < 0))
                throw new IllegalArgumentException("expire");

            this.expire = expire;
        }

        /**
         * Sets the indicator which determines whether the conference focus is
         * the initiator/offerer (as opposed to the responder/answerer) of the
         * media negotiation associated with this instance.
* * @param initiator {@link Boolean#TRUE} if the conference focus is the * initiator/offerer of the media negotiation associated with this * instance, {@link Boolean#FALSE} if the conference focus is the * responder/answerer or <tt>null</tt> if the <tt>initiator</tt> state * is to be unspecified */ public void setInitiator(Boolean initiator) { this.initiator = initiator; } public void setTransport(IceUdpTransportPacketExtension transport) { this.transport = transport; } /** * Appends the XML <tt>String</tt> representation of this * <tt>Channel</tt> to a specific <tt>StringBuilder</tt>. * * @param xml the <tt>StringBuilder</tt> to which the XML * <tt>String</tt> representation of this <tt>Channel</tt> is to be * appended */ public void toXML(StringBuilder xml) { xml.append('<').append(elementName); // endpoint String endpoint = getEndpoint(); if (endpoint != null) { xml.append(' ').append(ENDPOINT_ATTR_NAME).append("='") .append(endpoint).append('\''); } // expire int expire = getExpire(); if (expire >= 0) { xml.append(' ').append(EXPIRE_ATTR_NAME).append("='") .append(expire).append('\''); } // initiator Boolean initiator = isInitiator(); if (initiator != null) { xml.append(' ').append(INITIATOR_ATTR_NAME).append("='") .append(initiator).append('\''); } String channelBundleId = getChannelBundleId(); if (channelBundleId != null) { xml.append(' ').append(CHANNEL_BUNDLE_ID_ATTR_NAME) .append("='").append(channelBundleId).append('\''); } // Print derived class attributes printAttributes(xml); IceUdpTransportPacketExtension transport = getTransport(); boolean hasTransport = (transport != null); if (hasTransport || hasContent()) { xml.append('>'); if(hasContent()) printContent(xml); if (hasTransport) xml.append(transport.toXML()); xml.append("</").append(elementName).append('>'); } else { xml.append(" />"); } } } /** * Represents a <tt>content</tt> included into a Jitsi Videobridge * <tt>conference</tt> IQ. */ public static class Content { /** * The XML element name of a <tt>content</tt> of a Jitsi Videobridge * <tt>conference</tt> IQ. */ public static final String ELEMENT_NAME = "content"; /** * The XML name of the <tt>name</tt> attribute of a <tt>content</tt> of * a <tt>conference</tt> IQ which represents the <tt>name</tt> property * of <tt>ColibriConferenceIQ.Content</tt>. */ public static final String NAME_ATTR_NAME = "name"; /** * The list of {@link Channel}s included into this <tt>content</tt> of a * <tt>conference</tt> IQ. */ private final List<Channel> channels = new LinkedList<Channel>(); /** * The name of the <tt>content</tt> represented by this instance. */ private String name; /** * The list of {@link SctpConnection}s included into this * <tt>content</tt> of a <tt>conference</tt> IQ. */ private final List<SctpConnection> sctpConnections = new LinkedList<SctpConnection>(); /** * Initializes a new <tt>Content</tt> instance without a name and * channels. */ public Content() { } /** * Initializes a new <tt>Content</tt> instance with a specific name and * without channels. * * @param name the name to initialize the new instance with */ public Content(String name) { setName(name); } /** * Adds a specific <tt>Channel</tt> to the list of <tt>Channel</tt>s * included into this <tt>Content</tt>. 
* * @param channel the <tt>Channel</tt> to be included into this * <tt>Content</tt> * @return <tt>true</tt> if the list of <tt>Channel</tt>s included into * this <tt>Content</tt> was modified as a result of the execution of * the method; otherwise, <tt>false</tt> * @throws NullPointerException if the specified <tt>channel</tt> is * <tt>null</tt> */ public boolean addChannel(Channel channel) { if (channel == null) throw new NullPointerException("channel"); return channels.contains(channel) ? false : channels.add(channel); } /** * Adds a specific <tt>SctpConnection</tt> to the list of * <tt>SctpConnection</tt>s included into this <tt>Content</tt>. * * @param conn the <tt>SctpConnection</tt> to be included into this * <tt>Content</tt> * @return <tt>true</tt> if the list of <tt>SctpConnection</tt>s * included into this <tt>Content</tt> was modified as a result of * the execution of the method; otherwise, <tt>false</tt> * @throws NullPointerException if the specified <tt>conn</tt> is * <tt>null</tt> */ public boolean addSctpConnection(SctpConnection conn) { if(conn == null) throw new NullPointerException("Sctp connection"); return !sctpConnections.contains(conn) && sctpConnections.add(conn); } /** * Gets the <tt>Channel</tt> at a specific index/position within the * list of <tt>Channel</tt>s included in this <tt>Content</tt>. * * @param channelIndex the index/position within the list of * <tt>Channel</tt>s included in this <tt>Content</tt> of the * <tt>Channel</tt> to be returned * @return the <tt>Channel</tt> at the specified <tt>channelIndex</tt> * within the list of <tt>Channel</tt>s included in this * <tt>Content</tt> */ public Channel getChannel(int channelIndex) { return getChannels().get(channelIndex); } /** * Gets a <tt>Channel</tt> which is included into this <tt>Content</tt> * and which has a specific ID. * * @param channelID the ID of the <tt>Channel</tt> included into this * <tt>Content</tt> to be returned * @return the <tt>Channel</tt> which is included into this * <tt>Content</tt> and which has the specified <tt>channelID</tt> if * such a <tt>Channel</tt> exists; otherwise, <tt>null</tt> */ public Channel getChannel(String channelID) { for (Channel channel : getChannels()) { if (channelID.equals(channel.getID())) return channel; } return null; } /** * Gets the number of <tt>Channel</tt>s included into/associated with * this <tt>Content</tt>. * * @return the number of <tt>Channel</tt>s included into/associated with * this <tt>Content</tt> */ public int getChannelCount() { return getChannels().size(); } /** * Gets a list of the <tt>Channel</tt> included into/associated with * this <tt>Content</tt>. * * @return an unmodifiable <tt>List</tt> of the <tt>Channel</tt>s * included into/associated with this <tt>Content</tt> */ public List<Channel> getChannels() { return Collections.unmodifiableList(channels); } /** * Gets the name of the <tt>content</tt> represented by this instance. * * @return the name of the <tt>content</tt> represented by this instance */ public String getName() { return name; } /** * Gets a list of the <tt>SctpConnection</tt>s included into/associated * with this <tt>Content</tt>. * * @return an unmodifiable <tt>List</tt> of the <tt>SctpConnection</tt>s * included into/associated with this <tt>Content</tt> */ public List<SctpConnection> getSctpConnections() { return Collections.unmodifiableList(sctpConnections); } /** * Removes a specific <tt>Channel</tt> from the list of * <tt>Channel</tt>s included into this <tt>Content</tt>. 
* * @param channel the <tt>Channel</tt> to be excluded from this * <tt>Content</tt> * @return <tt>true</tt> if the list of <tt>Channel</tt>s included into * this <tt>Content</tt> was modified as a result of the execution of * the method; otherwise, <tt>false</tt> */ public boolean removeChannel(Channel channel) { return channels.remove(channel); } /** * Sets the name of the <tt>content</tt> represented by this instance. * * @param name the name of the <tt>content</tt> represented by this * instance * @throws NullPointerException if the specified <tt>name</tt> is * <tt>null</tt> */ public void setName(String name) { if (name == null) throw new NullPointerException("name"); this.name = name; } /** * Appends the XML <tt>String</tt> representation of this * <tt>Content</tt> to a specific <tt>StringBuilder</tt>. * * @param xml the <tt>StringBuilder</tt> to which the XML * <tt>String</tt> representation of this <tt>Content</tt> is to be * appended */ public void toXML(StringBuilder xml) { xml.append('<').append(ELEMENT_NAME); xml.append(' ').append(NAME_ATTR_NAME).append("='") .append(getName()).append('\''); List<Channel> channels = getChannels(); List<SctpConnection> connections = getSctpConnections(); if (channels.size() == 0 && connections.size() == 0) { xml.append(" />"); } else { xml.append('>'); for (Channel channel : channels) channel.toXML(xml); for(SctpConnection conn : connections) conn.toXML(xml); xml.append("</").append(ELEMENT_NAME).append('>'); } } } /** * Represents an 'endpoint' element. */ public static class Endpoint { /** * The name of the 'displayname' attribute. */ public static final String DISPLAYNAME_ATTR_NAME = "displayname"; /** * The name of the 'endpoint' element. */ public static final String ELEMENT_NAME = "endpoint"; /** * The name of the 'id' attribute. */ public static final String ID_ATTR_NAME = "id"; /** * The 'display name' of this <tt>Endpoint</tt>. */ private String displayName; /** * The 'id' of this <tt>Endpoint</tt>. */ private String id; /** * Initializes a new <tt>Endpoint</tt> with the given ID and display * name. * @param id the ID. * @param displayName the display name. */ public Endpoint(String id, String displayName) { this.id = id; this.displayName = displayName; } /** * Returns the display name of this <tt>Endpoint</tt>. * @return the display name of this <tt>Endpoint</tt>. */ public String getDisplayName() { return displayName; } /** * Returns the ID of this <tt>Endpoint</tt>. * @return the ID of this <tt>Endpoint</tt>. */ public String getId() { return id; } /** * Sets the display name of this <tt>Endpoint</tt>. * @param displayName the display name to set. */ public void setDisplayName(String displayName) { this.displayName = displayName; } /** * Sets the ID of this <tt>Endpoint</tt>. * @param id the ID to set. */ public void setId(String id) { this.id = id; } } /** * Represents a <tt>recording</tt> element. */ public static class Recording { /** * The XML name of the <tt>recording</tt> element. */ public static final String ELEMENT_NAME = "recording"; /** * The XML name of the <tt>path</tt> attribute. */ public static final String PATH_ATTR_NAME = "path"; /** * The XML name of the <tt>state</tt> attribute. */ public static final String STATE_ATTR_NAME = "state"; /** * The XML name of the <tt>token</tt> attribute. 
*/ public static final String TOKEN_ATTR_NAME = "token"; private String path; private boolean state; private String token; public Recording(boolean state) { this.state = state; } public Recording(boolean state, String token) { this(state); this.token = token; } public String getPath() { return path; } public boolean getState() { return state; } public String getToken() { return token; } public void setPath(String path) { this.path = path; } public void toXML(StringBuilder xml) { xml.append('<').append(ELEMENT_NAME); xml.append(' ').append(STATE_ATTR_NAME).append("='") .append(state).append('\''); if (token != null) { xml.append(' ').append(TOKEN_ATTR_NAME).append("='") .append(token).append('\''); } if (path != null) { xml.append(' ').append(PATH_ATTR_NAME).append("='") .append(path).append('\''); } xml.append("/>"); } } public static class RTCPTerminationStrategy { public static final String ELEMENT_NAME = "rtcp-termination-strategy"; public static final String NAME_ATTR_NAME = "name"; private String name; public String getName() { return name; } public void setName(String name) { this.name = name; } public void toXML(StringBuilder xml) { xml.append('<').append(ELEMENT_NAME); xml.append(' ').append(NAME_ATTR_NAME).append("='") .append(name).append('\''); xml.append("/>"); } } /** * Represents a <tt>SCTP connection</tt> included into a <tt>content</tt> * of a Jitsi Videobridge <tt>conference</tt> IQ. * * @author Pawel Domas */ public static class SctpConnection extends ChannelCommon { /** * The XML element name of a <tt>content</tt> of a Jitsi Videobridge * <tt>conference</tt> IQ. */ public static final String ELEMENT_NAME = "sctpconnection"; /** * The XML name of the <tt>port</tt> attribute of a * <tt>SctpConnection</tt> of a <tt>conference</tt> IQ which represents * the SCTP port property of * <tt>ColibriConferenceIQ.SctpConnection</tt>. */ public static final String PORT_ATTR_NAME = "port"; /** * SCTP port attribute. 5000 by default. */ private int port = 5000; /** * Initializes a new <tt>SctpConnection</tt> instance without an * endpoint name and with default port value set. */ public SctpConnection() { super(SctpConnection.ELEMENT_NAME); } /** * Gets the SCTP port of the <tt>SctpConnection</tt> described by this * instance. * * @return the SCTP port of the <tt>SctpConnection</tt> represented by * this instance. */ public int getPort() { return port; } /** * {@inheritDoc} * * No content other than transport for <tt>SctpConnection</tt>. */ @Override protected boolean hasContent() { return false; } /** * {@inheritDoc} */ @Override protected void printAttributes(StringBuilder xml) { xml.append(' ').append(PORT_ATTR_NAME).append("='") .append(getPort()).append('\''); } @Override protected void printContent(StringBuilder xml) { // No other content than the transport shared from ChannelCommon } /** * Sets the SCTP port of the <tt>SctpConnection</tt> represented by this * instance. * * @param port the SCTP port of the <tt>SctpConnection</tt> * represented by this instance */ public void setPort(int port) { this.port = port; } } }
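// Illustrative usage sketch: one way a client might assemble and serialize a
// COLIBRI conference request with the classes above. The conference, channel
// and endpoint identifiers, the SSRC value and the expire timeout are made-up
// example values, and the sketch assumes the ColibriConferenceIQ#setID,
// #addContent and #getChildElementXML members declared earlier in this file.
class ColibriConferenceIQUsageSketch
{
    static String buildExampleRequest()
    {
        ColibriConferenceIQ iq = new ColibriConferenceIQ();
        iq.setID("abc123"); // hypothetical conference ID

        // One "audio" content with a single channel for a hypothetical
        // participant endpoint.
        ColibriConferenceIQ.Content audio
            = new ColibriConferenceIQ.Content("audio");
        ColibriConferenceIQ.Channel channel
            = new ColibriConferenceIQ.Channel();

        channel.setID("channel-1");        // hypothetical channel ID
        channel.setEndpoint("endpoint-1"); // hypothetical endpoint ID
        channel.setInitiator(Boolean.TRUE);
        channel.setExpire(60);             // expire after 60s of inactivity
        channel.addSSRC(0x12345678);       // made-up RTP SSRC

        audio.addChannel(channel);
        iq.addContent(audio);

        // Roughly yields <conference xmlns='http://jitsi.org/protocol/colibri'
        // id='abc123'> with the content/channel children serialized by the
        // toXML(StringBuilder) methods above.
        return iq.getChildElementXML();
    }
}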
src/net/java/sip/communicator/impl/protocol/jabber/extensions/colibri/ColibriConferenceIQ.java
/* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Distributable under LGPL license. * See terms of license at gnu.org. */ package net.java.sip.communicator.impl.protocol.jabber.extensions.colibri; import java.util.*; import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.*; import org.jitsi.service.neomedia.*; import org.jivesoftware.smack.packet.*; /** * Implements the Jitsi Videobridge <tt>conference</tt> IQ within the * COnferencing with LIghtweight BRIdging. * * @author Lyubomir Marinov * @author Boris Grozev */ public class ColibriConferenceIQ extends IQ { /** * The XML element name of the Jitsi Videobridge <tt>conference</tt> IQ. */ public static final String ELEMENT_NAME = "conference"; /** * The XML name of the <tt>id</tt> attribute of the Jitsi Videobridge * <tt>conference</tt> IQ which represents the value of the <tt>id</tt> * property of <tt>ColibriConferenceIQ</tt>. */ public static final String ID_ATTR_NAME = "id"; /** * The XML COnferencing with LIghtweight BRIdging namespace of the Jitsi * Videobridge <tt>conference</tt> IQ. */ public static final String NAMESPACE = "http://jitsi.org/protocol/colibri"; /** * An array of <tt>int</tt>s which represents the lack of any (RTP) SSRCs * seen/received on a <tt>Channel</tt>. Explicitly defined to reduce * unnecessary allocations. */ public static final int[] NO_SSRCS = new int[0]; /** * The list of {@link Content}s included into this <tt>conference</tt> IQ. */ private final List<Content> contents = new LinkedList<Content>(); /** * The list of {@link ChannelBundle}s included into this <tt>conference</tt> * IQ. */ private final List<ChannelBundle> channelBundles = new LinkedList<ChannelBundle>(); /** * The ID of the conference represented by this IQ. */ private String id; /** * Media recording. */ public Recording recording = null; private RTCPTerminationStrategy rtcpTerminationStrategy = null; /** * The list of <tt>Endpoint</tt>s included into this <tt>conference</tt> IQ. */ private final List<Endpoint> endpoints = new LinkedList<Endpoint>(); /** Initializes a new <tt>ColibriConferenceIQ</tt> instance. */ public ColibriConferenceIQ() { } /** * Initializes a new {@link Content} instance with a specific name and adds * it to the list of <tt>Content</tt> instances included into this * <tt>conference</tt> IQ. * * @param contentName the name which which the new <tt>Content</tt> instance * is to be initialized * @return <tt>true</tt> if the list of <tt>Content</tt> instances included * into this <tt>conference</tt> IQ has been modified as a result of the * method call; otherwise, <tt>false</tt> */ public boolean addContent(String contentName) { return addContent(new Content(contentName)); } /** * Adds a specific {@link Content} instance to the list of <tt>Content</tt> * instances included into this <tt>conference</tt> IQ. * * @param content the <tt>Content</tt> instance to be added to this list of * <tt>Content</tt> instances included into this <tt>conference</tt> IQ * @return <tt>true</tt> if the list of <tt>Content</tt> instances included * into this <tt>conference</tt> IQ has been modified as a result of the * method call; otherwise, <tt>false</tt> * @throws NullPointerException if the specified <tt>content</tt> is * <tt>null</tt> */ public boolean addContent(Content content) { if (content == null) throw new NullPointerException("content"); return contents.contains(content) ? 
            false : contents.add(content);
    }

    /**
     * Adds a specific {@link ChannelBundle} instance to the list of
     * <tt>ChannelBundle</tt> instances included into this <tt>conference</tt>
     * IQ.
     *
     * @param channelBundle the <tt>ChannelBundle</tt> to add.
     * @return <tt>true</tt> if the list of <tt>ChannelBundle</tt> instances
     * included into this <tt>conference</tt> IQ has been modified as a result
     * of the method call; otherwise, <tt>false</tt>
     */
    public boolean addChannelBundle(ChannelBundle channelBundle)
    {
        if (channelBundle == null)
            throw new NullPointerException("channelBundle");

        return
            channelBundles.contains(channelBundle)
                ? false
                : channelBundles.add(channelBundle);
    }

    /**
     * Returns an XML <tt>String</tt> representation of this <tt>IQ</tt>.
     *
     * @return an XML <tt>String</tt> representation of this <tt>IQ</tt>
     */
    @Override
    public String getChildElementXML()
    {
        StringBuilder xml = new StringBuilder();

        xml.append('<').append(ELEMENT_NAME);
        xml.append(" xmlns='").append(NAMESPACE).append('\'');

        String id = getID();

        if (id != null)
            xml.append(' ').append(ID_ATTR_NAME).append("='").append(id)
                .append('\'');

        List<Content> contents = getContents();
        List<ChannelBundle> channelBundles = getChannelBundles();

        boolean hasChildren
            = recording != null
                || rtcpTerminationStrategy != null
                || contents.size() > 0
                || channelBundles.size() > 0;

        if (!hasChildren)
        {
            xml.append(" />");
        }
        else
        {
            xml.append('>');
            for (Content content : contents)
                content.toXML(xml);
            for (ChannelBundle channelBundle : channelBundles)
                channelBundle.toXML(xml);
            if (recording != null)
                recording.toXML(xml);
            if (rtcpTerminationStrategy != null)
                rtcpTerminationStrategy.toXML(xml);
            xml.append("</").append(ELEMENT_NAME).append('>');
        }
        return xml.toString();
    }

    /**
     * Returns a <tt>Content</tt> from the list of <tt>Content</tt>s of this
     * <tt>conference</tt> IQ which has a specific name. If no such
     * <tt>Content</tt> exists, returns <tt>null</tt>.
     *
     * @param contentName the name of the <tt>Content</tt> to be returned
     * @return a <tt>Content</tt> from the list of <tt>Content</tt>s of this
     * <tt>conference</tt> IQ which has the specified <tt>contentName</tt> if
     * such a <tt>Content</tt> exists; otherwise, <tt>null</tt>
     */
    public Content getContent(String contentName)
    {
        for (Content content : getContents())
            if (contentName.equals(content.getName()))
                return content;
        return null;
    }

    /**
     * Returns a list of the <tt>Content</tt>s included into this
     * <tt>conference</tt> IQ.
     *
     * @return an unmodifiable <tt>List</tt> of the <tt>Content</tt>s included
     * into this <tt>conference</tt> IQ
     */
    public List<Content> getContents()
    {
        return Collections.unmodifiableList(contents);
    }

    /**
     * Returns a list of the <tt>ChannelBundle</tt>s included into this
     * <tt>conference</tt> IQ.
     *
     * @return an unmodifiable <tt>List</tt> of the <tt>ChannelBundle</tt>s
     * included into this <tt>conference</tt> IQ.
     */
    public List<ChannelBundle> getChannelBundles()
    {
        return Collections.unmodifiableList(channelBundles);
    }

    /**
     * Gets the ID of the conference represented by this IQ.
     *
     * @return the ID of the conference represented by this IQ
     */
    public String getID()
    {
        return id;
    }

    /**
     * Gets the value of the recording field.
     *
     * @return the value of the recording field.
     */
    public Recording getRecording()
    {
        return recording;
    }

    /**
     * Sets the recording field.
     *
     * @param recording the value to set.
     */
    public void setRecording(Recording recording)
    {
        this.recording = recording;
    }

    /**
     * Returns a <tt>Content</tt> from the list of <tt>Content</tt>s of this
     * <tt>conference</tt> IQ which has a specific name. If no such
     * <tt>Content</tt> exists at the time of the invocation of the method,
     * initializes a new <tt>Content</tt> instance with the specified
     * <tt>contentName</tt> and includes it into this <tt>conference</tt> IQ.
* * @param contentName the name of the <tt>Content</tt> to be returned * @return a <tt>Content</tt> from the list of <tt>Content</tt>s of this * <tt>conference</tt> IQ which has the specified <tt>contentName</tt> */ public Content getOrCreateContent(String contentName) { Content content = getContent(contentName); if (content == null) { content = new Content(contentName); addContent(content); } return content; } /** * Removes a specific {@link Content} instance from the list of * <tt>Content</tt> instances included into this <tt>conference</tt> IQ. * * @param content the <tt>Content</tt> instance to be removed from the list * of <tt>Content</tt> instances included into this <tt>conference</tt> IQ * @return <tt>true</tt> if the list of <tt>Content</tt> instances included * into this <tt>conference</tt> IQ has been modified as a result of the * method call; otherwise, <tt>false</tt> */ public boolean removeContent(Content content) { return contents.remove(content); } /** * Sets the ID of the conference represented by this IQ. * * @param id the ID of the conference represented by this IQ */ public void setID(String id) { this.id = id; } public RTCPTerminationStrategy getRTCPTerminationStrategy() { return rtcpTerminationStrategy; } public void setRTCPTerminationStrategy(RTCPTerminationStrategy rtcpTerminationStrategy) { this.rtcpTerminationStrategy = rtcpTerminationStrategy; } /** * Returns the list of <tt>Endpoint</tt>s included in this * <tt>ColibriConferenceIQ</tt>. * @return the list of <tt>Endpoint</tt>s included in this * <tt>ColibriConferenceIQ</tt>. */ public List<Endpoint> getEndpoints() { return Collections.unmodifiableList(endpoints); } /** * Add an <tt>Endpoint</tt> to this <tt>ColibriConferenceIQ</tt>. * @param endpoint the <tt>Endpoint</tt> to add. */ public void addEndpoint(Endpoint endpoint) { endpoints.add(endpoint); } /** * Class contains common code for both <tt>Channel</tt> and * <tt>SctpConnection</tt> IQ classes. * * @author Pawel Domas */ public static abstract class ChannelCommon { /** * The XML name of the <tt>endpoint</tt> attribute which specifies the * optional identifier of the endpoint of the conference participant * associated with a <tt>channel</tt>. The value of the * <tt>endpoint</tt> attribute is an opaque <tt>String</tt> from the * point of view of Jitsi Videobridge. */ public static final String ENDPOINT_ATTR_NAME = "endpoint"; /** * The XML name of the <tt>expire</tt> attribute of a <tt>channel</tt> * of a <tt>content</tt> of a <tt>conference</tt> IQ which represents * the value of the <tt>expire</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. */ public static final String EXPIRE_ATTR_NAME = "expire"; /** * The name of the "channel-bundle-id" attribute. */ public static final String CHANNEL_BUNDLE_ID_ATTR_NAME = "channel-bundle-id"; /** * The value of the <tt>expire</tt> property of * <tt>ColibriConferenceIQ.Channel</tt> which indicates that no actual * value has been specified for the property in question. */ public static final int EXPIRE_NOT_SPECIFIED = -1; /** * The XML name of the <tt>initiator</tt> attribute of a * <tt>channel</tt> of a <tt>content</tt> of a <tt>conference</tt> IQ * which represents the value of the <tt>initiator</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. */ public static final String INITIATOR_ATTR_NAME = "initiator"; /** * The identifier of the endpoint of the conference participant * associated with this <tt>Channel</tt>. 
*/ private String endpoint; /** * The number of seconds of inactivity after which the <tt>channel</tt> * represented by this instance expires. */ private int expire = EXPIRE_NOT_SPECIFIED; /** * The indicator which determines whether the conference focus is the * initiator/offerer (as opposed to the responder/answerer) of the media * negotiation associated with this instance. */ private Boolean initiator; private IceUdpTransportPacketExtension transport; /** * XML element name. */ private String elementName; /** * The channel-bundle-id attribute of this <tt>CommonChannel</tt>. */ private String channelBundleId = null; /** * Initializes this class with given XML <tt>elementName</tt>. * @param elementName XML element name to be used for producing XML * representation of derived IQ class. */ protected ChannelCommon(String elementName) { this.elementName = elementName; } /** * Gets the identifier of the endpoint of the conference participant * associated with this <tt>Channel</tt>. * * @return the identifier of the endpoint of the conference participant * associated with this <tt>Channel</tt> */ public String getEndpoint() { return endpoint; } /** * Gets the number of seconds of inactivity after which the * <tt>channel</tt> represented by this instance expires. * * @return the number of seconds of inactivity after which the * <tt>channel</tt> represented by this instance expires */ public int getExpire() { return expire; } public IceUdpTransportPacketExtension getTransport() { return transport; } /** * Gets the indicator which determines whether the conference focus is * the initiator/offerer (as opposed to the responder/answerer) of the * media negotiation associated with this instance. * * @return {@link Boolean#TRUE} if the conference focus is the * initiator/offerer of the media negotiation associated with this * instance, {@link Boolean#FALSE} if the conference focus is the * responder/answerer or <tt>null</tt> if the <tt>initiator</tt> state * is unspecified */ public Boolean isInitiator() { return initiator; } /** * Get the channel-bundle-id attribute of this <tt>CommonChannel</tt>. * @return the channel-bundle-id attribute of this * <tt>CommonChannel</tt>. */ public String getChannelBundleId() { return channelBundleId; } /** * Sets the identifier of the endpoint of the conference participant * associated with this <tt>Channel</tt>. * * @param endpoint the identifier of the endpoint of the conference * participant associated with this <tt>Channel</tt> */ public void setEndpoint(String endpoint) { this.endpoint = endpoint; } /** * Sets the number of seconds of inactivity after which the * <tt>channel</tt> represented by this instance expires. * * @param expire the number of seconds of activity after which the * <tt>channel</tt> represented by this instance expires * @throws IllegalArgumentException if the value of the specified * <tt>expire</tt> is other than {@link #EXPIRE_NOT_SPECIFIED} and * negative */ public void setExpire(int expire) { if ((expire != EXPIRE_NOT_SPECIFIED) && (expire < 0)) throw new IllegalArgumentException("expire"); this.expire = expire; } /** * Sets the indicator which determines whether the conference focus is * the initiator/offerer (as opposed to the responder/answerer) of the * media negotiation associated with this instance. 
         *
         * @param initiator {@link Boolean#TRUE} if the conference focus is the
         * initiator/offerer of the media negotiation associated with this
         * instance, {@link Boolean#FALSE} if the conference focus is the
         * responder/answerer or <tt>null</tt> if the <tt>initiator</tt> state
         * is to be unspecified
         */
        public void setInitiator(Boolean initiator)
        {
            this.initiator = initiator;
        }

        /**
         * Sets the transport element of this instance.
         *
         * @param transport the <tt>IceUdpTransportPacketExtension</tt> to set
         */
        public void setTransport(IceUdpTransportPacketExtension transport)
        {
            this.transport = transport;
        }

        /**
         * Sets the channel-bundle-id attribute of this <tt>CommonChannel</tt>.
         *
         * @param channelBundleId the value to set.
         */
        public void setChannelBundleId(String channelBundleId)
        {
            this.channelBundleId = channelBundleId;
        }

        /**
         * Implemented by derived classes in order to print additional
         * attributes to the main XML element.
         *
         * @param xml the <tt>StringBuilder</tt> to which the XML
         * <tt>String</tt> representation of this <tt>Channel</tt> is to be
         * appended
         */
        protected abstract void printAttributes(StringBuilder xml);

        /**
         * Indicates whether there is some content that should be printed as
         * child elements of this IQ. If <tt>true</tt> is returned, the
         * {@link #printContent(StringBuilder)} method will be called when the
         * XML representation of this IQ is being constructed.
         *
         * @return <tt>true</tt> if there is content to be printed as child
         * elements of this IQ or <tt>false</tt> otherwise.
         */
        protected abstract boolean hasContent();

        /**
         * Implemented by derived classes in order to print content child
         * elements of this IQ using the given <tt>StringBuilder</tt>. Called
         * during the construction of the XML representation if
         * {@link #hasContent()} returns <tt>true</tt>.
         *
         * @param xml the <tt>StringBuilder</tt> to which the XML
         * <tt>String</tt> representation of this <tt>Channel</tt> is to be
         * appended
         */
        protected abstract void printContent(StringBuilder xml);

        /**
         * Appends the XML <tt>String</tt> representation of this
         * <tt>Channel</tt> to a specific <tt>StringBuilder</tt>.
* * @param xml the <tt>StringBuilder</tt> to which the XML * <tt>String</tt> representation of this <tt>Channel</tt> is to be * appended */ public void toXML(StringBuilder xml) { xml.append('<').append(elementName); // endpoint String endpoint = getEndpoint(); if (endpoint != null) { xml.append(' ').append(ENDPOINT_ATTR_NAME).append("='") .append(endpoint).append('\''); } // expire int expire = getExpire(); if (expire >= 0) { xml.append(' ').append(EXPIRE_ATTR_NAME).append("='") .append(expire).append('\''); } // initiator Boolean initiator = isInitiator(); if (initiator != null) { xml.append(' ').append(INITIATOR_ATTR_NAME).append("='") .append(initiator).append('\''); } String channelBundleId = getChannelBundleId(); if (channelBundleId != null) { xml.append(' ').append(CHANNEL_BUNDLE_ID_ATTR_NAME) .append("='").append(channelBundleId).append('\''); } // Print derived class attributes printAttributes(xml); IceUdpTransportPacketExtension transport = getTransport(); boolean hasTransport = (transport != null); if (hasTransport || hasContent()) { xml.append('>'); if(hasContent()) printContent(xml); if (hasTransport) xml.append(transport.toXML()); xml.append("</").append(elementName).append('>'); } else { xml.append(" />"); } } } public static class RTCPTerminationStrategy { public static final String ELEMENT_NAME = "rtcp-termination-strategy"; public static final String NAME_ATTR_NAME = "name"; private String name; public void setName(String name) { this.name = name; } public String getName() { return name; } public void toXML(StringBuilder xml) { xml.append('<').append(ELEMENT_NAME); xml.append(' ').append(NAME_ATTR_NAME).append("='") .append(name).append('\''); xml.append("/>"); } } /** * Represents a "channel-bundle" element. */ public static class ChannelBundle { /** * The name of the "channel-bundle" element. */ public static final String ELEMENT_NAME = "channel-bundle"; /** * The name of the "id" attribute. */ public static final String ID_ATTR_NAME = "id"; /** * The ID of this <tt>ChannelBundle</tt>. */ private String id; /** * The transport element of this <tt>ChannelBundle</tt>. */ private IceUdpTransportPacketExtension transport; /** * Initializes a new <tt>ChannelBundle</tt> with the given ID. * @param id the ID. */ public ChannelBundle(String id) { this.id = id; } /** * Returns the transport element of this <tt>ChannelBundle</tt>. * @return the transport element of this <tt>ChannelBundle</tt>. */ public IceUdpTransportPacketExtension getTransport() { return transport; } /** * Sets the transport element of this <tt>ChannelBundle</tt>. * @param transport the transport to set. */ public void setTransport(IceUdpTransportPacketExtension transport) { this.transport = transport; } /** * Returns the ID of this <tt>ChannelBundle</tt>. * @return the ID of this <tt>ChannelBundle</tt>. */ public String getId() { return id; } /** * Sets the ID of this <tt>ChannelBundle</tt>. * @param id the ID to set. */ public void setId(String id) { this.id = id; } /** * Appends an XML representation of this <tt>ChannelBundle</tt> to * <tt>xml</tt>. * @param xml the <tt>StringBuilder</tt> to append to. 
*/ public void toXML(StringBuilder xml) { xml.append('<').append(ELEMENT_NAME).append(' ') .append(ID_ATTR_NAME).append("='").append(id).append('\''); if (transport != null) { xml.append('>'); xml.append(transport.toXML()); xml.append("</").append(ELEMENT_NAME).append('>'); } else { xml.append(" />"); } } } /** * Represents a <tt>channel</tt> included into a <tt>content</tt> of a Jitsi * Videobridge <tt>conference</tt> IQ. */ public static class Channel extends ChannelCommon { /** * The name of the XML attribute of a <tt>channel</tt> which represents * its direction. */ public static final String DIRECTION_ATTR_NAME = "direction"; /** * The XML element name of a <tt>channel</tt> of a <tt>content</tt> of a * Jitsi Videobridge <tt>conference</tt> IQ. */ public static final String ELEMENT_NAME = "channel"; /** * The XML name of the <tt>host</tt> attribute of a <tt>channel</tt> of * a <tt>content</tt> of a <tt>conference</tt> IQ which represents the * value of the <tt>host</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. * * @deprecated The attribute is supported for the purposes of * compatibility with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public static final String HOST_ATTR_NAME = "host"; /** * The XML name of the <tt>id</tt> attribute of a <tt>channel</tt> of a * <tt>content</tt> of a <tt>conference</tt> IQ which represents the * value of the <tt>id</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. */ public static final String ID_ATTR_NAME = "id"; /** * The XML name of the <tt>last-n</tt> attribute of a video * <tt>channel</tt> which specifies the maximum number of video RTP * streams to be sent from Jitsi Videobridge to the endpoint associated * with the video <tt>channel</tt>. The value of the <tt>last-n</tt> * attribute is a positive number. */ public static final String LAST_N_ATTR_NAME = "last-n"; /** * The XML name of the <tt>receive-simulcast-layer</tt> attribute of a * video <tt>Channel</tt> which specifies the target quality of the * simulcast substreams to be sent from Jitsi Videobridge to the * endpoint associated with the video <tt>Channel</tt>. The value of the * <tt>receive-simulcast-layer</tt> attribute is an unsigned integer. * Typically used for debugging purposes. */ public static final String RECEIVING_SIMULCAST_LAYER = "receive-simulcast-layer"; /** * The XML name of the <tt>rtcpport</tt> attribute of a <tt>channel</tt> * of a <tt>content</tt> of a <tt>conference</tt> IQ which represents * the value of the <tt>rtcpPort</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. * * @deprecated The attribute is supported for the purposes of * compatibility with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public static final String RTCP_PORT_ATTR_NAME = "rtcpport"; public static final String RTP_LEVEL_RELAY_TYPE_ATTR_NAME = "rtp-level-relay-type"; /** * The XML name of the <tt>rtpport</tt> attribute of a <tt>channel</tt> * of a <tt>content</tt> of a <tt>conference</tt> IQ which represents * the value of the <tt>rtpPort</tt> property of * <tt>ColibriConferenceIQ.Channel</tt>. * * @deprecated The attribute is supported for the purposes of * compatibility with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public static final String RTP_PORT_ATTR_NAME = "rtpport"; /** * The name of the XML element which is a child of the &lt;channel&gt; * element and which identifies/specifies an (RTP) SSRC which has been * seen/received on the respective <tt>Channel</tt>. 
*/ public static final String SSRC_ELEMENT_NAME = "ssrc"; /** * The direction of the <tt>channel</tt> represented by this instance. */ private MediaDirection direction; /** * The host of the <tt>channel</tt> represented by this instance. * * @deprecated The field is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated private String host; /** * The ID of the <tt>channel</tt> represented by this instance. */ private String id; /** * The maximum number of video RTP streams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt>. */ private Integer lastN; /** * The target quality of the simulcast substreams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt>. */ private Integer receivingSimulcastLayer; /** * The <tt>payload-type</tt> elements defined by XEP-0167: Jingle RTP * Sessions associated with this <tt>channel</tt>. */ private final List<PayloadTypePacketExtension> payloadTypes = new ArrayList<PayloadTypePacketExtension>(); /** * The RTCP port of the <tt>channel</tt> represented by this instance. * * @deprecated The field is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated private int rtcpPort; /** * The type of RTP-level relay (in the terms specified by RFC 3550 * &quot;RTP: A Transport Protocol for Real-Time Applications&quot; in * section 2.3 &quot;Mixers and Translators&quot;) used for this * <tt>Channel</tt>. */ private RTPLevelRelayType rtpLevelRelayType; /** * The RTP port of the <tt>channel</tt> represented by this instance. * * @deprecated The field is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated private int rtpPort; /** * The <tt>SourcePacketExtension</tt>s of this channel. */ private final List<SourcePacketExtension> sources = new LinkedList<SourcePacketExtension>(); /** * The <tt>SourceGroupPacketExtension</tt>s of this channel. */ private List<SourceGroupPacketExtension> sourceGroups; /** * The list of (RTP) SSRCs which have been seen/received on this * <tt>Channel</tt> by now. These may exclude SSRCs which are no longer * active. Set by the Jitsi Videobridge server, not its clients. */ private int[] ssrcs = NO_SSRCS; /** Initializes a new <tt>Channel</tt> instance. */ public Channel() { super(Channel.ELEMENT_NAME); } /** * Adds a <tt>payload-type</tt> element defined by XEP-0167: Jingle RTP * Sessions to this <tt>channel</tt>. * * @param payloadType the <tt>payload-type</tt> element to be added to * this <tt>channel</tt> * @return <tt>true</tt> if the list of <tt>payload-type</tt> elements * associated with this <tt>channel</tt> has been modified as part of * the method call; otherwise, <tt>false</tt> * @throws NullPointerException if the specified <tt>payloadType</tt> is * <tt>null</tt> */ public boolean addPayloadType(PayloadTypePacketExtension payloadType) { if (payloadType == null) throw new NullPointerException("payloadType"); // Make sure that the COLIBRI namespace is used. payloadType.setNamespace(null); for (ParameterPacketExtension p : payloadType.getParameters()) p.setNamespace(null); return payloadTypes.contains(payloadType) ? false : payloadTypes.add(payloadType); } /** * Adds a <tt>SourcePacketExtension</tt> to the list of sources of this * channel. 
* * @param source the <tt>SourcePacketExtension</tt> to add to the list * of sources of this channel * @return <tt>true</tt> if the list of sources of this channel changed * as a result of the execution of the method; otherwise, <tt>false</tt> */ public synchronized boolean addSource(SourcePacketExtension source) { if (source == null) throw new NullPointerException("source"); return sources.contains(source) ? false : sources.add(source); } /** * Adds a <tt>SourceGroupPacketExtension</tt> to the list of source * groups of this channel. * * @param sourceGroup the <tt>SourcePacketExtension</tt> to add to the * list of sources of this channel * * @return <tt>true</tt> if the list of sources of this channel changed * as a result of the execution of the method; otherwise, <tt>false</tt> */ public synchronized boolean addSourceGroup( SourceGroupPacketExtension sourceGroup) { if (sourceGroup == null) throw new NullPointerException("sourceGroup"); if (sourceGroups == null) sourceGroups = new LinkedList<SourceGroupPacketExtension>(); return sourceGroups.contains(sourceGroup) ? false : sourceGroups.add(sourceGroup); } /** * Adds a specific (RTP) SSRC to the list of SSRCs seen/received on this * <tt>Channel</tt>. Invoked by the Jitsi Videobridge server, not its * clients. * * @param ssrc the (RTP) SSRC to be added to the list of SSRCs * seen/received on this <tt>Channel</tt> * @return <tt>true</tt> if the list of SSRCs seen/received on this * <tt>Channel</tt> has been modified as part of the method call; * otherwise, <tt>false</tt> */ public synchronized boolean addSSRC(int ssrc) { // contains for (int i = 0; i < ssrcs.length; i++) if (ssrcs[i] == ssrc) return false; // add int[] newSSRCs = new int[ssrcs.length + 1]; System.arraycopy(ssrcs, 0, newSSRCs, 0, ssrcs.length); newSSRCs[ssrcs.length] = ssrc; ssrcs = newSSRCs; return true; } /** * Gets the <tt>direction</tt> of this <tt>Channel</tt>. * * @return the <tt>direction</tt> of this <tt>Channel</tt>. */ public MediaDirection getDirection() { return (direction == null) ? MediaDirection.SENDRECV : direction; } /** * Gets the IP address (as a <tt>String</tt> value) of the host on which * the <tt>channel</tt> represented by this instance has been allocated. * * @return a <tt>String</tt> value which represents the IP address of * the host on which the <tt>channel</tt> represented by this instance * has been allocated * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public String getHost() { return host; } /** * Gets the ID of the <tt>channel</tt> represented by this instance. * * @return the ID of the <tt>channel</tt> represented by this instance */ public String getID() { return id; } /** * Gets the maximum number of video RTP streams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt>. * * @return the maximum number of video RTP streams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt> */ public Integer getLastN() { return lastN; } /** * Gets the target quality of the simulcast substreams to be sent from * Jitsi Videobridge to the endpoint associated with this video * <tt>Channel</tt>. * * @return the target quality of the simulcast substreams to be sent * from Jitsi Videobridge to the endpoint associated with this video * <tt>Channel</tt>. 
*/ public Integer getReceivingSimulcastLayer() { return receivingSimulcastLayer; } /** * Gets a list of <tt>payload-type</tt> elements defined by XEP-0167: * Jingle RTP Sessions added to this <tt>channel</tt>. * * @return an unmodifiable <tt>List</tt> of <tt>payload-type</tt> * elements defined by XEP-0167: Jingle RTP Sessions added to this * <tt>channel</tt> */ public List<PayloadTypePacketExtension> getPayloadTypes() { return Collections.unmodifiableList(payloadTypes); } /** * Gets the port which has been allocated to this <tt>channel</tt> for * the purposes of transmitting RTCP packets. * * @return the port which has been allocated to this <tt>channel</tt> * for the purposes of transmitting RTCP packets * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public int getRTCPPort() { return rtcpPort; } /** * Gets the type of RTP-level relay (in the terms specified by RFC 3550 * &quot;RTP: A Transport Protocol for Real-Time Applications&quot; in * section 2.3 &quot;Mixers and Translators&quot;) used for this * <tt>Channel</tt>. * * @return the type of RTP-level relay used for this <tt>Channel</tt> */ public RTPLevelRelayType getRTPLevelRelayType() { return rtpLevelRelayType; } /** * Gets the port which has been allocated to this <tt>channel</tt> for * the purposes of transmitting RTP packets. * * @return the port which has been allocated to this <tt>channel</tt> * for the purposes of transmitting RTP packets * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public int getRTPPort() { return rtpPort; } /** * Gets the list of <tt>SourcePacketExtensions</tt>s which represent the * sources of this channel. * * @return a <tt>List</tt> of <tt>SourcePacketExtension</tt>s which * represent the sources of this channel */ public synchronized List<SourcePacketExtension> getSources() { return new ArrayList<SourcePacketExtension>(sources); } /** * Gets the list of <tt>SourceGroupPacketExtensions</tt>s which * represent the source groups of this channel. * * @return a <tt>List</tt> of <tt>SourceGroupPacketExtension</tt>s which * represent the source groups of this channel */ public synchronized List<SourceGroupPacketExtension> getSourceGroups() { return (sourceGroups == null) ? null : new ArrayList<SourceGroupPacketExtension>(sourceGroups); } /** * Gets (a copy of) the list of (RTP) SSRCs seen/received on this * <tt>Channel</tt>. * * @return an array of <tt>int</tt>s which represents (a copy of) the * list of (RTP) SSRCs seen/received on this <tt>Channel</tt> */ public synchronized int[] getSSRCs() { return (ssrcs.length == 0) ? NO_SSRCS : ssrcs.clone(); } /** * Removes a <tt>payload-type</tt> element defined by XEP-0167: Jingle * RTP Sessions from this <tt>channel</tt>. * * @param payloadType the <tt>payload-type</tt> element to be removed * from this <tt>channel</tt> * @return <tt>true</tt> if the list of <tt>payload-type</tt> elements * associated with this <tt>channel</tt> has been modified as part of * the method call; otherwise, <tt>false</tt> */ public boolean removePayloadType(PayloadTypePacketExtension payloadType) { return payloadTypes.remove(payloadType); } /** * Removes a <tt>SourcePacketExtension</tt> from the list of sources of * this channel. 
* * @param source the <tt>SourcePacketExtension</tt> to remove from the * list of sources of this channel * @return <tt>true</tt> if the list of sources of this channel changed * as a result of the execution of the method; otherwise, <tt>false</tt> */ public synchronized boolean removeSource(SourcePacketExtension source) { return sources.remove(source); } /** * Removes a specific (RTP) SSRC from the list of SSRCs seen/received on * this <tt>Channel</tt>. Invoked by the Jitsi Videobridge server, not * its clients. * * @param ssrc the (RTP) SSRC to be removed from the list of SSRCs * seen/received on this <tt>Channel</tt> * @return <tt>true</tt> if the list of SSRCs seen/received on this * <tt>Channel</tt> has been modified as part of the method call; * otherwise, <tt>false</tt> */ public synchronized boolean removeSSRC(int ssrc) { if (ssrcs.length == 1) { if (ssrcs[0] == ssrc) { ssrcs = NO_SSRCS; return true; } else return false; } else { for (int i = 0; i < ssrcs.length; i++) { if (ssrcs[i] == ssrc) { int[] newSSRCs = new int[ssrcs.length - 1]; if (i != 0) System.arraycopy(ssrcs, 0, newSSRCs, 0, i); if (i != newSSRCs.length) { System.arraycopy( ssrcs, i + 1, newSSRCs, i, newSSRCs.length - i); } ssrcs = newSSRCs; return true; } } return false; } } /** * Sets the <tt>direction</tt> of this <tt>Channel</tt> * * @param direction the <tt>MediaDirection</tt> to set the * <tt>direction</tt> of this <tt>Channel</tt> to. */ public void setDirection(MediaDirection direction) { this.direction = direction; } /** * Sets the IP address (as a <tt>String</tt> value) of the host on which * the <tt>channel</tt> represented by this instance has been allocated. * * @param host a <tt>String</tt> value which represents the IP address * of the host on which the <tt>channel</tt> represented by this * instance has been allocated * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public void setHost(String host) { this.host = host; } /** * Sets the ID of the <tt>channel</tt> represented by this instance. * * @param id the ID of the <tt>channel</tt> represented by this instance */ public void setID(String id) { this.id = id; } /** * Sets the maximum number of video RTP streams to be sent from Jitsi * Videobridge to the endpoint associated with this video * <tt>Channel</tt>. * * @param lastN the maximum number of video RTP streams to be sent from * Jitsi Videobridge to the endpoint associated with this video * <tt>Channel</tt> */ public void setLastN(Integer lastN) { this.lastN = lastN; } /** * Sets the target quality of the simulcast substreams to be sent from * Jitsi Videobridge to the endpoint associated with this video * <tt>Channel</tt>. * * @param simulcastLayer the target quality of the simulcast substreams * to be sent from Jitsi Videobridge to the endpoint associated with * this video <tt>Channel</tt>. */ public void setReceivingSimulcastLayer(Integer simulcastLayer) { this.receivingSimulcastLayer = simulcastLayer; } /** * Sets the port which has been allocated to this <tt>channel</tt> for * the purposes of transmitting RTCP packets. * * @param rtcpPort the port which has been allocated to this * <tt>channel</tt> for the purposes of transmitting RTCP packets * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. 
*/ @Deprecated public void setRTCPPort(int rtcpPort) { this.rtcpPort = rtcpPort; } /** * Sets the type of RTP-level relay (in the terms specified by RFC 3550 * &quot;RTP: A Transport Protocol for Real-Time Applications&quot; in * section 2.3 &quot;Mixers and Translators&quot;) used for this * <tt>Channel</tt>. * * @param rtpLevelRelayType the type of RTP-level relay used for * this <tt>Channel</tt> */ public void setRTPLevelRelayType(RTPLevelRelayType rtpLevelRelayType) { this.rtpLevelRelayType = rtpLevelRelayType; } /** * Sets the type of RTP-level relay (in the terms specified by RFC 3550 * &quot;RTP: A Transport Protocol for Real-Time Applications&quot; in * section 2.3 &quot;Mixers and Translators&quot;) used for this * <tt>Channel</tt>. * * @param s the type of RTP-level relay used for this <tt>Channel</tt> */ public void setRTPLevelRelayType(String s) { setRTPLevelRelayType(RTPLevelRelayType.parseRTPLevelRelayType(s)); } /** * Sets the port which has been allocated to this <tt>channel</tt> for * the purposes of transmitting RTP packets. * * @param rtpPort the port which has been allocated to this * <tt>channel</tt> for the purposes of transmitting RTP packets * * @deprecated The method is supported for the purposes of compatibility * with legacy versions of Jitsi and Jitsi Videobridge. */ @Deprecated public void setRTPPort(int rtpPort) { this.rtpPort = rtpPort; } /** * Sets the list of (RTP) SSRCs seen/received on this <tt>Channel</tt>. * * @param ssrcs the list of (RTP) SSRCs to be set as seen/received on * this <tt>Channel</tt> */ public void setSSRCs(int[] ssrcs) { /* * TODO Make sure that the SSRCs set on this instance do not contain * duplicates. */ this.ssrcs = ((ssrcs == null) || (ssrcs.length == 0)) ? NO_SSRCS : ssrcs.clone(); } @Override protected void printAttributes(StringBuilder xml) { // direction MediaDirection direction = getDirection(); if ((direction != null) && (direction != MediaDirection.SENDRECV)) { xml.append(' ').append(DIRECTION_ATTR_NAME).append("='") .append(direction.toString()).append('\''); } // host String host = getHost(); if (host != null) { xml.append(' ').append(HOST_ATTR_NAME).append("='").append(host) .append('\''); } // id String id = getID(); if (id != null) { xml.append(' ').append(ID_ATTR_NAME).append("='").append(id) .append('\''); } // lastN Integer lastN = getLastN(); if (lastN != null) { xml.append(' ').append(LAST_N_ATTR_NAME).append("='") .append(lastN).append('\''); } // rtcpPort int rtcpPort = getRTCPPort(); if (rtcpPort > 0) { xml.append(' ').append(RTCP_PORT_ATTR_NAME).append("='") .append(rtcpPort).append('\''); } // rtpLevelRelayType RTPLevelRelayType rtpLevelRelayType = getRTPLevelRelayType(); if (rtpLevelRelayType != null) { xml.append(' ').append(RTP_LEVEL_RELAY_TYPE_ATTR_NAME) .append("='").append(rtpLevelRelayType).append('\''); } // rtpPort int rtpPort = getRTPPort(); if (rtpPort > 0) { xml.append(' ').append(RTP_PORT_ATTR_NAME).append("='") .append(rtpPort).append('\''); } } @Override protected boolean hasContent() { List<PayloadTypePacketExtension> payloadTypes = getPayloadTypes(); boolean hasPayloadTypes = !payloadTypes.isEmpty(); List<SourcePacketExtension> sources = getSources(); boolean hasSources = !sources.isEmpty(); int[] ssrcs = getSSRCs(); boolean hasSSRCs = (ssrcs.length != 0); return hasPayloadTypes || hasSources || hasSSRCs; } @Override protected void printContent(StringBuilder xml) { List<PayloadTypePacketExtension> payloadTypes = getPayloadTypes(); List<SourcePacketExtension> sources = getSources(); 
List<SourceGroupPacketExtension> souceGroups = getSourceGroups(); int[] ssrcs = getSSRCs(); for (PayloadTypePacketExtension payloadType : payloadTypes) xml.append(payloadType.toXML()); for (SourcePacketExtension source : sources) xml.append(source.toXML()); if (souceGroups != null && souceGroups.size() != 0) for (SourceGroupPacketExtension sourceGroup : souceGroups) xml.append(sourceGroup.toXML()); for (int i = 0; i < ssrcs.length; i++) { xml.append('<').append(SSRC_ELEMENT_NAME).append('>') .append(Long.toString(ssrcs[i] & 0xFFFFFFFFL)) .append("</").append(SSRC_ELEMENT_NAME) .append('>'); } } } /** * Represents a <tt>content</tt> included into a Jitsi Videobridge * <tt>conference</tt> IQ. */ public static class Content { /** * The XML element name of a <tt>content</tt> of a Jitsi Videobridge * <tt>conference</tt> IQ. */ public static final String ELEMENT_NAME = "content"; /** * The XML name of the <tt>name</tt> attribute of a <tt>content</tt> of * a <tt>conference</tt> IQ which represents the <tt>name</tt> property * of <tt>ColibriConferenceIQ.Content</tt>. */ public static final String NAME_ATTR_NAME = "name"; /** * The list of {@link Channel}s included into this <tt>content</tt> of a * <tt>conference</tt> IQ. */ private final List<Channel> channels = new LinkedList<Channel>(); /** * The list of {@link SctpConnection}s included into this * <tt>content</tt> of a <tt>conference</tt> IQ. */ private final List<SctpConnection> sctpConnections = new LinkedList<SctpConnection>(); /** * The name of the <tt>content</tt> represented by this instance. */ private String name; /** * Initializes a new <tt>Content</tt> instance without a name and * channels. */ public Content() { } /** * Initializes a new <tt>Content</tt> instance with a specific name and * without channels. * * @param name the name to initialize the new instance with */ public Content(String name) { setName(name); } /** * Adds a specific <tt>Channel</tt> to the list of <tt>Channel</tt>s * included into this <tt>Content</tt>. * * @param channel the <tt>Channel</tt> to be included into this * <tt>Content</tt> * @return <tt>true</tt> if the list of <tt>Channel</tt>s included into * this <tt>Content</tt> was modified as a result of the execution of * the method; otherwise, <tt>false</tt> * @throws NullPointerException if the specified <tt>channel</tt> is * <tt>null</tt> */ public boolean addChannel(Channel channel) { if (channel == null) throw new NullPointerException("channel"); return channels.contains(channel) ? false : channels.add(channel); } /** * Gets the <tt>Channel</tt> at a specific index/position within the * list of <tt>Channel</tt>s included in this <tt>Content</tt>. * * @param channelIndex the index/position within the list of * <tt>Channel</tt>s included in this <tt>Content</tt> of the * <tt>Channel</tt> to be returned * @return the <tt>Channel</tt> at the specified <tt>channelIndex</tt> * within the list of <tt>Channel</tt>s included in this * <tt>Content</tt> */ public Channel getChannel(int channelIndex) { return getChannels().get(channelIndex); } /** * Gets a <tt>Channel</tt> which is included into this <tt>Content</tt> * and which has a specific ID. 
* * @param channelID the ID of the <tt>Channel</tt> included into this * <tt>Content</tt> to be returned * @return the <tt>Channel</tt> which is included into this * <tt>Content</tt> and which has the specified <tt>channelID</tt> if * such a <tt>Channel</tt> exists; otherwise, <tt>null</tt> */ public Channel getChannel(String channelID) { for (Channel channel : getChannels()) if (channelID.equals(channel.getID())) return channel; return null; } /** * Gets the number of <tt>Channel</tt>s included into/associated with * this <tt>Content</tt>. * * @return the number of <tt>Channel</tt>s included into/associated with * this <tt>Content</tt> */ public int getChannelCount() { return getChannels().size(); } /** * Gets a list of the <tt>Channel</tt> included into/associated with * this <tt>Content</tt>. * * @return an unmodifiable <tt>List</tt> of the <tt>Channel</tt>s * included into/associated with this <tt>Content</tt> */ public List<Channel> getChannels() { return Collections.unmodifiableList(channels); } /** * Adds a specific <tt>SctpConnection</tt> to the list of * <tt>SctpConnection</tt>s included into this <tt>Content</tt>. * * @param conn the <tt>SctpConnection</tt> to be included into this * <tt>Content</tt> * @return <tt>true</tt> if the list of <tt>SctpConnection</tt>s * included into this <tt>Content</tt> was modified as a result of * the execution of the method; otherwise, <tt>false</tt> * @throws NullPointerException if the specified <tt>conn</tt> is * <tt>null</tt> */ public boolean addSctpConnection(SctpConnection conn) { if(conn == null) throw new NullPointerException("Sctp connection"); return !sctpConnections.contains(conn) && sctpConnections.add(conn); } /** * Gets a list of the <tt>SctpConnection</tt>s included into/associated * with this <tt>Content</tt>. * * @return an unmodifiable <tt>List</tt> of the <tt>SctpConnection</tt>s * included into/associated with this <tt>Content</tt> */ public List<SctpConnection> getSctpConnections() { return Collections.unmodifiableList(sctpConnections); } /** * Gets the name of the <tt>content</tt> represented by this instance. * * @return the name of the <tt>content</tt> represented by this instance */ public String getName() { return name; } /** * Removes a specific <tt>Channel</tt> from the list of * <tt>Channel</tt>s included into this <tt>Content</tt>. * * @param channel the <tt>Channel</tt> to be excluded from this * <tt>Content</tt> * @return <tt>true</tt> if the list of <tt>Channel</tt>s included into * this <tt>Content</tt> was modified as a result of the execution of * the method; otherwise, <tt>false</tt> */ public boolean removeChannel(Channel channel) { return channels.remove(channel); } /** * Sets the name of the <tt>content</tt> represented by this instance. * * @param name the name of the <tt>content</tt> represented by this * instance * @throws NullPointerException if the specified <tt>name</tt> is * <tt>null</tt> */ public void setName(String name) { if (name == null) throw new NullPointerException("name"); this.name = name; } /** * Appends the XML <tt>String</tt> representation of this * <tt>Content</tt> to a specific <tt>StringBuilder</tt>. 
* * @param xml the <tt>StringBuilder</tt> to which the XML * <tt>String</tt> representation of this <tt>Content</tt> is to be * appended */ public void toXML(StringBuilder xml) { xml.append('<').append(ELEMENT_NAME); xml.append(' ').append(NAME_ATTR_NAME).append("='") .append(getName()).append('\''); List<Channel> channels = getChannels(); List<SctpConnection> connections = getSctpConnections(); if (channels.size() == 0 && connections.size() == 0) { xml.append(" />"); } else { xml.append('>'); for (Channel channel : channels) channel.toXML(xml); for(SctpConnection conn : connections) conn.toXML(xml); xml.append("</").append(ELEMENT_NAME).append('>'); } } } /** * Represents a <tt>SCTP connection</tt> included into a <tt>content</tt> * of a Jitsi Videobridge <tt>conference</tt> IQ. * * @author Pawel Domas */ public static class SctpConnection extends ChannelCommon { /** * The XML element name of a <tt>content</tt> of a Jitsi Videobridge * <tt>conference</tt> IQ. */ public static final String ELEMENT_NAME = "sctpconnection"; /** * The XML name of the <tt>port</tt> attribute of a * <tt>SctpConnection</tt> of a <tt>conference</tt> IQ which represents * the SCTP port property of * <tt>ColibriConferenceIQ.SctpConnection</tt>. */ public static final String PORT_ATTR_NAME = "port"; /** * SCTP port attribute. 5000 by default. */ private int port = 5000; /** * Initializes a new <tt>SctpConnection</tt> instance without an * endpoint name and with default port value set. */ public SctpConnection() { super(SctpConnection.ELEMENT_NAME); } /** * Gets the SCTP port of the <tt>SctpConnection</tt> described by this * instance. * * @return the SCTP port of the <tt>SctpConnection</tt> represented by * this instance. */ public int getPort() { return port; } /** * Sets the SCTP port of the <tt>SctpConnection</tt> represented by this * instance. * * @param port the SCTP port of the <tt>SctpConnection</tt> * represented by this instance */ public void setPort(int port) { this.port = port; } /** * {@inheritDoc} */ @Override protected void printAttributes(StringBuilder xml) { xml.append(' ').append(PORT_ATTR_NAME).append("='") .append(getPort()).append('\''); } /** * {@inheritDoc} * * No content other than transport for <tt>SctpConnection</tt>. */ @Override protected boolean hasContent() { return false; } @Override protected void printContent(StringBuilder xml) { // No other content than the transport shared from ChannelCommon } } /** * Represents a <tt>recording</tt> element. */ public static class Recording { /** * The XML name of the <tt>recording</tt> element. */ public static final String ELEMENT_NAME = "recording"; /** * The XML name of the <tt>state</tt> attribute. */ public static final String STATE_ATTR_NAME = "state"; /** * The XML name of the <tt>token</tt> attribute. */ public static final String TOKEN_ATTR_NAME = "token"; /** * The XML name of the <tt>path</tt> attribute. 
*/ public static final String PATH_ATTR_NAME = "path"; private String token = null; private boolean state; private String path = null; public Recording(boolean state) { this.state = state; } public Recording(boolean state, String token) { this(state); this.token = token; } public String getToken() { return token; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } public boolean getState() { return state; } public void toXML(StringBuilder xml) { xml.append('<').append(ELEMENT_NAME); xml.append(' ').append(STATE_ATTR_NAME).append("='") .append(state).append('\''); if (token != null) { xml.append(' ').append(TOKEN_ATTR_NAME).append("='") .append(token).append('\''); } if (path != null) { xml.append(' ').append(PATH_ATTR_NAME).append("='") .append(path).append('\''); } xml.append("/>"); } } /** * Represents an 'endpoint' element. */ public static class Endpoint { /** * The name of the 'endpoint' element. */ public static final String ELEMENT_NAME = "endpoint"; /** * The name of the 'id' attribute. */ public static final String ID_ATTR_NAME = "id"; /** * The name of the 'displayname' attribute. */ public static final String DISPLAYNAME_ATTR_NAME = "displayname"; /** * The 'id' of this <tt>Endpoint</tt>. */ private String id; /** * The 'display name' of this <tt>Endpoint</tt>. */ private String displayName; /** * Initializes a new <tt>Endpoint</tt> with the given ID and display * name. * @param id the ID. * @param displayName the display name. */ public Endpoint(String id, String displayName) { this.id = id; this.displayName = displayName; } /** * Sets the ID of this <tt>Endpoint</tt>. * @param id the ID to set. */ public void setId(String id) { this.id = id; } /** * Returns the ID of this <tt>Endpoint</tt>. * @return the ID of this <tt>Endpoint</tt>. */ public String getId() { return id; } /** * Sets the display name of this <tt>Endpoint</tt>. * @param displayName the display name to set. */ public void setDisplayName(String displayName) { this.displayName = displayName; } /** * Returns the display name of this <tt>Endpoint</tt>. * @return the display name of this <tt>Endpoint</tt>. */ public String getDisplayName() { return displayName; } } }
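A minimal usage sketch of the Content/Channel API shown above. It assumes that Channel, like Content, is a public static nested class of ColibriConferenceIQ (the outer class name and package are inferred from the file path recorded below) and that the surrounding Jitsi codebase is on the classpath; the class name ColibriContentXmlSketch and the literal values are illustrative only.

import net.java.sip.communicator.impl.protocol.jabber.extensions.colibri.ColibriConferenceIQ;

public class ColibriContentXmlSketch {
    public static void main(String[] args) {
        // Build a <content> element holding a single <channel>.
        ColibriConferenceIQ.Content content = new ColibriConferenceIQ.Content("video");

        ColibriConferenceIQ.Channel channel = new ColibriConferenceIQ.Channel();
        channel.setID("channel-1");      // written as the id='...' attribute by printAttributes()
        channel.setLastN(3);             // cap on video streams forwarded to this endpoint
        channel.addSSRC(0x12345678);     // emitted as an <ssrc> child element by printContent()

        content.addChannel(channel);

        // Content.toXML(StringBuilder) appends the <content> element, including
        // each channel's XML, to the supplied builder.
        StringBuilder xml = new StringBuilder();
        content.toXML(xml);
        System.out.println(xml);
    }
}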
Improves the performance of a boolean check. Manually formats source code.
src/net/java/sip/communicator/impl/protocol/jabber/extensions/colibri/ColibriConferenceIQ.java
Improves the performance of a boolean check. Manually formats source code.
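The commit message recorded above attributes a performance gain to reworking a boolean check, and the excerpt shows both "add if absent" idioms side by side: the ternary form in addPayloadType()/addChannel() and the short-circuit form in addSctpConnection(). Which occurrence the commit actually changed is not visible from this excerpt, so the sketch below only contrasts the two forms with hypothetical names; behaviorally they are equivalent, the && form simply avoids the explicit false branch.

import java.util.ArrayList;
import java.util.List;

public class AddIfAbsentSketch {

    private final List<String> items = new ArrayList<>();

    // Ternary form, as in addPayloadType()/addChannel() above.
    public boolean addWithTernary(String item) {
        return items.contains(item) ? false : items.add(item);
    }

    // Short-circuit form, as in addSctpConnection() above: add() only runs,
    // and its result is only returned, when contains() reports the item absent.
    public boolean addWithShortCircuit(String item) {
        return !items.contains(item) && items.add(item);
    }

    public static void main(String[] args) {
        AddIfAbsentSketch sketch = new AddIfAbsentSketch();
        System.out.println(sketch.addWithTernary("a"));      // true: "a" was absent and has been added
        System.out.println(sketch.addWithShortCircuit("a")); // false: "a" is already present, add() never runs
    }
}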
Java
apache-2.0
899304163af2381bf9f9a790baf3c9a82d9c342e
0
ubikloadpack/jmeter,ubikloadpack/jmeter,etnetera/jmeter,ubikloadpack/jmeter,etnetera/jmeter,etnetera/jmeter,ubikloadpack/jmeter,etnetera/jmeter,etnetera/jmeter
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.visualizers.backend; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.LockSupport; import org.apache.jmeter.config.Arguments; import org.apache.jmeter.engine.util.NoThreadClone; import org.apache.jmeter.samplers.Remoteable; import org.apache.jmeter.samplers.SampleEvent; import org.apache.jmeter.samplers.SampleListener; import org.apache.jmeter.samplers.SampleResult; import org.apache.jmeter.testelement.AbstractTestElement; import org.apache.jmeter.testelement.TestElement; import org.apache.jmeter.testelement.TestStateListener; import org.apache.jmeter.testelement.property.TestElementProperty; import org.apache.jmeter.visualizers.backend.graphite.GraphiteBackendListenerClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Async Listener that delegates SampleResult handling to implementations of {@link BackendListenerClient} * @since 2.13 */ public class BackendListener extends AbstractTestElement implements Backend, Serializable, SampleListener, TestStateListener, NoThreadClone, Remoteable { private static final class ListenerClientData { private BackendListenerClient client; private BlockingQueue<SampleResult> queue; private LongAdder queueWaits; // how many times we had to wait to queue a SampleResult private LongAdder queueWaitTime; // how long we had to wait (nanoSeconds) // @GuardedBy("LOCK") private int instanceCount; // number of active tests private CountDownLatch latch; } private static final long serialVersionUID = 1L; private static final Logger log = LoggerFactory.getLogger(BackendListener.class); /** * Property key representing the classname of the BackendListenerClient to user. */ public static final String CLASSNAME = "classname"; /** * Queue size */ public static final String QUEUE_SIZE = "QUEUE_SIZE"; /** * Lock used to protect accumulators update + instanceCount update */ private static final Object LOCK = new Object(); /** * Property key representing the arguments for the BackendListenerClient. */ public static final String ARGUMENTS = "arguments"; /** * The BackendListenerClient class used by this sampler. * Created by testStarted; copied to cloned instances. 
*/ private Class<?> clientClass; public static final String DEFAULT_QUEUE_SIZE = "5000"; // Create unique object as marker for end of queue private static final transient SampleResult FINAL_SAMPLE_RESULT = new SampleResult(); /* * This is needed for distributed testing where there is 1 instance * per server. But we need the total to be shared. */ private static final Map<String, ListenerClientData> queuesByTestElementName = new ConcurrentHashMap<>(); // Name of the test element. Set up by testStarted(). private transient String myName; // Holds listenerClientData for this test element private transient ListenerClientData listenerClientData; /** * Create a BackendListener. */ public BackendListener() { setArguments(new Arguments()); } /* * Ensure that the required class variables are cloned, * as this is not currently done by the super-implementation. */ @Override public Object clone() { BackendListener clone = (BackendListener) super.clone(); clone.clientClass = this.clientClass; return clone; } private Class<?> initClass() { String name = getClassname().trim(); try { return Class.forName(name, false, Thread.currentThread().getContextClassLoader()); } catch (Exception e) { log.error("{}\tException initialising: {}", whoAmI(), name, e); } return null; } /** * Generate a String identifier of this instance for debugging purposes. * * @return a String identifier for this sampler instance */ private String whoAmI() { StringBuilder sb = new StringBuilder(); sb.append(Thread.currentThread().getName()); sb.append("@"); sb.append(Integer.toHexString(hashCode())); sb.append("-"); sb.append(getName()); return sb.toString(); } /* (non-Javadoc) * @see org.apache.jmeter.samplers.SampleListener#sampleOccurred(org.apache.jmeter.samplers.SampleEvent) */ @Override public void sampleOccurred(SampleEvent event) { Arguments args = getArguments(); BackendListenerContext context = new BackendListenerContext(args); SampleResult sr = listenerClientData.client.createSampleResult(context, event.getResult()); if(sr == null) { if (log.isDebugEnabled()) { log.debug("{} => Dropping SampleResult: {}", getName(), event.getResult()); } return; } try { if (!listenerClientData.queue.offer(sr)){ // we failed to add the element first time listenerClientData.queueWaits.add(1L); long t1 = System.nanoTime(); listenerClientData.queue.put(sr); long t2 = System.nanoTime(); listenerClientData.queueWaitTime.add(t2-t1); } } catch (Exception err) { log.error("sampleOccurred, failed to queue the sample", err); } } /** * Thread that dequeues data from queue to send it to {@link BackendListenerClient} */ private static final class Worker extends Thread { private final ListenerClientData listenerClientData; private final BackendListenerContext context; private final BackendListenerClient backendListenerClient; private Worker(BackendListenerClient backendListenerClient, Arguments arguments, ListenerClientData listenerClientData){ this.listenerClientData = listenerClientData; // Allow BackendListenerClient implementations to get access to test element name arguments.addArgument(TestElement.NAME, getName()); context = new BackendListenerContext(arguments); this.backendListenerClient = backendListenerClient; } @Override public void run() { final boolean isDebugEnabled = log.isDebugEnabled(); List<SampleResult> sampleResults = new ArrayList<>(listenerClientData.queue.size()); try { try { boolean endOfLoop = false; while (!endOfLoop) { if (isDebugEnabled) { log.debug("Thread: {} taking SampleResult from queue: {}", 
Thread.currentThread().getName(), listenerClientData.queue.size()); } SampleResult sampleResult = listenerClientData.queue.take(); if (isDebugEnabled) { log.debug("Thread: {} took SampleResult: {}, isFinal: {}", Thread.currentThread().getName(), sampleResult, sampleResult == FINAL_SAMPLE_RESULT); } // try to process as many as possible // The == comparison is not a mistake while (!(endOfLoop = sampleResult == FINAL_SAMPLE_RESULT) && sampleResult != null ) { sampleResults.add(sampleResult); if (isDebugEnabled) { log.debug("Thread: {} polling from queue: {}", Thread.currentThread().getName(), listenerClientData.queue.size()); } sampleResult = listenerClientData.queue.poll(); // returns null if nothing on queue currently if (isDebugEnabled) { log.debug("Thread: {} took from queue: {}, isFinal: {}", Thread.currentThread().getName(), sampleResult, sampleResult == FINAL_SAMPLE_RESULT); } } if (isDebugEnabled) { log.debug("Thread: {} exiting with FINAL EVENT: {}, null: {}", Thread.currentThread().getName(), sampleResult == FINAL_SAMPLE_RESULT, sampleResult == null); } sendToListener(backendListenerClient, context, sampleResults); if(!endOfLoop) { LockSupport.parkNanos(100); } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); } // We may have been interrupted sendToListener(backendListenerClient, context, sampleResults); log.info("Worker ended"); } finally { listenerClientData.latch.countDown(); } } } /** * Send sampleResults to {@link BackendListenerClient} * @param backendListenerClient {@link BackendListenerClient} * @param context {@link BackendListenerContext} * @param sampleResults List of {@link SampleResult} */ static void sendToListener( final BackendListenerClient backendListenerClient, final BackendListenerContext context, final List<SampleResult> sampleResults) { if (!sampleResults.isEmpty()) { backendListenerClient.handleSampleResults(sampleResults, context); sampleResults.clear(); } } /** * Returns reference to {@link BackendListener} * @param clientClass {@link BackendListenerClient} client class * @return BackendListenerClient reference. 
*/ static BackendListenerClient createBackendListenerClientImpl(Class<?> clientClass) { if (clientClass == null) { // failed to initialise the class return new ErrorBackendListenerClient(); } try { return (BackendListenerClient) clientClass.getDeclaredConstructor().newInstance(); } catch (Exception e) { log.error("Exception creating: {}", clientClass, e); return new ErrorBackendListenerClient(); } } // TestStateListener implementation @Override public void testStarted() { testStarted("local"); //$NON-NLS-1$ } @Override public void testStarted(String host) { if (log.isDebugEnabled()) { log.debug("{}\ttestStarted({})", whoAmI(), host); } int queueSize; final String size = getQueueSize(); try { queueSize = Integer.parseInt(size); } catch (NumberFormatException nfe) { log.warn("Invalid queue size '{}' defaulting to {}", size, DEFAULT_QUEUE_SIZE); queueSize = Integer.parseInt(DEFAULT_QUEUE_SIZE); } synchronized (LOCK) { myName = getName(); listenerClientData = queuesByTestElementName.get(myName); if (listenerClientData == null){ // We need to do this to ensure in Distributed testing // that only 1 instance of BackendListenerClient is used clientClass = initClass(); // may be null BackendListenerClient backendListenerClient = createBackendListenerClientImpl(clientClass); BackendListenerContext context = new BackendListenerContext((Arguments)getArguments().clone()); listenerClientData = new ListenerClientData(); listenerClientData.queue = new ArrayBlockingQueue<>(queueSize); listenerClientData.queueWaits = new LongAdder(); listenerClientData.queueWaitTime = new LongAdder(); listenerClientData.latch = new CountDownLatch(1); listenerClientData.client = backendListenerClient; if (log.isInfoEnabled()) { log.info("{}: Starting worker with class: {} and queue capacity: {}", getName(), clientClass, getQueueSize()); } Worker worker = new Worker(backendListenerClient, (Arguments) getArguments().clone(), listenerClientData); worker.setDaemon(true); worker.start(); if (log.isInfoEnabled()) { log.info("{}: Started worker with class: {}", getName(), clientClass); } try { backendListenerClient.setupTest(context); } catch (Exception e) { throw new java.lang.IllegalStateException("Failed calling setupTest", e); } queuesByTestElementName.put(myName, listenerClientData); } listenerClientData.instanceCount++; } } /** * Method called at the end of the test. This is called only on one instance * of BackendListener. This method will loop through all of the other * BackendListenerClients which have been registered (automatically in the * constructor) and notify them that the test has ended, allowing the * BackendListenerClients to cleanup. 
* Implements TestStateListener.testEnded(String) */ @Override public void testEnded(String host) { synchronized (LOCK) { ListenerClientData listenerClientDataForName = queuesByTestElementName.get(myName); if (log.isDebugEnabled()) { log.debug("testEnded called on instance {}#{}", myName, listenerClientDataForName.instanceCount); } if(listenerClientDataForName != null) { listenerClientDataForName.instanceCount--; if (listenerClientDataForName.instanceCount > 0){ // Not the last instance of myName return; } else { queuesByTestElementName.remove(myName); } } else { log.error("No listener client data found for BackendListener {}", myName); } } try { listenerClientData.queue.put(FINAL_SAMPLE_RESULT); } catch (Exception ex) { log.warn("testEnded() with exception: {}", ex, ex); } if (listenerClientData.queueWaits.longValue() > 0) { log.warn( "QueueWaits: {}; QueueWaitTime: {} (nanoseconds), you may need to increase queue capacity, see property 'backend_queue_capacity'", listenerClientData.queueWaits, listenerClientData.queueWaitTime); } try { listenerClientData.latch.await(); BackendListenerContext context = new BackendListenerContext(getArguments()); listenerClientData.client.teardownTest(context); } catch (Exception e) { throw new java.lang.IllegalStateException("Failed calling teardownTest", e); } } @Override public void testEnded() { testEnded("local"); //$NON-NLS-1$ } /** * A {@link BackendListenerClient} implementation used for error handling. If an * error occurs while creating the real BackendListenerClient object, it is * replaced with an instance of this class. Each time a sample occurs with * this class, the result is marked as a failure so the user can see that * the test failed. */ static class ErrorBackendListenerClient extends AbstractBackendListenerClient { /** * Return SampleResult with data on error. * * @see BackendListenerClient#handleSampleResults(List, BackendListenerContext) */ @Override public void handleSampleResults(List<SampleResult> sampleResults, BackendListenerContext context) { log.warn("ErrorBackendListenerClient#handleSampleResult called, noop"); Thread.yield(); } } /* (non-Javadoc) * @see org.apache.jmeter.samplers.SampleListener#sampleStarted(org.apache.jmeter.samplers.SampleEvent) */ @Override public void sampleStarted(SampleEvent e) { // NOOP } /* (non-Javadoc) * @see org.apache.jmeter.samplers.SampleListener#sampleStopped(org.apache.jmeter.samplers.SampleEvent) */ @Override public void sampleStopped(SampleEvent e) { // NOOP } /** * Set the arguments (parameters) for the BackendListenerClient to be executed * with. * * @param args * the new arguments. These replace any existing arguments. */ public void setArguments(Arguments args) { // Bug 59173 - don't save new default argument args.removeArgument(GraphiteBackendListenerClient.USE_REGEXP_FOR_SAMPLERS_LIST, GraphiteBackendListenerClient.USE_REGEXP_FOR_SAMPLERS_LIST_DEFAULT); setProperty(new TestElementProperty(ARGUMENTS, args)); } /** * Get the arguments (parameters) for the BackendListenerClient to be executed * with. 
* * @return the arguments */ public Arguments getArguments() { return (Arguments) getProperty(ARGUMENTS).getObjectValue(); } /** * Sets the Classname of the BackendListenerClient object * * @param classname * the new Classname value */ public void setClassname(String classname) { setProperty(CLASSNAME, classname); } /** * Gets the Classname of the BackendListenerClient object * * @return the Classname value */ public String getClassname() { return getPropertyAsString(CLASSNAME); } /** * Sets the queue size * * @param queueSize the size of the queue */ public void setQueueSize(String queueSize) { setProperty(QUEUE_SIZE, queueSize, DEFAULT_QUEUE_SIZE); } /** * Gets the queue size * * @return int queueSize */ public String getQueueSize() { return getPropertyAsString(QUEUE_SIZE, DEFAULT_QUEUE_SIZE); } }
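BackendListener#sampleOccurred above queues each SampleResult with a non-blocking offer() first and only falls back to a blocking put() when the queue is full, counting how often and for how long producers had to wait. The stand-alone sketch below isolates that pattern; the class and method names are hypothetical, not part of JMeter.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.LongAdder;

public class BoundedQueueWithWaitStats<T> {

    private final BlockingQueue<T> queue;
    private final LongAdder waits = new LongAdder();      // times offer() found the queue full
    private final LongAdder waitNanos = new LongAdder();  // total time spent blocked in put()

    public BoundedQueueWithWaitStats(int capacity) {
        this.queue = new ArrayBlockingQueue<>(capacity);
    }

    public void enqueue(T element) throws InterruptedException {
        // Fast path: offer() succeeds immediately while the queue has room.
        if (!queue.offer(element)) {
            // Slow path: record the wait and block until space frees up.
            waits.add(1L);
            long start = System.nanoTime();
            queue.put(element);
            waitNanos.add(System.nanoTime() - start);
        }
    }

    public long getWaits()     { return waits.longValue(); }

    public long getWaitNanos() { return waitNanos.longValue(); }
}

Keeping the counters in LongAdder rather than AtomicLong mirrors the listener's own choice above and keeps contention low when many sampler threads enqueue concurrently.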
src/components/org/apache/jmeter/visualizers/backend/BackendListener.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.visualizers.backend; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.LockSupport; import org.apache.jmeter.config.Arguments; import org.apache.jmeter.engine.util.NoThreadClone; import org.apache.jmeter.samplers.Remoteable; import org.apache.jmeter.samplers.SampleEvent; import org.apache.jmeter.samplers.SampleListener; import org.apache.jmeter.samplers.SampleResult; import org.apache.jmeter.testelement.AbstractTestElement; import org.apache.jmeter.testelement.TestElement; import org.apache.jmeter.testelement.TestStateListener; import org.apache.jmeter.testelement.property.TestElementProperty; import org.apache.jmeter.visualizers.backend.graphite.GraphiteBackendListenerClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Async Listener that delegates SampleResult handling to implementations of {@link BackendListenerClient} * @since 2.13 */ public class BackendListener extends AbstractTestElement implements Backend, Serializable, SampleListener, TestStateListener, NoThreadClone, Remoteable { private static final class ListenerClientData { private BackendListenerClient client; private BlockingQueue<SampleResult> queue; private LongAdder queueWaits; // how many times we had to wait to queue a SampleResult private LongAdder queueWaitTime; // how long we had to wait (nanoSeconds) // @GuardedBy("LOCK") private int instanceCount; // number of active tests private CountDownLatch latch; } private static final long serialVersionUID = 1L; private static final Logger log = LoggerFactory.getLogger(BackendListener.class); /** * Property key representing the classname of the BackendListenerClient to user. */ public static final String CLASSNAME = "classname"; /** * Queue size */ public static final String QUEUE_SIZE = "QUEUE_SIZE"; /** * Lock used to protect accumulators update + instanceCount update */ private static final Object LOCK = new Object(); /** * Property key representing the arguments for the BackendListenerClient. */ public static final String ARGUMENTS = "arguments"; /** * The BackendListenerClient class used by this sampler. * Created by testStarted; copied to cloned instances. 
*/ private Class<?> clientClass; public static final String DEFAULT_QUEUE_SIZE = "5000"; // Create unique object as marker for end of queue private static final transient SampleResult FINAL_SAMPLE_RESULT = new SampleResult(); /* * This is needed for distributed testing where there is 1 instance * per server. But we need the total to be shared. */ private static final Map<String, ListenerClientData> queuesByTestElementName = new ConcurrentHashMap<>(); // Name of the test element. Set up by testStarted(). private transient String myName; // Holds listenerClientData for this test element private transient ListenerClientData listenerClientData; /** * Create a BackendListener. */ public BackendListener() { setArguments(new Arguments()); } /* * Ensure that the required class variables are cloned, * as this is not currently done by the super-implementation. */ @Override public Object clone() { BackendListener clone = (BackendListener) super.clone(); clone.clientClass = this.clientClass; return clone; } private Class<?> initClass() { String name = getClassname().trim(); try { return Class.forName(name, false, Thread.currentThread().getContextClassLoader()); } catch (Exception e) { log.error("{}\tException initialising: {}", whoAmI(), name, e); } return null; } /** * Generate a String identifier of this instance for debugging purposes. * * @return a String identifier for this sampler instance */ private String whoAmI() { StringBuilder sb = new StringBuilder(); sb.append(Thread.currentThread().getName()); sb.append("@"); sb.append(Integer.toHexString(hashCode())); sb.append("-"); sb.append(getName()); return sb.toString(); } /* (non-Javadoc) * @see org.apache.jmeter.samplers.SampleListener#sampleOccurred(org.apache.jmeter.samplers.SampleEvent) */ @Override public void sampleOccurred(SampleEvent event) { Arguments args = getArguments(); BackendListenerContext context = new BackendListenerContext(args); SampleResult sr = listenerClientData.client.createSampleResult(context, event.getResult()); if(sr == null) { if (log.isDebugEnabled()) { log.debug("{} => Dropping SampleResult: {}", getName(), event.getResult()); } return; } try { if (!listenerClientData.queue.offer(sr)){ // we failed to add the element first time listenerClientData.queueWaits.add(1L); long t1 = System.nanoTime(); listenerClientData.queue.put(sr); long t2 = System.nanoTime(); listenerClientData.queueWaitTime.add(t2-t1); } } catch (Exception err) { log.error("sampleOccurred, failed to queue the sample", err); } } /** * Thread that dequeues data from queue to send it to {@link BackendListenerClient} */ private static final class Worker extends Thread { private final ListenerClientData listenerClientData; private final BackendListenerContext context; private final BackendListenerClient backendListenerClient; private Worker(BackendListenerClient backendListenerClient, Arguments arguments, ListenerClientData listenerClientData){ this.listenerClientData = listenerClientData; // Allow BackendListenerClient implementations to get access to test element name arguments.addArgument(TestElement.NAME, getName()); context = new BackendListenerContext(arguments); this.backendListenerClient = backendListenerClient; } @Override public void run() { final boolean isDebugEnabled = log.isDebugEnabled(); List<SampleResult> sampleResults = new ArrayList<>(listenerClientData.queue.size()); try { try { boolean endOfLoop = false; while (!endOfLoop) { if (isDebugEnabled) { log.debug("Thread: {} taking SampleResult from queue: {}", 
Thread.currentThread().getName(), listenerClientData.queue.size()); } SampleResult sampleResult = listenerClientData.queue.take(); if (isDebugEnabled) { log.debug("Thread: {} took SampleResult: {}, isFinal: {}", Thread.currentThread().getName(), sampleResult, sampleResult == FINAL_SAMPLE_RESULT); } // try to process as many as possible // The == comparison is not a mistake while (!(endOfLoop = sampleResult == FINAL_SAMPLE_RESULT) && sampleResult != null ) { sampleResults.add(sampleResult); if (isDebugEnabled) { log.debug("Thread: {} polling from queue: {}", Thread.currentThread().getName(), listenerClientData.queue.size()); } sampleResult = listenerClientData.queue.poll(); // returns null if nothing on queue currently if (isDebugEnabled) { log.debug("Thread: {} took from queue: {}, isFinal: {}", Thread.currentThread().getName(), sampleResult, sampleResult == FINAL_SAMPLE_RESULT); } } if (isDebugEnabled) { log.debug("Thread: {} exiting with FINAL EVENT: {}, null: {}", Thread.currentThread().getName(), sampleResult == FINAL_SAMPLE_RESULT, sampleResult == null); } sendToListener(backendListenerClient, context, sampleResults); if(!endOfLoop) { LockSupport.parkNanos(100); } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); } // We may have been interrupted sendToListener(backendListenerClient, context, sampleResults); log.info("Worker ended"); } finally { listenerClientData.latch.countDown(); } } } /** * Send sampleResults to {@link BackendListenerClient} * @param backendListenerClient {@link BackendListenerClient} * @param context {@link BackendListenerContext} * @param sampleResults List of {@link SampleResult} */ static void sendToListener( final BackendListenerClient backendListenerClient, final BackendListenerContext context, final List<SampleResult> sampleResults) { if (!sampleResults.isEmpty()) { backendListenerClient.handleSampleResults(sampleResults, context); sampleResults.clear(); } } /** * Returns reference to {@link BackendListener} * @param clientClass {@link BackendListenerClient} client class * @return BackendListenerClient reference. 
*/ static BackendListenerClient createBackendListenerClientImpl(Class<?> clientClass) { if (clientClass == null) { // failed to initialise the class return new ErrorBackendListenerClient(); } try { return (BackendListenerClient) clientClass.newInstance(); } catch (Exception e) { log.error("Exception creating: {}", clientClass, e); return new ErrorBackendListenerClient(); } } // TestStateListener implementation @Override public void testStarted() { testStarted("local"); //$NON-NLS-1$ } @Override public void testStarted(String host) { if (log.isDebugEnabled()) { log.debug("{}\ttestStarted({})", whoAmI(), host); } int queueSize; final String size = getQueueSize(); try { queueSize = Integer.parseInt(size); } catch (NumberFormatException nfe) { log.warn("Invalid queue size '{}' defaulting to {}", size, DEFAULT_QUEUE_SIZE); queueSize = Integer.parseInt(DEFAULT_QUEUE_SIZE); } synchronized (LOCK) { myName = getName(); listenerClientData = queuesByTestElementName.get(myName); if (listenerClientData == null){ // We need to do this to ensure in Distributed testing // that only 1 instance of BackendListenerClient is used clientClass = initClass(); // may be null BackendListenerClient backendListenerClient = createBackendListenerClientImpl(clientClass); BackendListenerContext context = new BackendListenerContext((Arguments)getArguments().clone()); listenerClientData = new ListenerClientData(); listenerClientData.queue = new ArrayBlockingQueue<>(queueSize); listenerClientData.queueWaits = new LongAdder(); listenerClientData.queueWaitTime = new LongAdder(); listenerClientData.latch = new CountDownLatch(1); listenerClientData.client = backendListenerClient; if (log.isInfoEnabled()) { log.info("{}: Starting worker with class: {} and queue capacity: {}", getName(), clientClass, getQueueSize()); } Worker worker = new Worker(backendListenerClient, (Arguments) getArguments().clone(), listenerClientData); worker.setDaemon(true); worker.start(); if (log.isInfoEnabled()) { log.info("{}: Started worker with class: {}", getName(), clientClass); } try { backendListenerClient.setupTest(context); } catch (Exception e) { throw new java.lang.IllegalStateException("Failed calling setupTest", e); } queuesByTestElementName.put(myName, listenerClientData); } listenerClientData.instanceCount++; } } /** * Method called at the end of the test. This is called only on one instance * of BackendListener. This method will loop through all of the other * BackendListenerClients which have been registered (automatically in the * constructor) and notify them that the test has ended, allowing the * BackendListenerClients to cleanup. 
* Implements TestStateListener.testEnded(String) */ @Override public void testEnded(String host) { synchronized (LOCK) { ListenerClientData listenerClientDataForName = queuesByTestElementName.get(myName); if (log.isDebugEnabled()) { log.debug("testEnded called on instance {}#{}", myName, listenerClientDataForName.instanceCount); } if(listenerClientDataForName != null) { listenerClientDataForName.instanceCount--; if (listenerClientDataForName.instanceCount > 0){ // Not the last instance of myName return; } else { queuesByTestElementName.remove(myName); } } else { log.error("No listener client data found for BackendListener {}", myName); } } try { listenerClientData.queue.put(FINAL_SAMPLE_RESULT); } catch (Exception ex) { log.warn("testEnded() with exception: {}", ex, ex); } if (listenerClientData.queueWaits.longValue() > 0) { log.warn( "QueueWaits: {}; QueueWaitTime: {} (nanoseconds), you may need to increase queue capacity, see property 'backend_queue_capacity'", listenerClientData.queueWaits, listenerClientData.queueWaitTime); } try { listenerClientData.latch.await(); BackendListenerContext context = new BackendListenerContext(getArguments()); listenerClientData.client.teardownTest(context); } catch (Exception e) { throw new java.lang.IllegalStateException("Failed calling teardownTest", e); } } @Override public void testEnded() { testEnded("local"); //$NON-NLS-1$ } /** * A {@link BackendListenerClient} implementation used for error handling. If an * error occurs while creating the real BackendListenerClient object, it is * replaced with an instance of this class. Each time a sample occurs with * this class, the result is marked as a failure so the user can see that * the test failed. */ static class ErrorBackendListenerClient extends AbstractBackendListenerClient { /** * Return SampleResult with data on error. * * @see BackendListenerClient#handleSampleResults(List, BackendListenerContext) */ @Override public void handleSampleResults(List<SampleResult> sampleResults, BackendListenerContext context) { log.warn("ErrorBackendListenerClient#handleSampleResult called, noop"); Thread.yield(); } } /* (non-Javadoc) * @see org.apache.jmeter.samplers.SampleListener#sampleStarted(org.apache.jmeter.samplers.SampleEvent) */ @Override public void sampleStarted(SampleEvent e) { // NOOP } /* (non-Javadoc) * @see org.apache.jmeter.samplers.SampleListener#sampleStopped(org.apache.jmeter.samplers.SampleEvent) */ @Override public void sampleStopped(SampleEvent e) { // NOOP } /** * Set the arguments (parameters) for the BackendListenerClient to be executed * with. * * @param args * the new arguments. These replace any existing arguments. */ public void setArguments(Arguments args) { // Bug 59173 - don't save new default argument args.removeArgument(GraphiteBackendListenerClient.USE_REGEXP_FOR_SAMPLERS_LIST, GraphiteBackendListenerClient.USE_REGEXP_FOR_SAMPLERS_LIST_DEFAULT); setProperty(new TestElementProperty(ARGUMENTS, args)); } /** * Get the arguments (parameters) for the BackendListenerClient to be executed * with. 
* * @return the arguments */ public Arguments getArguments() { return (Arguments) getProperty(ARGUMENTS).getObjectValue(); } /** * Sets the Classname of the BackendListenerClient object * * @param classname * the new Classname value */ public void setClassname(String classname) { setProperty(CLASSNAME, classname); } /** * Gets the Classname of the BackendListenerClient object * * @return the Classname value */ public String getClassname() { return getPropertyAsString(CLASSNAME); } /** * Sets the queue size * * @param queueSize the size of the queue */ public void setQueueSize(String queueSize) { setProperty(QUEUE_SIZE, queueSize, DEFAULT_QUEUE_SIZE); } /** * Gets the queue size * * @return int queueSize */ public String getQueueSize() { return getPropertyAsString(QUEUE_SIZE, DEFAULT_QUEUE_SIZE); } }
Replace calls to deprecated Class#newInstance Part of #435 and Bugzilla Id: 62972 git-svn-id: 5ccfe34f605a6c2f9041ff2965ab60012c62539a@1847973 13f79535-47bb-0310-9956-ffa450edef68
src/components/org/apache/jmeter/visualizers/backend/BackendListener.java
Replace calls to deprecated Class#newInstance
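The visible difference between the two versions of BackendListener recorded above is in createBackendListenerClientImpl(), where the deprecated Class#newInstance() call is replaced by getDeclaredConstructor().newInstance(), as the commit message states. A minimal sketch of that migration follows; the class and method names are hypothetical.

public class NewInstanceMigrationSketch {

    @SuppressWarnings("deprecation")
    public static Object createOldWay(Class<?> clazz)
            throws InstantiationException, IllegalAccessException {
        // Deprecated since Java 9: checked exceptions thrown by the no-arg
        // constructor propagate out of newInstance() without being declared.
        return clazz.newInstance();
    }

    public static Object createNewWay(Class<?> clazz) throws ReflectiveOperationException {
        // The replacement: obtain the no-arg constructor explicitly and invoke it.
        // Exceptions thrown by the constructor arrive wrapped in
        // InvocationTargetException, a subclass of ReflectiveOperationException.
        return clazz.getDeclaredConstructor().newInstance();
    }

    public static void main(String[] args) throws Exception {
        System.out.println(createNewWay(StringBuilder.class).getClass()); // class java.lang.StringBuilder
    }
}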
Java
apache-2.0
7af3d4f0aeba724404a5ad3e6e788f375dc2d040
0
acciente/oacc-core,fspinnenhirn/oacc-core-ci
/* * Copyright 2009-2015, Acciente LLC * * Acciente LLC licenses this file to you under the * Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in * writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES * OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing * permissions and limitations under the License. */ package com.acciente.oacc.sql.internal; import com.acciente.oacc.AccessControlContext; import com.acciente.oacc.AuthenticationProvider; import com.acciente.oacc.Credentials; import com.acciente.oacc.DomainCreatePermission; import com.acciente.oacc.DomainCreatePermissions; import com.acciente.oacc.DomainPermission; import com.acciente.oacc.DomainPermissions; import com.acciente.oacc.NotAuthenticatedException; import com.acciente.oacc.NotAuthorizedException; import com.acciente.oacc.OaccException; import com.acciente.oacc.Resource; import com.acciente.oacc.ResourceClassInfo; import com.acciente.oacc.ResourceCreatePermission; import com.acciente.oacc.ResourceCreatePermissions; import com.acciente.oacc.ResourcePermission; import com.acciente.oacc.ResourcePermissions; import com.acciente.oacc.sql.SQLProfile; import com.acciente.oacc.sql.internal.persister.DomainPersister; import com.acciente.oacc.sql.internal.persister.GrantDomainCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.GrantDomainCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.GrantDomainPermissionSysPersister; import com.acciente.oacc.sql.internal.persister.GrantGlobalResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.GrantGlobalResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.GrantResourceCreatePermissionPostCreatePersister; import com.acciente.oacc.sql.internal.persister.GrantResourceCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.GrantResourceCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.GrantResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.GrantResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveDomainPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantDomainCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantDomainCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantDomainPermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantGlobalResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantGlobalResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourceCreatePermissionPostCreatePersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourceCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourceCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourcePermissionSysPersister; import 
com.acciente.oacc.sql.internal.persister.NonRecursiveResourcePersister; import com.acciente.oacc.sql.internal.persister.RecursiveDomainPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantDomainCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantDomainCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantDomainPermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantGlobalResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantGlobalResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourceCreatePermissionPostCreatePersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourceCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourceCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveResourcePersister; import com.acciente.oacc.sql.internal.persister.ResourceClassPermissionPersister; import com.acciente.oacc.sql.internal.persister.ResourceClassPersister; import com.acciente.oacc.sql.internal.persister.ResourcePersister; import com.acciente.oacc.sql.internal.persister.SQLConnection; import com.acciente.oacc.sql.internal.persister.SQLStrings; import com.acciente.oacc.sql.internal.persister.id.DomainId; import com.acciente.oacc.sql.internal.persister.id.Id; import com.acciente.oacc.sql.internal.persister.id.ResourceClassId; import com.acciente.oacc.sql.internal.persister.id.ResourcePermissionId; import javax.sql.DataSource; import java.io.Serializable; import java.sql.Connection; import java.sql.SQLException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @SuppressWarnings({"UnusedAssignment", "ThrowFromFinallyBlock"}) public class SQLAccessControlContext implements AccessControlContext, Serializable { // services private DataSource dataSource; private Connection connection; // state private AuthenticationProvider authenticationProvider; private boolean hasDefaultAuthenticationProvider; // The resource that authenticated in this session with a call to one of the authenticate() methods private Resource authenticatedResource; private String authenticatedResourceDomainName; // The resource as which the session's credentials are checked. 
// This would be the same as the resource
// that initially authenticated - UNLESS another resource is being IMPERSONATED
   private Resource sessionResource;
   private String sessionResourceDomainName;

   // resource ID constants
   private static final Long SYSTEM_RESOURCE_ID = Long.valueOf(0);

   // domain permissions constants
   private static final DomainPermission DomainPermission_CREATE_CHILD_DOMAIN
         = DomainPermissions.getInstance(DomainPermissions.CREATE_CHILD_DOMAIN, false);
   private static final DomainPermission DomainPermission_CREATE_CHILD_DOMAIN_GRANT
         = DomainPermissions.getInstance(DomainPermissions.CREATE_CHILD_DOMAIN, true);
   private static final DomainPermission DomainPermission_DELETE
         = DomainPermissions.getInstance(DomainPermissions.DELETE, false);
   private static final DomainPermission DomainPermission_DELETE_GRANT
         = DomainPermissions.getInstance(DomainPermissions.DELETE, true);
   private static final DomainPermission DomainPermission_SUPER_USER
         = DomainPermissions.getInstance(DomainPermissions.SUPER_USER, false);
   private static final DomainPermission DomainPermission_SUPER_USER_GRANT
         = DomainPermissions.getInstance(DomainPermissions.SUPER_USER, true);

   // resource permissions constants
   private static final ResourcePermission ResourcePermission_INHERIT
         = ResourcePermissions.getInstance(ResourcePermissions.INHERIT, false);
   private static final ResourcePermission ResourcePermission_INHERIT_GRANT
         = ResourcePermissions.getInstance(ResourcePermissions.INHERIT, true);
   private static final ResourcePermission ResourcePermission_IMPERSONATE
         = ResourcePermissions.getInstance(ResourcePermissions.IMPERSONATE, false);
   private static final ResourcePermission ResourcePermission_IMPERSONATE_GRANT
         = ResourcePermissions.getInstance(ResourcePermissions.IMPERSONATE, true);
   private static final ResourcePermission ResourcePermission_RESET_CREDENTIALS
         = ResourcePermissions.getInstance(ResourcePermissions.RESET_CREDENTIALS, false);
   private static final ResourcePermission ResourcePermission_RESET_CREDENTIALS_GRANT
         = ResourcePermissions.getInstance(ResourcePermissions.RESET_CREDENTIALS, true);
   private static final ResourcePermission ResourcePermission_DELETE
         = ResourcePermissions.getInstance(ResourcePermissions.DELETE, false);
   private static final ResourcePermission ResourcePermission_DELETE_GRANT
         = ResourcePermissions.getInstance(ResourcePermissions.DELETE, true);
   private static final ResourcePermission ResourcePermission_QUERY
         = ResourcePermissions.getInstance(ResourcePermissions.QUERY, false);
   private static final ResourcePermission ResourcePermission_QUERY_GRANT
         = ResourcePermissions.getInstance(ResourcePermissions.QUERY, true);

   // persisters
   private final ResourceClassPersister resourceClassPersister;
   private final ResourceClassPermissionPersister resourceClassPermissionPersister;
   private final DomainPersister domainPersister;
   private final GrantDomainCreatePermissionSysPersister grantDomainCreatePermissionSysPersister;
   private final GrantDomainCreatePermissionPostCreateSysPersister grantDomainCreatePermissionPostCreateSysPersister;
   private final GrantDomainPermissionSysPersister grantDomainPermissionSysPersister;
   private final ResourcePersister resourcePersister;
   private final GrantResourceCreatePermissionSysPersister grantResourceCreatePermissionSysPersister;
   private final GrantResourceCreatePermissionPostCreateSysPersister grantResourceCreatePermissionPostCreateSysPersister;
   private final GrantResourceCreatePermissionPostCreatePersister grantResourceCreatePermissionPostCreatePersister;
   private final
GrantResourcePermissionSysPersister grantResourcePermissionSysPersister;
   private final GrantGlobalResourcePermissionSysPersister grantGlobalResourcePermissionSysPersister;
   private final GrantResourcePermissionPersister grantResourcePermissionPersister;
   private final GrantGlobalResourcePermissionPersister grantGlobalResourcePermissionPersister;

   public static AccessControlContext getAccessControlContext(Connection connection,
                                                               String schemaName,
                                                               SQLProfile sqlProfile) {
      return new SQLAccessControlContext(connection, schemaName, sqlProfile);
   }

   public static AccessControlContext getAccessControlContext(DataSource dataSource,
                                                               String schemaName,
                                                               SQLProfile sqlProfile) {
      return new SQLAccessControlContext(dataSource, schemaName, sqlProfile);
   }

   public static AccessControlContext getAccessControlContext(Connection connection,
                                                               String schemaName,
                                                               SQLProfile sqlProfile,
                                                               AuthenticationProvider authenticationProvider) {
      return new SQLAccessControlContext(connection, schemaName, sqlProfile, authenticationProvider);
   }

   public static AccessControlContext getAccessControlContext(DataSource dataSource,
                                                               String schemaName,
                                                               SQLProfile sqlProfile,
                                                               AuthenticationProvider authenticationProvider) {
      return new SQLAccessControlContext(dataSource, schemaName, sqlProfile, authenticationProvider);
   }

   public static void preSerialize(AccessControlContext accessControlContext) {
      if (accessControlContext instanceof SQLAccessControlContext) {
         SQLAccessControlContext sqlAccessControlContext = (SQLAccessControlContext) accessControlContext;
         sqlAccessControlContext.__preSerialize();
      }
   }

   public static void postDeserialize(AccessControlContext accessControlContext, Connection connection) {
      if (accessControlContext instanceof SQLAccessControlContext) {
         SQLAccessControlContext sqlAccessControlContext = (SQLAccessControlContext) accessControlContext;
         sqlAccessControlContext.__postDeserialize(connection);
      }
   }

   public static void postDeserialize(AccessControlContext accessControlContext, DataSource dataSource) {
      if (accessControlContext instanceof SQLAccessControlContext) {
         SQLAccessControlContext sqlAccessControlContext = (SQLAccessControlContext) accessControlContext;
         sqlAccessControlContext.__postDeserialize(dataSource);
      }
   }

   private SQLAccessControlContext(Connection connection, String schemaName, SQLProfile sqlProfile) {
      this(schemaName, sqlProfile);
      this.connection = connection;
      // use the built-in authentication provider when no custom implementation is provided
      this.authenticationProvider = new SQLPasswordAuthenticationProvider(connection, schemaName, sqlProfile.getSqlDialect());
      this.hasDefaultAuthenticationProvider = true;
   }

   private SQLAccessControlContext(Connection connection,
                                   String schemaName,
                                   SQLProfile sqlProfile,
                                   AuthenticationProvider authenticationProvider) {
      this(schemaName, sqlProfile);
      this.connection = connection;
      this.authenticationProvider = authenticationProvider;
      this.hasDefaultAuthenticationProvider = false;
   }

   private SQLAccessControlContext(DataSource dataSource, String schemaName, SQLProfile sqlProfile) {
      this(schemaName, sqlProfile);
      this.dataSource = dataSource;
      // use the built-in authentication provider when no custom implementation is provided
      this.authenticationProvider = new SQLPasswordAuthenticationProvider(dataSource, schemaName, sqlProfile.getSqlDialect());
      this.hasDefaultAuthenticationProvider = true;
   }

   private SQLAccessControlContext(DataSource dataSource,
                                   String schemaName,
                                   SQLProfile sqlProfile,
                                   AuthenticationProvider authenticationProvider) {
      this(schemaName, sqlProfile);
      this.dataSource =
dataSource; this.authenticationProvider = authenticationProvider; this.hasDefaultAuthenticationProvider = false; } private SQLAccessControlContext(String schemaName, SQLProfile sqlProfile) { // generate all the SQLs the persisters need based on the database dialect SQLStrings sqlStrings = SQLStrings.getSQLStrings(schemaName, sqlProfile); // setup persisters resourceClassPersister = new ResourceClassPersister(sqlProfile, sqlStrings); resourceClassPermissionPersister = new ResourceClassPermissionPersister(sqlProfile, sqlStrings); if (sqlProfile.isRecursiveCTEEnabled()) { grantDomainCreatePermissionSysPersister = new RecursiveGrantDomainCreatePermissionSysPersister(sqlProfile, sqlStrings); grantDomainCreatePermissionPostCreateSysPersister = new RecursiveGrantDomainCreatePermissionPostCreateSysPersister(sqlProfile, sqlStrings); grantDomainPermissionSysPersister = new RecursiveGrantDomainPermissionSysPersister(sqlProfile, sqlStrings); domainPersister = new RecursiveDomainPersister(sqlProfile, sqlStrings); resourcePersister = new RecursiveResourcePersister(sqlProfile, sqlStrings); grantResourceCreatePermissionSysPersister = new RecursiveGrantResourceCreatePermissionSysPersister(sqlProfile, sqlStrings); grantResourceCreatePermissionPostCreateSysPersister = new RecursiveGrantResourceCreatePermissionPostCreateSysPersister(sqlProfile, sqlStrings); grantResourceCreatePermissionPostCreatePersister = new RecursiveGrantResourceCreatePermissionPostCreatePersister(sqlProfile, sqlStrings); grantResourcePermissionSysPersister = new RecursiveGrantResourcePermissionSysPersister(sqlProfile, sqlStrings); grantGlobalResourcePermissionSysPersister = new RecursiveGrantGlobalResourcePermissionSysPersister(sqlProfile, sqlStrings); grantResourcePermissionPersister = new RecursiveGrantResourcePermissionPersister(sqlProfile, sqlStrings); grantGlobalResourcePermissionPersister = new RecursiveGrantGlobalResourcePermissionPersister(sqlProfile, sqlStrings); } else { grantDomainCreatePermissionSysPersister = new NonRecursiveGrantDomainCreatePermissionSysPersister(sqlProfile, sqlStrings); grantDomainCreatePermissionPostCreateSysPersister = new NonRecursiveGrantDomainCreatePermissionPostCreateSysPersister(sqlProfile, sqlStrings); grantDomainPermissionSysPersister = new NonRecursiveGrantDomainPermissionSysPersister(sqlProfile, sqlStrings); domainPersister = new NonRecursiveDomainPersister(sqlProfile, sqlStrings); resourcePersister = new NonRecursiveResourcePersister(sqlProfile, sqlStrings); grantResourceCreatePermissionSysPersister = new NonRecursiveGrantResourceCreatePermissionSysPersister(sqlProfile, sqlStrings); grantResourceCreatePermissionPostCreateSysPersister = new NonRecursiveGrantResourceCreatePermissionPostCreateSysPersister(sqlProfile, sqlStrings); grantResourceCreatePermissionPostCreatePersister = new NonRecursiveGrantResourceCreatePermissionPostCreatePersister(sqlProfile, sqlStrings); grantResourcePermissionSysPersister = new NonRecursiveGrantResourcePermissionSysPersister(sqlProfile, sqlStrings); grantGlobalResourcePermissionSysPersister = new NonRecursiveGrantGlobalResourcePermissionSysPersister(sqlProfile, sqlStrings); grantResourcePermissionPersister = new NonRecursiveGrantResourcePermissionPersister(sqlProfile, sqlStrings); grantGlobalResourcePermissionPersister = new NonRecursiveGrantGlobalResourcePermissionPersister(sqlProfile, sqlStrings); } } private void __preSerialize() { this.dataSource = null; this.connection = null; if (hasDefaultAuthenticationProvider) { ((SQLPasswordAuthenticationProvider) 
authenticationProvider).preSerialize(); } } private void __postDeserialize(DataSource dataSource) { this.dataSource = dataSource; this.connection = null; if (hasDefaultAuthenticationProvider) { ((SQLPasswordAuthenticationProvider) authenticationProvider).postDeserialize(dataSource); } } private void __postDeserialize(Connection connection) { this.dataSource = null; this.connection = connection; if (hasDefaultAuthenticationProvider) { ((SQLPasswordAuthenticationProvider) authenticationProvider).postDeserialize(connection); } } @Override public void authenticate(Resource resource, Credentials credentials) { __assertResourceSpecified(resource); __assertCredentialsSpecified(credentials); // we deliberately don't resolve the resource before calling the common handler method, to avoid having // to keep the connection open across a potentially long call to a third-party authenticationProvider or // to avoid having to get a connection twice __authenticate(resource, credentials); } @Override public void authenticate(Resource resource) { __assertResourceSpecified(resource); // we deliberately don't resolve the resource before calling the common handler method, to avoid having // to keep the connection open across a potentially long call to a third-party authenticationProvider or // to avoid having to get a connection twice __authenticate(resource, null); } private void __authenticate(Resource resource, Credentials credentials) { // before delegating to the authentication provider we do some basic validation SQLConnection connection = null; final String resourceDomainForResource; try { connection = __getConnection(); // resolve the resource here - instead of outside this method - to avoid having // to keep the connection open across a potentially long call to a third-party authenticationProvider or // to avoid having to get a connection twice resource = __resolveResource(connection, resource); final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, resource); // complain if the resource is not marked as supporting authentication if (!resourceClassInternalInfo.isAuthenticatable()) { throw new IllegalArgumentException("Resource " + resource + " is not of an authenticatable resource class: " + resourceClassInternalInfo.getResourceClassName()); } resourceDomainForResource = domainPersister.getResourceDomainNameByResourceId(connection, resource); } finally { __closeConnection(connection); } // now we delegate to the authentication provider if (credentials != null) { authenticationProvider.authenticate(resource, credentials); } else { authenticationProvider.authenticate(resource); } authenticatedResource = resource; authenticatedResourceDomainName = resourceDomainForResource; sessionResource = authenticatedResource; sessionResourceDomainName = authenticatedResourceDomainName; } @Override public void unauthenticate() { sessionResource = authenticatedResource = null; sessionResourceDomainName = authenticatedResourceDomainName = null; } @Override public void impersonate(Resource resource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(resource); try { connection = __getConnection(); resource = __resolveResource(connection, resource); __assertImpersonatePermission(connection, resource); // switch the session credentials to the new resource sessionResource = resource; sessionResourceDomainName = domainPersister.getResourceDomainNameByResourceId(connection, resource); } finally { 
__closeConnection(connection); } } private void __assertImpersonatePermission(SQLConnection connection, Resource resource) { final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, resource); // complain if the resource is not of an authenticatable resource-class if (!resourceClassInternalInfo.isAuthenticatable()) { throw new IllegalArgumentException("Resource " + resource + " is not of an authenticatable resource class: " + resourceClassInternalInfo.getResourceClassName()); } boolean impersonatePermissionOK = false; // first check direct permissions final Set<ResourcePermission> resourcePermissions = __getEffectiveResourcePermissions(connection, authenticatedResource, resource); if (resourcePermissions.contains(ResourcePermission_IMPERSONATE) || resourcePermissions.contains(ResourcePermission_IMPERSONATE_GRANT)) { impersonatePermissionOK = true; } if (!impersonatePermissionOK) { // next check global direct permissions final String domainName = domainPersister.getResourceDomainNameByResourceId(connection, resource); final Set<ResourcePermission> globalResourcePermissions = __getEffectiveGlobalResourcePermissions(connection, authenticatedResource, resourceClassInternalInfo.getResourceClassName(), domainName); if (globalResourcePermissions.contains(ResourcePermission_IMPERSONATE) || globalResourcePermissions.contains(ResourcePermission_IMPERSONATE_GRANT)) { impersonatePermissionOK = true; } } if (!impersonatePermissionOK) { // finally check for super user permissions if (__isSuperUserOfResource(connection, authenticatedResource, resource)) { impersonatePermissionOK = true; } } if (!impersonatePermissionOK) { throw NotAuthorizedException.newInstanceForActionOnResource(authenticatedResource, "impersonate", resource); } } @Override public void unimpersonate() { sessionResource = authenticatedResource; sessionResourceDomainName = authenticatedResourceDomainName; } @Override public void setCredentials(Resource resource, Credentials newCredentials) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(resource); if (!authenticatedResource.equals(sessionResource)) { throw new IllegalStateException("Calling setCredentials while impersonating another resource is not valid"); } __assertCredentialsSpecified(newCredentials); final ResourceClassInternalInfo resourceClassInfo; final String domainName; try { connection = __getConnection(); resource = __resolveResource(connection, resource); resourceClassInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, resource); if (!resourceClassInfo.isAuthenticatable()) { throw new IllegalArgumentException("Calling setCredentials for an unauthenticatable resource is not valid"); } domainName = domainPersister.getResourceDomainNameByResourceId(connection, resource); // skip permission checks if the authenticated resource is trying to set its own credentials if (!authenticatedResource.equals(resource)) { __assertResetCredentialsResourcePermission(connection, resource, resourceClassInfo.getResourceClassName(), domainName); } } finally { __closeConnection(connection); } authenticationProvider.validateCredentials(resourceClassInfo.getResourceClassName(), domainName, newCredentials); authenticationProvider.setCredentials(resource, newCredentials); } private void __assertResetCredentialsResourcePermission(SQLConnection connection, Resource resource, String resourceClassName, String domainName) { // first check direct permissions boolean 
hasResetCredentialsPermission = false; final Set<ResourcePermission> resourcePermissions = __getEffectiveResourcePermissions(connection, authenticatedResource, resource); if (resourcePermissions.contains(ResourcePermission_RESET_CREDENTIALS) || resourcePermissions.contains(ResourcePermission_RESET_CREDENTIALS_GRANT)) { hasResetCredentialsPermission = true; } if (!hasResetCredentialsPermission) { // next check global direct permissions final Set<ResourcePermission> globalResourcePermissions = __getEffectiveGlobalResourcePermissions(connection, authenticatedResource, resourceClassName, domainName); if (globalResourcePermissions.contains(ResourcePermission_RESET_CREDENTIALS) || globalResourcePermissions.contains(ResourcePermission_RESET_CREDENTIALS_GRANT)) { hasResetCredentialsPermission = true; } } if (!hasResetCredentialsPermission) { // finally check for super user permissions if (__isSuperUserOfResource(connection, authenticatedResource, resource)) { hasResetCredentialsPermission = true; } } if (!hasResetCredentialsPermission) { throw NotAuthorizedException.newInstanceForActionOnResource(authenticatedResource, "reset credentials", resource); } } @Override public void createResourceClass(String resourceClassName, boolean authenticatable, boolean unauthenticatedCreateAllowed) { SQLConnection connection = null; __assertAuthenticated(); __assertAuthenticatedAsSystemResource(); // check if the auth resource is permitted to create resource classes __assertResourceClassNameValid(resourceClassName); try { connection = __getConnection(); resourceClassName = resourceClassName.trim(); // check if this resource class already exists if (resourceClassPersister.getResourceClassId(connection, resourceClassName) != null) { throw new IllegalArgumentException("Duplicate resource class: " + resourceClassName); } resourceClassPersister.addResourceClass(connection, resourceClassName, authenticatable, unauthenticatedCreateAllowed); } finally { __closeConnection(connection); } } @Override public void createResourcePermission(String resourceClassName, String permissionName) { SQLConnection connection = null; __assertAuthenticated(); __assertAuthenticatedAsSystemResource(); // check if the auth resource is permitted to create resource classes __assertResourceClassSpecified(resourceClassName); __assertPermissionNameValid(permissionName); try { connection = __getConnection(); resourceClassName = resourceClassName.trim(); permissionName = permissionName.trim(); // first verify that resource class is defined Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // check if the permission name is already defined! 
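      /*
       * Minimal usage sketch for the resource-class metadata methods in this region
       * (createResourceClass and createResourcePermission). It assumes a DataSource,
       * an SQLProfile and system-resource credentials supplied by the caller; the
       * variable names and the "OACC" schema name are placeholders, not values taken
       * from this file. Only methods declared in this class are used:
       *
       *    AccessControlContext oacc
       *          = SQLAccessControlContext.getAccessControlContext(dataSource, "OACC", sqlProfile);
       *    oacc.authenticate(systemResource, systemCredentials); // must authenticate as the system resource
       *
       *    // register an authenticatable resource class and a custom permission for it
       *    oacc.createResourceClass("USER", true, false);
       *    oacc.createResourcePermission("USER", "EDIT-PROFILE");
       */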
Id<ResourcePermissionId> permissionId = resourceClassPermissionPersister.getResourceClassPermissionId(connection, resourceClassId, permissionName); if (permissionId != null) { throw new IllegalArgumentException("Duplicate permission: " + permissionName + " for resource class: " + resourceClassName); } resourceClassPermissionPersister.addResourceClassPermission(connection, resourceClassId, permissionName); } finally { __closeConnection(connection); } } @Override public void createDomain(String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertDomainSpecified(domainName); try { connection = __getConnection(); domainName = domainName.trim(); __createDomain(connection, domainName, null); } finally { __closeConnection(connection); } } @Override public void createDomain(String domainName, String parentDomainName) { SQLConnection connection = null; __assertAuthenticated(); __assertDomainSpecified(domainName); __assertParentDomainSpecified(parentDomainName); try { connection = __getConnection(); domainName = domainName.trim(); parentDomainName = parentDomainName.trim(); __createDomain(connection, domainName, parentDomainName); } finally { __closeConnection(connection); } } private void __createDomain(SQLConnection connection, String domainName, String parentDomainName) { // we need to check if the currently authenticated resource is allowed to create domains final Set<DomainCreatePermission> domainCreatePermissions = grantDomainCreatePermissionSysPersister.getDomainCreateSysPermissionsIncludeInherited(connection, sessionResource); // if there is at least one permission, then it implies that this resource is allowed to create domains if (domainCreatePermissions.isEmpty()) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "create domain"); } // determine the post create permissions on the new domain final Set<DomainPermission> newDomainPermissions = __getPostCreateDomainPermissions(grantDomainCreatePermissionPostCreateSysPersister .getDomainCreatePostCreateSysPermissionsIncludeInherited(connection, sessionResource)); // check to ensure that the requested domain name does not already exist if (domainPersister.getResourceDomainId(connection, domainName) != null) { throw new IllegalArgumentException("Duplicate domain: " + domainName); } if (parentDomainName == null) { // create the new root domain domainPersister.addResourceDomain(connection, domainName); } else { // check to ensure that the parent domain name exists Id<DomainId> parentDomainId = domainPersister.getResourceDomainId(connection, parentDomainName); if (parentDomainId == null) { throw new IllegalArgumentException("Parent domain: " + parentDomainName + " not found!"); } // we need to check if the currently authenticated resource is allowed to create child domains in the parent Set<DomainPermission> parentDomainPermissions; parentDomainPermissions = __getEffectiveDomainPermissions(connection, sessionResource, parentDomainName); if (!parentDomainPermissions.contains(DomainPermission_CREATE_CHILD_DOMAIN) && !parentDomainPermissions.contains(DomainPermission_CREATE_CHILD_DOMAIN_GRANT) && !parentDomainPermissions.contains(DomainPermission_SUPER_USER) && !parentDomainPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "create child domain in domain: " + parentDomainName); } // create the new child domain domainPersister.addResourceDomain(connection, domainName, parentDomainId); } if (newDomainPermissions.size() > 0) { // 
grant the currently authenticated resource the privileges to the new domain __setDirectDomainPermissions(connection, sessionResource, domainName, newDomainPermissions, true); } } @Override public boolean deleteDomain(String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertDomainSpecified(domainName); try { connection = __getConnection(); return __deleteDomain(connection, domainName); } finally { __closeConnection(connection); } } private boolean __deleteDomain(SQLConnection connection, String domainName) { // short-circuit out of this call if the specified resource does not exist final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { return false; } // check for authorization (using internal has-permission method is ok because querying for session resource) if (!__hasDomainPermissions(connection, sessionResource, domainName, Collections.singleton(DomainPermission_DELETE))) { throw NotAuthorizedException.newInstanceForDomainPermissions(sessionResource, domainName, DomainPermission_DELETE); } // check if the domain is empty (=domain must not contain any resources, and none in any descendant domains) if (!resourcePersister.isDomainEmpty(connection, domainId)) { throw new IllegalArgumentException("Deleting a domain (" + domainName + ") that contains resources directly or in a descendant domain is invalid"); } // remove any permissions the obsolete resource has as an accessor resource grantDomainPermissionSysPersister.removeAllDomainSysPermissions(connection, domainId); grantResourceCreatePermissionPostCreatePersister.removeAllResourceCreatePostCreatePermissions(connection, domainId); grantResourceCreatePermissionPostCreateSysPersister.removeAllResourceCreatePostCreateSysPermissions(connection, domainId); grantResourceCreatePermissionSysPersister.removeAllResourceCreateSysPermissions(connection, domainId); grantGlobalResourcePermissionPersister.removeAllGlobalResourcePermissions(connection, domainId); grantGlobalResourcePermissionSysPersister.removeAllGlobalSysPermissions(connection, domainId); // remove the domain domainPersister.deleteDomain(connection, domainId); return true; } @Override public Resource createResource(String resourceClassName, String domainName) { SQLConnection connection = null; try { connection = __getConnection(); return __createResource(connection, resourceClassName, domainName, null, null); } finally { __closeConnection(connection); } } @Override public Resource createResource(String resourceClassName, String domainName, Credentials credentials) { SQLConnection connection = null; __assertCredentialsSpecified(credentials); try { connection = __getConnection(); return __createResource(connection, resourceClassName, domainName, null, credentials); } finally { __closeConnection(connection); } } @Override public Resource createResource(String resourceClassName, String domainName, String externalId) { SQLConnection connection = null; __assertExternalIdSpecified(externalId); try { connection = __getConnection(); return __createResource(connection, resourceClassName, domainName, externalId, null); } finally { __closeConnection(connection); } } @Override public Resource createResource(String resourceClassName, String domainName, String externalId, Credentials credentials) { SQLConnection connection = null; __assertExternalIdSpecified(externalId); __assertCredentialsSpecified(credentials); try { connection = __getConnection(); return __createResource(connection, resourceClassName, 
domainName, externalId, credentials); } finally { __closeConnection(connection); } } private Resource __createResource(SQLConnection connection, String resourceClassName, String domainName, String externalId, Credentials credentials) { __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); // validate the resource class resourceClassName = resourceClassName.trim(); final ResourceClassInternalInfo resourceClassInternalInfo = __getResourceClassInternalInfo(connection, resourceClassName); if (!resourceClassInternalInfo.isUnauthenticatedCreateAllowed()) { __assertAuthenticated(); } if (resourceClassInternalInfo.isAuthenticatable()) { // if this resource class is authenticatable, then validate the credentials authenticationProvider.validateCredentials(resourceClassName, domainName, credentials); } else { // if this resource class is NOT authenticatable, then specifying credentials is invalid __assertCredentialsNotSpecified(credentials); } // validate the domain final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // check to ensure that the specified external id does not already exist if (externalId != null && resourcePersister.resolveResourceByExternalId(connection, externalId) != null) { throw new IllegalArgumentException("External id is not unique: " + externalId); } // we first check the create permissions final Set<ResourcePermission> newResourcePermissions; // the only way we can have come here with _sessionResource == null is // when non-authenticated create is allowed for this resource class if (sessionResource == null) { // if this session is unauthenticated then give the new resource all available // permissions to itself newResourcePermissions = new HashSet<>(); for (String permissionName : resourceClassPermissionPersister.getPermissionNames(connection, resourceClassName)) { newResourcePermissions.add(ResourcePermissions.getInstance(permissionName, true)); } newResourcePermissions.add(ResourcePermissions.getInstance(ResourcePermissions.DELETE, true)); newResourcePermissions.add(ResourcePermissions.getInstance(ResourcePermissions.QUERY, true)); if (resourceClassInternalInfo.isAuthenticatable()) { newResourcePermissions.add(ResourcePermissions.getInstance(ResourcePermissions.RESET_CREDENTIALS, true)); newResourcePermissions.add(ResourcePermissions.getInstance(ResourcePermissions.IMPERSONATE, true)); } } else { final Set<ResourceCreatePermission> resourceCreatePermissions; boolean createPermissionOK = false; resourceCreatePermissions = __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); newResourcePermissions = __getPostCreateResourcePermissions(resourceCreatePermissions); if (resourceCreatePermissions.size() > 0) { createPermissionOK = true; } // if that did not work, next we check the session resource has super user permissions // to the domain of the new resource if (!createPermissionOK) { createPermissionOK = __isSuperUserOfDomain(connection, sessionResource, domainName); } if (!createPermissionOK) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "create resource of resource class " + resourceClassName); } } // create the new resource final Resource newResource = resourcePersister.createResource(connection, Id.<ResourceClassId>from(resourceClassInternalInfo .getResourceClassId()), domainId, externalId); // set 
permissions on the new resource, if applicable if (newResourcePermissions != null && newResourcePermissions.size() > 0) { if (sessionResource != null) { __setDirectResourcePermissions(connection, sessionResource, newResource, newResourcePermissions, sessionResource, true); } else { // if this session is unauthenticated the permissions are granted to the newly created resource __setDirectResourcePermissions(connection, newResource, newResource, newResourcePermissions, newResource, true); } } if (credentials != null) { authenticationProvider.setCredentials(newResource, credentials); } return newResource; } @Override public boolean deleteResource(Resource obsoleteResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(obsoleteResource); try { connection = __getConnection(); // we deliberately don't resolve the resource before calling the handler method, because the // delete operation should be idempotent and return false if the resource does not resolve/exist return __deleteResource(connection, obsoleteResource); } finally { __closeConnection(connection); } } private boolean __deleteResource(SQLConnection connection, Resource obsoleteResource) { try { obsoleteResource = __resolveResource(connection, obsoleteResource); } catch (IllegalArgumentException e) { // short-circuit out of this call if the specified resource does not exist/resolve // NOTE that this will still throw an exception if a resource does not match its // specified external id if (e.getMessage().toLowerCase().contains("not found")) { return false; } throw e; } // check for authorization if (!__isSuperUserOfResource(connection, sessionResource, obsoleteResource)) { final Set<ResourcePermission> sessionResourcePermissions = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, obsoleteResource); if (!sessionResourcePermissions.contains(ResourcePermission_DELETE) && !sessionResourcePermissions.contains(ResourcePermission_DELETE_GRANT)) { throw NotAuthorizedException.newInstanceForActionOnResource(sessionResource, "delete", obsoleteResource); } } // remove the resource's credentials, if necessary final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, obsoleteResource); if (resourceClassInternalInfo.isAuthenticatable()) { authenticationProvider.deleteCredentials(obsoleteResource); } // remove any permissions the obsolete resource has as an accessor resource grantDomainCreatePermissionPostCreateSysPersister.removeDomainCreatePostCreateSysPermissions(connection, obsoleteResource); grantDomainCreatePermissionSysPersister.removeDomainCreateSysPermissions(connection, obsoleteResource); grantDomainPermissionSysPersister.removeAllDomainSysPermissions(connection, obsoleteResource); grantResourceCreatePermissionPostCreatePersister.removeAllResourceCreatePostCreatePermissions(connection, obsoleteResource); grantResourceCreatePermissionPostCreateSysPersister.removeAllResourceCreatePostCreateSysPermissions(connection, obsoleteResource); grantResourceCreatePermissionSysPersister.removeAllResourceCreateSysPermissions(connection, obsoleteResource); grantGlobalResourcePermissionPersister.removeAllGlobalResourcePermissions(connection, obsoleteResource); grantGlobalResourcePermissionSysPersister.removeAllGlobalSysPermissions(connection, obsoleteResource); // remove any permissions the obsolete resource has as an accessor resource OR as an accessed resource 
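      // NOTE: unlike the grants removed above, where the obsolete resource can only appear as the
      // accessor, direct resource-to-resource permissions may reference it on either side, so both
      // the accessor and the accessed role are cleared before the resource row itself is deleted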
grantResourcePermissionPersister.removeAllResourcePermissionsAsAccessorOrAccessed(connection, obsoleteResource); grantResourcePermissionSysPersister.removeAllResourceSysPermissionsAsAccessorOrAccessed(connection, obsoleteResource); // remove the resource resourcePersister.deleteResource(connection, obsoleteResource); // handle special case where deleted resource is the session or authenticated resource if (authenticatedResource.equals(obsoleteResource)) { unauthenticate(); } else if (sessionResource.equals(obsoleteResource)) { unimpersonate(); } return true; } @Override public void setDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> permissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionsSpecified(permissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __setDirectDomainPermissions(connection, accessorResource, domainName, permissions, false); } finally { __closeConnection(connection); } } private void __setDirectDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName, Set<DomainPermission> requestedDomainPermissions, boolean newDomainMode) { // determine the domain ID of the domain, for use in the grant below Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // validate requested set is not null; empty set is valid and would remove any direct domain permissions if (requestedDomainPermissions == null) { throw new IllegalArgumentException("Set of requested domain permissions may not be null"); } if (!newDomainMode) { // check if the grantor (=session resource) has permissions to grant the requested permissions final Set<DomainPermission> grantorPermissions = __getEffectiveDomainPermissions(connection, sessionResource, domainName); // check if the grantor (=session resource) has super user permissions to the target domain if (!grantorPermissions.contains(DomainPermission_SUPER_USER) && !grantorPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { final Set<DomainPermission> directAccessorPermissions = __getDirectDomainPermissions(connection, accessorResource, domainId); final Set<DomainPermission> requestedAddPermissions = __subtract(requestedDomainPermissions, directAccessorPermissions); if (!requestedAddPermissions.isEmpty()) { final Set<DomainPermission> unauthorizedAddPermissions; unauthorizedAddPermissions = __subtractDomainPermissionsIfGrantableFrom(requestedAddPermissions, grantorPermissions); if (unauthorizedAddPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "add the following domain permission(s): " + unauthorizedAddPermissions); } } final Set<DomainPermission> requestedRemovePermissions = __subtract(directAccessorPermissions, requestedDomainPermissions); if (!requestedRemovePermissions.isEmpty()) { final Set<DomainPermission> unauthorizedRemovePermissions; unauthorizedRemovePermissions = __subtractDomainPermissionsIfGrantableFrom(requestedRemovePermissions, grantorPermissions); if (unauthorizedRemovePermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "remove the following domain permission(s): " + unauthorizedRemovePermissions); } } } // revoke any existing permissions that accessor to has to this domain 
directly grantDomainPermissionSysPersister.removeDomainSysPermissions(connection, accessorResource, domainId); } // add the new permissions grantDomainPermissionSysPersister.addDomainSysPermissions(connection, accessorResource, sessionResource, domainId, requestedDomainPermissions); } private Set<DomainPermission> __getDirectDomainPermissions(SQLConnection connection, Resource accessorResource, Id<DomainId> domainId) { // only system permissions are possible on a domain return grantDomainPermissionSysPersister.getDomainSysPermissions(connection, accessorResource, domainId); } private Set<DomainPermission> __subtractDomainPermissionsIfGrantableFrom(Set<DomainPermission> candidatePermissionSet, Set<DomainPermission> grantorPermissionSet) { Set<DomainPermission> differenceSet = new HashSet<>(candidatePermissionSet); for (DomainPermission candidatePermission : candidatePermissionSet) { for (DomainPermission grantorPermission : grantorPermissionSet) { if (candidatePermission.isGrantableFrom(grantorPermission)) { differenceSet.remove(candidatePermission); break; } } } return differenceSet; } @Override public void grantDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionsSpecified(domainPermissions); __assertPermissionsSetNotEmpty(domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectDomainPermissions(connection, accessorResource, domainName, domainPermissions); } finally { __closeConnection(connection); } } @Override public void grantDomainPermissions(Resource accessorResource, String domainName, DomainPermission domainPermission, DomainPermission... 
domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionSpecified(domainPermission); __assertVarargPermissionsSpecified(domainPermissions); final Set<DomainPermission> requestedDomainPermissions = __getSetWithoutNullsOrDuplicates(domainPermission, domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectDomainPermissions(connection, accessorResource, domainName, requestedDomainPermissions); } finally { __closeConnection(connection); } } private void __grantDirectDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName, Set<DomainPermission> requestedDomainPermissions) { __assertUniqueDomainPermissionsNames(requestedDomainPermissions); // determine the domain ID of the domain, for use in the grant below Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // validate requested set is not null; empty set is valid and would remove any direct domain permissions if (requestedDomainPermissions == null) { throw new IllegalArgumentException("Set of requested domain permissions may not be null"); } // check if the grantor (=session resource) has permissions to grant the requested permissions final Set<DomainPermission> grantorPermissions = __getEffectiveDomainPermissions(connection, sessionResource, domainName); // check if the grantor (=session resource) has super user permissions to the target domain if (!grantorPermissions.contains(DomainPermission_SUPER_USER) && !grantorPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { final Set<DomainPermission> unauthorizedPermissions = __subtractDomainPermissionsIfGrantableFrom(requestedDomainPermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "grant the following domain permission(s): " + unauthorizedPermissions); } } final Set<DomainPermission> directAccessorPermissions = __getDirectDomainPermissions(connection, accessorResource, domainId); final Set<DomainPermission> addPermissions = new HashSet<>(requestedDomainPermissions.size()); final Set<DomainPermission> updatePermissions = new HashSet<>(requestedDomainPermissions.size()); for (DomainPermission requestedPermission : requestedDomainPermissions) { boolean existingPermission = false; for (DomainPermission existingDirectPermission : directAccessorPermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // we found a match by permission name - now let's see if we need to update existing or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && !requestedPermission.isGrantableFrom(existingDirectPermission)) { // requested permission has higher granting rights than the already existing direct permission, // so we need to update it updatePermissions.add(requestedPermission); } existingPermission = true; break; } } if (!existingPermission) { // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it addPermissions.add(requestedPermission); } } // update any existing permissions that accessor to has to this domain directly grantDomainPermissionSysPersister.updateDomainSysPermissions(connection, accessorResource, sessionResource, domainId, 
updatePermissions); // add the new permissions grantDomainPermissionSysPersister.addDomainSysPermissions(connection, accessorResource, sessionResource, domainId, addPermissions); } private void __assertUniqueDomainPermissionsNames(Set<DomainPermission> domainPermissions) { final Set<String> uniquePermissionNames = new HashSet<>(domainPermissions.size()); for (final DomainPermission domainPermissionPermission : domainPermissions) { if (uniquePermissionNames.contains(domainPermissionPermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + domainPermissionPermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniquePermissionNames.add(domainPermissionPermission.getPermissionName()); } } } @Override public void revokeDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionsSpecified(domainPermissions); __assertPermissionsSetNotEmpty(domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectDomainPermissions(connection, accessorResource, domainName, domainPermissions); } finally { __closeConnection(connection); } } @Override public void revokeDomainPermissions(Resource accessorResource, String domainName, DomainPermission domainPermission, DomainPermission... domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionSpecified(domainPermission); __assertVarargPermissionsSpecified(domainPermissions); final Set<DomainPermission> requestedDomainPermissions = __getSetWithoutNullsOrDuplicates(domainPermission, domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectDomainPermissions(connection, accessorResource, domainName, requestedDomainPermissions); } finally { __closeConnection(connection); } } private void __revokeDirectDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName, Set<DomainPermission> requestedDomainPermissions) { __assertUniqueDomainPermissionsNames(requestedDomainPermissions); // determine the domain ID of the domain, for use in the revocation below Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // validate requested set is not null if (requestedDomainPermissions == null) { throw new IllegalArgumentException("Set of requested domain permissions to be revoked may not be null"); } final Set<DomainPermission> grantorPermissions = __getEffectiveDomainPermissions(connection, sessionResource, domainName); // check if the grantor (=session resource) has super user permissions to the target domain or // has permissions to grant the requested permissions if (!grantorPermissions.contains(DomainPermission_SUPER_USER) && !grantorPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { final Set<DomainPermission> unauthorizedPermissions = __subtractDomainPermissionsIfGrantableFrom(requestedDomainPermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "revoke the following domain 
permission(s): " + unauthorizedPermissions); } } final Set<DomainPermission> directAccessorPermissions = __getDirectDomainPermissions(connection, accessorResource, domainId); final Set<DomainPermission> removePermissions = new HashSet<>(requestedDomainPermissions.size()); for (DomainPermission requestedPermission : requestedDomainPermissions) { for (DomainPermission existingDirectPermission : directAccessorPermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // requested permission has same name and regardless of granting rights we need to remove it removePermissions.add(requestedPermission); break; } } } // remove any existing permissions that accessor has to this domain directly grantDomainPermissionSysPersister.removeDomainSysPermissions(connection, accessorResource, domainId, removePermissions); } @Override public Set<DomainPermission> getDomainPermissions(Resource accessorResource, String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } return __getDirectDomainPermissions(connection, accessorResource, domainId); } finally { __closeConnection(connection); } } @Override public Map<String, Set<DomainPermission>> getDomainPermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __collapseDomainPermissions(grantDomainPermissionSysPersister.getDomainSysPermissions(connection, accessorResource)); } finally { __closeConnection(connection); } } @Override public Set<DomainPermission> getEffectiveDomainPermissions(Resource accessorResource, String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveDomainPermissions(connection, accessorResource, domainName); } finally { __closeConnection(connection); } } private Set<DomainPermission> __getEffectiveDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName) { Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } return __getEffectiveDomainPermissions(connection, accessorResource, domainId); } private Set<DomainPermission> __getEffectiveDomainPermissions(SQLConnection connection, Resource accessorResource, Id<DomainId> domainId) { // only system permissions are possible on a domain final Set<DomainPermission> domainSysPermissionsIncludingInherited = grantDomainPermissionSysPersister.getDomainSysPermissionsIncludeInherited(connection, accessorResource, domainId); for (DomainPermission permission : domainSysPermissionsIncludingInherited) { // check if super-user privileges 
apply and construct set of all possible permissions, if necessary if (DomainPermissions.SUPER_USER.equals(permission.getPermissionName())) { return __getApplicableDomainPermissions(); } } return __collapseDomainPermissions(domainSysPermissionsIncludingInherited); } private Set<DomainPermission> __collapseDomainPermissions(Set<DomainPermission> domainPermissions) { final Set<DomainPermission> collapsedPermissions = new HashSet<>(domainPermissions); for (DomainPermission permission : domainPermissions) { for (DomainPermission grantEquivalentPermission : domainPermissions) { if (permission.isGrantableFrom(grantEquivalentPermission) && !permission.equals(grantEquivalentPermission)) { collapsedPermissions.remove(permission); break; } } } return collapsedPermissions; } @Override public Map<String, Set<DomainPermission>> getEffectiveDomainPermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveDomainPermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Set<DomainPermission>> __getEffectiveDomainPermissionsMap(SQLConnection connection, Resource accessorResource) { final Map<String, Set<DomainPermission>> domainSysPermissionsIncludingInherited = grantDomainPermissionSysPersister.getDomainSysPermissionsIncludeInherited(connection, accessorResource); for (String domainName : domainSysPermissionsIncludingInherited.keySet()) { final Set<DomainPermission> domainPermissions = domainSysPermissionsIncludingInherited.get(domainName); if (domainPermissions.contains(DomainPermission_SUPER_USER) || domainPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { domainSysPermissionsIncludingInherited.put(domainName, __getApplicableDomainPermissions()); } } return __collapseDomainPermissions(domainSysPermissionsIncludingInherited); } private static Set<DomainPermission> __getApplicableDomainPermissions() { Set<DomainPermission> superDomainPermissions = new HashSet<>(3); superDomainPermissions.add(DomainPermission_SUPER_USER_GRANT); superDomainPermissions.add(DomainPermission_CREATE_CHILD_DOMAIN_GRANT); superDomainPermissions.add(DomainPermission_DELETE_GRANT); return superDomainPermissions; } private Map<String, Set<DomainPermission>> __collapseDomainPermissions(Map<String, Set<DomainPermission>> domainPermissionsMap) { Map<String, Set<DomainPermission>> collapsedDomainPermissionsMap = new HashMap<>(domainPermissionsMap.size()); for (String domainName : domainPermissionsMap.keySet()) { collapsedDomainPermissionsMap.put(domainName, __collapseDomainPermissions(domainPermissionsMap.get(domainName))); } return collapsedDomainPermissionsMap; } @Override public void setDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __setDirectDomainCreatePermissions(connection, accessorResource, domainCreatePermissions); } finally { __closeConnection(connection); } } private void __setDirectDomainCreatePermissions(SQLConnection connection, Resource accessorResource, 
Set<DomainCreatePermission> requestedDomainCreatePermissions) { __assertSetContainsDomainCreateSystemPermission(requestedDomainCreatePermissions); __assertUniqueSystemOrPostCreateDomainPermissionNames(requestedDomainCreatePermissions); // check if grantor (=session resource) is authorized to add/remove requested permissions final Set<DomainCreatePermission> grantorPermissions = __getEffectiveDomainCreatePermissions(connection, sessionResource); final Set<DomainCreatePermission> directAccessorPermissions = __getDirectDomainCreatePermissions(connection, accessorResource); final Set<DomainCreatePermission> requestedAddPermissions = __subtract(requestedDomainCreatePermissions, directAccessorPermissions); if (!requestedAddPermissions.isEmpty()) { final Set<DomainCreatePermission> unauthorizedAddPermissions = __subtractDomainCreatePermissionsIfGrantableFrom(requestedAddPermissions, grantorPermissions); if (unauthorizedAddPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "add the following domain create permission(s): " + unauthorizedAddPermissions); } } final Set<DomainCreatePermission> requestedRemovePermissions = __subtract(directAccessorPermissions, requestedDomainCreatePermissions); if (!requestedRemovePermissions.isEmpty()) { final Set<DomainCreatePermission> unauthorizedRemovePermissions = __subtractDomainCreatePermissionsIfGrantableFrom(requestedRemovePermissions, grantorPermissions); if (unauthorizedRemovePermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "remove the following domain create permission(s): " + unauthorizedRemovePermissions); } } // NOTE: our current data model only support system permissions for domains // revoke any existing domain system permission (*CREATE) this accessor has to this domain grantDomainCreatePermissionSysPersister.removeDomainCreateSysPermissions(connection, accessorResource); // revoke any existing domain post create system permissions this accessor has to this domain grantDomainCreatePermissionPostCreateSysPersister.removeDomainCreatePostCreateSysPermissions(connection, accessorResource); // add the domain system permissions (*CREATE) grantDomainCreatePermissionSysPersister.addDomainCreateSysPermissions(connection, accessorResource, sessionResource, requestedDomainCreatePermissions); // add the domain post create system permissions grantDomainCreatePermissionPostCreateSysPersister .addDomainCreatePostCreateSysPermissions(connection, accessorResource, sessionResource, requestedDomainCreatePermissions); } private void __assertSetContainsDomainCreateSystemPermission(Set<DomainCreatePermission> domainCreatePermissions) { if (!domainCreatePermissions.isEmpty()) { // if at least one permission is specified, then there must be a *CREATE permission in the set if (!__setContainsDomainCreateSystemPermission(domainCreatePermissions)) { throw new IllegalArgumentException("Domain create permission *CREATE must be specified"); } } } private boolean __setContainsDomainCreateSystemPermission(Set<DomainCreatePermission> domainCreatePermissions) { for (final DomainCreatePermission domainCreatePermission : domainCreatePermissions) { if (domainCreatePermission.isSystemPermission() && DomainCreatePermissions.CREATE.equals(domainCreatePermission.getPermissionName())) { return true; } } return false; } private Set<DomainCreatePermission> __getDirectDomainCreatePermissions(SQLConnection connection, Resource accessorResource) { final Set<DomainCreatePermission> domainCreatePermissions = new 
HashSet<>(); domainCreatePermissions .addAll(grantDomainCreatePermissionSysPersister.getDomainCreateSysPermissions(connection, accessorResource)); domainCreatePermissions .addAll(grantDomainCreatePermissionPostCreateSysPersister.getDomainCreatePostCreateSysPermissions( connection, accessorResource)); return domainCreatePermissions; } private Set<DomainCreatePermission> __subtractDomainCreatePermissionsIfGrantableFrom(Set<DomainCreatePermission> candidatePermissionSet, Set<DomainCreatePermission> grantorPermissionSet) { Set<DomainCreatePermission> differenceSet = new HashSet<>(candidatePermissionSet); for (DomainCreatePermission candidatePermission : candidatePermissionSet) { for (DomainCreatePermission grantorPermission : grantorPermissionSet) { if (candidatePermission.isGrantableFrom(grantorPermission)) { differenceSet.remove(candidatePermission); break; } } } return differenceSet; } @Override public void grantDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainCreatePermissions); __assertPermissionsSetNotEmpty(domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectDomainCreatePermissions(connection, accessorResource, domainCreatePermissions); } finally { __closeConnection(connection); } } @Override public void grantDomainCreatePermissions(Resource accessorResource, DomainCreatePermission domainCreatePermission, DomainCreatePermission... domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionSpecified(domainCreatePermission); __assertVarargPermissionsSpecified(domainCreatePermissions); final Set<DomainCreatePermission> requestedDomainCreatePermissions = __getSetWithoutNullsOrDuplicates(domainCreatePermission, domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectDomainCreatePermissions(connection, accessorResource, requestedDomainCreatePermissions); } finally { __closeConnection(connection); } } private void __grantDirectDomainCreatePermissions(SQLConnection connection, Resource accessorResource, Set<DomainCreatePermission> requestedDomainCreatePermissions) { __assertUniqueSystemOrPostCreateDomainPermissionNames(requestedDomainCreatePermissions); // check if grantor (=session resource) is authorized to add requested permissions final Set<DomainCreatePermission> grantorPermissions = __getEffectiveDomainCreatePermissions(connection, sessionResource); final Set<DomainCreatePermission> unauthorizedPermissions = __subtractDomainCreatePermissionsIfGrantableFrom(requestedDomainCreatePermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "grant the following domain create permission(s): " + unauthorizedPermissions); } final Set<DomainCreatePermission> directAccessorPermissions = __getDirectDomainCreatePermissions(connection, accessorResource); if (directAccessorPermissions.isEmpty()) { // our invariant is that a resource's direct create permissions must include the *CREATE system permission; // if there are no direct create permissions, then the requested permissions to be granted need to include *CREATE 
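      // Editorial note (hedged): the *CREATE invariant described above means that the very first
      // grant of domain-create permissions to an accessor must itself include the *CREATE system
      // permission. A caller-side sketch, assuming the public DomainCreatePermissions /
      // DomainPermissions factory methods of this library (not shown in this file):
      //
      //    Set<DomainCreatePermission> perms = new HashSet<>();
      //    perms.add(DomainCreatePermissions.getInstance(DomainCreatePermissions.CREATE));
      //    perms.add(DomainCreatePermissions.getInstance(
      //          DomainPermissions.getInstance(DomainPermissions.CREATE_CHILD_DOMAIN)));
      //    accessControlContext.grantDomainCreatePermissions(accessorResource, perms);
      //
      // Omitting the *CREATE entry makes the assertion below fail for accessors that do not yet
      // hold any direct domain-create permissions.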
__assertSetContainsDomainCreateSystemPermission(requestedDomainCreatePermissions); } final Set<DomainCreatePermission> addPermissions = new HashSet<>(requestedDomainCreatePermissions.size()); final Set<DomainCreatePermission> updatePermissions = new HashSet<>(requestedDomainCreatePermissions.size()); for (DomainCreatePermission requestedPermission : requestedDomainCreatePermissions) { boolean existingPermission = false; if (requestedPermission.isSystemPermission()) { for (DomainCreatePermission existingDirectPermission : directAccessorPermissions) { if (existingDirectPermission.isSystemPermission() && requestedPermission.getSystemPermissionId() == existingDirectPermission.getSystemPermissionId()) { // we found a match by sysId - now let's see if we need to update existing or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && !requestedPermission.isGrantableFrom(existingDirectPermission)) { // requested permission has higher granting rights than // the already existing direct permission, so we need to update it updatePermissions.add(requestedPermission); } existingPermission = true; break; } } } else { final DomainPermission requestedPostCreateDomainPermission = requestedPermission.getPostCreateDomainPermission(); for (DomainCreatePermission existingDirectPermission : directAccessorPermissions) { if (!existingDirectPermission.isSystemPermission()) { final DomainPermission existingPostCreateDomainPermission = existingDirectPermission.getPostCreateDomainPermission(); if (requestedPostCreateDomainPermission.equalsIgnoreGrant(existingPostCreateDomainPermission)) { // found a match in name - let's check compatibility first if (requestedPermission.isWithGrant() != requestedPostCreateDomainPermission.isWithGrant() && existingDirectPermission.isWithGrant() != existingPostCreateDomainPermission.isWithGrant() && requestedPermission.isWithGrant() != existingDirectPermission.isWithGrant()) { // the requested permission is incompatible to the existing permission because we can't // perform grant operations (a)/G -> (a/G) or (a/G) -> (a)/G without removing either the // create or post-create granting option throw new IllegalArgumentException("Requested create permissions " + requestedDomainCreatePermissions + " are incompatible with existing create permissions " + directAccessorPermissions); } // now let's see if we need to update existing permission or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && ((requestedPermission.isWithGrant() && requestedPostCreateDomainPermission.isWithGrant()) || (!existingDirectPermission.isWithGrant() && !existingPostCreateDomainPermission.isWithGrant()))) { // the two permissions match in name, but the requested has higher granting rights, // so we need to update updatePermissions.add(requestedPermission); } // because we found a match in name, we can skip comparing requested against other existing permissions existingPermission = true; break; } } } } if (!existingPermission) { // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it addPermissions.add(requestedPermission); } } // update the domain system permissions (*CREATE), if necessary grantDomainCreatePermissionSysPersister.updateDomainCreateSysPermissions(connection, accessorResource, sessionResource, updatePermissions); // update the domain post create system permissions, if necessary grantDomainCreatePermissionPostCreateSysPersister .updateDomainCreatePostCreateSysPermissions(connection, 
accessorResource, sessionResource, updatePermissions); // add any new domain system permissions (*CREATE) grantDomainCreatePermissionSysPersister.addDomainCreateSysPermissions(connection, accessorResource, sessionResource, addPermissions); // add any new domain post create system permissions grantDomainCreatePermissionPostCreateSysPersister .addDomainCreatePostCreateSysPermissions(connection, accessorResource, sessionResource, addPermissions); } private void __assertUniqueSystemOrPostCreateDomainPermissionNames(Set<DomainCreatePermission> domainCreatePermissions) { final Set<String> uniqueSystemPermissionNames = new HashSet<>(domainCreatePermissions.size()); final Set<String> uniquePostCreatePermissionNames = new HashSet<>(domainCreatePermissions.size()); for (final DomainCreatePermission domainCreatePermission : domainCreatePermissions) { if (domainCreatePermission.isSystemPermission()) { if (uniqueSystemPermissionNames.contains(domainCreatePermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + domainCreatePermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniqueSystemPermissionNames.add(domainCreatePermission.getPermissionName()); } } else { final DomainPermission postCreateDomainPermission = domainCreatePermission.getPostCreateDomainPermission(); if (uniquePostCreatePermissionNames.contains(postCreateDomainPermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + postCreateDomainPermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniquePostCreatePermissionNames.add(postCreateDomainPermission.getPermissionName()); } } } } @Override public void revokeDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainCreatePermissions); __assertPermissionsSetNotEmpty(domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectDomainCreatePermissions(connection, accessorResource, domainCreatePermissions); } finally { __closeConnection(connection); } } @Override public void revokeDomainCreatePermissions(Resource accessorResource, DomainCreatePermission domainCreatePermission, DomainCreatePermission... 
domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionSpecified(domainCreatePermission); __assertVarargPermissionsSpecified(domainCreatePermissions); final Set<DomainCreatePermission> requestedDomainCreatePermissions = __getSetWithoutNullsOrDuplicates(domainCreatePermission, domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectDomainCreatePermissions(connection, accessorResource, requestedDomainCreatePermissions); } finally { __closeConnection(connection); } } private void __revokeDirectDomainCreatePermissions(SQLConnection connection, Resource accessorResource, Set<DomainCreatePermission> requestedDomainCreatePermissions) { __assertUniqueSystemOrPostCreateDomainPermissionNames(requestedDomainCreatePermissions); // check if grantor (=session resource) is authorized to revoke requested permissions final Set<DomainCreatePermission> grantorPermissions = __getEffectiveDomainCreatePermissions(connection, sessionResource); final Set<DomainCreatePermission> unauthorizedPermissions = __subtractDomainCreatePermissionsIfGrantableFrom(requestedDomainCreatePermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "revoke the following domain create permission(s): " + unauthorizedPermissions); } final Set<DomainCreatePermission> directAccessorPermissions = __getDirectDomainCreatePermissions(connection, accessorResource); if ((directAccessorPermissions.size() > requestedDomainCreatePermissions.size()) && __setContainsDomainCreateSystemPermission(requestedDomainCreatePermissions)) { // our invariant is that a resource's direct create permissions must include the *CREATE system permission; // if after revoking the requested permissions, the remaining set wouldn't include the *CREATE, we'd have a problem throw new IllegalArgumentException( "Attempt to revoke a subset of domain create permissions that includes the *CREATE system permission: " + requestedDomainCreatePermissions); } final Set<DomainCreatePermission> removePermissions = new HashSet<>(requestedDomainCreatePermissions.size()); for (DomainCreatePermission requestedPermission : requestedDomainCreatePermissions) { if (requestedPermission.isSystemPermission()) { for (DomainCreatePermission existingDirectPermission : directAccessorPermissions) { if (existingDirectPermission.isSystemPermission() && requestedPermission.getSystemPermissionId() == existingDirectPermission.getSystemPermissionId()) { // requested permission has same system Id as an already existing direct permission, so remove it removePermissions.add(requestedPermission); break; } } } else { final DomainPermission requestedPostCreateDomainPermission = requestedPermission.getPostCreateDomainPermission(); for (DomainCreatePermission existingDirectPermission : directAccessorPermissions) { if (!existingDirectPermission.isSystemPermission()) { // now let's look at the post-create permissions if (requestedPostCreateDomainPermission.equalsIgnoreGrant(existingDirectPermission.getPostCreateDomainPermission())) { // requested post-create permission has same name as an already existing direct permission, so remove it removePermissions.add(requestedPermission); break; } } } } } // remove the domain system permissions (*CREATE), if necessary grantDomainCreatePermissionSysPersister.removeDomainCreateSysPermissions(connection, 
accessorResource, removePermissions); // remove the domain post create system permissions, if necessary grantDomainCreatePermissionPostCreateSysPersister.removeDomainCreatePostCreateSysPermissions(connection, accessorResource, removePermissions); } @Override public Set<DomainCreatePermission> getDomainCreatePermissions(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getDirectDomainCreatePermissions(connection, accessorResource); } finally { __closeConnection(connection); } } @Override public Set<DomainCreatePermission> getEffectiveDomainCreatePermissions(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveDomainCreatePermissions(connection, accessorResource); } finally { __closeConnection(connection); } } private Set<DomainCreatePermission> __getEffectiveDomainCreatePermissions(SQLConnection connection, Resource accessorResource) { final Set<DomainCreatePermission> domainCreatePermissions = new HashSet<>(); domainCreatePermissions .addAll(grantDomainCreatePermissionSysPersister.getDomainCreateSysPermissionsIncludeInherited(connection, accessorResource)); domainCreatePermissions .addAll(grantDomainCreatePermissionPostCreateSysPersister .getDomainCreatePostCreateSysPermissionsIncludeInherited(connection, accessorResource)); return __collapseDomainCreatePermissions(domainCreatePermissions); } private Set<DomainCreatePermission> __collapseDomainCreatePermissions(Set<DomainCreatePermission> domainCreatePermissions) { final Set<DomainCreatePermission> collapsedPermissions = new HashSet<>(domainCreatePermissions); for (DomainCreatePermission permission : domainCreatePermissions) { for (DomainCreatePermission grantEquivalentPermission : domainCreatePermissions) { if (permission.isGrantableFrom(grantEquivalentPermission) && !permission.equals(grantEquivalentPermission)) { collapsedPermissions.remove(permission); break; } } } return collapsedPermissions; } @Override public void setResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __setDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, resourceCreatePermissions); } finally { __closeConnection(connection); } } private void __setDirectResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> requestedResourceCreatePermissions) { // verify that resource class is defined and get its metadata final ResourceClassInternalInfo resourceClassInfo = __getResourceClassInternalInfo(connection, resourceClassName); final Id<ResourceClassId> resourceClassId = 
Id.from(resourceClassInfo.getResourceClassId()); // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // ensure that the *CREATE system permissions was specified __assertSetContainsResourceCreateSystemPermission(requestedResourceCreatePermissions); // ensure that the post create permissions are all in the correct resource class __assertUniquePostCreatePermissionsNamesForResourceClass(connection, requestedResourceCreatePermissions, resourceClassInfo); // check if the grantor (=session resource) is authorized to grant the requested permissions if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) { final Set<ResourceCreatePermission> grantorPermissions = __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); final Set<ResourceCreatePermission> directAccessorPermissions = __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassId, domainId); final Set<ResourceCreatePermission> requestedAddPermissions = __subtract(requestedResourceCreatePermissions, directAccessorPermissions); if (!requestedAddPermissions.isEmpty()) { final Set<ResourceCreatePermission> unauthorizedAddPermissions = __subtractResourceCreatePermissionsIfGrantableFrom(requestedAddPermissions, grantorPermissions); if (unauthorizedAddPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "add the following permission(s): " + unauthorizedAddPermissions); } } final Set<ResourceCreatePermission> requestedRemovePermissions = __subtract(directAccessorPermissions, requestedResourceCreatePermissions); if (!requestedRemovePermissions.isEmpty()) { final Set<ResourceCreatePermission> unauthorizedRemovePermissions = __subtractResourceCreatePermissionsIfGrantableFrom(requestedRemovePermissions, grantorPermissions); if (unauthorizedRemovePermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "remove the following permission(s): " + unauthorizedRemovePermissions); } } } // revoke any existing *CREATE system permissions this accessor has to this resource class grantResourceCreatePermissionSysPersister.removeResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId); // revoke any existing post create system permissions this accessor has to this resource class grantResourceCreatePermissionPostCreateSysPersister.removeResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId); // revoke any existing post create non-system permissions this accessor has to this resource class grantResourceCreatePermissionPostCreatePersister.removeResourceCreatePostCreatePermissions(connection, accessorResource, resourceClassId, domainId); // grant the *CREATE system permissions grantResourceCreatePermissionSysPersister.addResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, requestedResourceCreatePermissions, sessionResource); // grant the post create system permissions grantResourceCreatePermissionPostCreateSysPersister.addResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, requestedResourceCreatePermissions, sessionResource); // grant the post create non-system permissions grantResourceCreatePermissionPostCreatePersister.addResourceCreatePostCreatePermissions(connection, 
accessorResource, resourceClassId, domainId, requestedResourceCreatePermissions, sessionResource); } private void __assertSetContainsResourceCreateSystemPermission(Set<ResourceCreatePermission> resourceCreatePermissions) { if (!resourceCreatePermissions.isEmpty()) { boolean createSysPermissionFound = false; for (final ResourceCreatePermission resourceCreatePermission : resourceCreatePermissions) { if (resourceCreatePermission.isSystemPermission() && ResourceCreatePermissions.CREATE.equals(resourceCreatePermission.getPermissionName())) { createSysPermissionFound = true; break; } } // if at least one permission is specified, then there must be a *CREATE permission in the set if (!createSysPermissionFound) { throw new IllegalArgumentException("Permission: *CREATE must be specified"); } } } private void __assertUniquePostCreatePermissionsNamesForResourceClass(SQLConnection connection, Set<ResourceCreatePermission> resourceCreatePermissions, ResourceClassInternalInfo resourceClassInternalInfo) { final List<String> validPermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo); final Set<String> uniqueSystemPermissionNames = new HashSet<>(resourceCreatePermissions.size()); final Set<String> uniquePostCreatePermissionNames = new HashSet<>(resourceCreatePermissions.size()); for (final ResourceCreatePermission resourceCreatePermission : resourceCreatePermissions) { if (resourceCreatePermission.isSystemPermission()) { if (uniqueSystemPermissionNames.contains(resourceCreatePermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + resourceCreatePermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniqueSystemPermissionNames.add(resourceCreatePermission.getPermissionName()); } } else { final ResourcePermission postCreateResourcePermission = resourceCreatePermission.getPostCreateResourcePermission(); if (!validPermissionNames.contains(postCreateResourcePermission.getPermissionName())) { if (postCreateResourcePermission.isSystemPermission()) { // currently the only invalid system permissions are for unauthenticatable resource classes throw new IllegalArgumentException("Permission: " + postCreateResourcePermission.getPermissionName() + ", not valid for unauthenticatable resource"); } else { throw new IllegalArgumentException("Permission: " + postCreateResourcePermission.getPermissionName() + " is not defined for resource class: " + resourceClassInternalInfo.getResourceClassName()); } } if (uniquePostCreatePermissionNames.contains(postCreateResourcePermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + postCreateResourcePermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniquePostCreatePermissionNames.add(postCreateResourcePermission.getPermissionName()); } } } } private Set<ResourceCreatePermission> __subtractResourceCreatePermissionsIfGrantableFrom(Set<ResourceCreatePermission> candidatePermissionSet, Set<ResourceCreatePermission> grantorPermissionSet) { Set<ResourceCreatePermission> differenceSet = new HashSet<>(candidatePermissionSet); for (ResourceCreatePermission candidatePermission : candidatePermissionSet) { for (ResourceCreatePermission grantorPermission : grantorPermissionSet) { if (candidatePermission.isGrantableFrom(grantorPermission)) { differenceSet.remove(candidatePermission); break; } } } return differenceSet; } private <T> Set<T> __subtract(Set<T> minuendSet, Set<T> subtrahendSet) { Set<T> differenceSet 
= new HashSet<>(minuendSet); differenceSet.removeAll(subtrahendSet); return differenceSet; } private Set<ResourceCreatePermission> __getDirectResourceCreatePermissions(SQLConnection connection, Resource accessorResource, Id<ResourceClassId> resourceClassId, Id<DomainId> domainId) { Set<ResourceCreatePermission> resourceCreatePermissions = new HashSet<>(); // first get the *CREATE system permission the accessor has directly to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionSysPersister.getResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId)); // next get the post create system permissions the accessor has directly to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreateSysPersister.getResourceCreatePostCreateSysPermissions( connection, accessorResource, resourceClassId, domainId)); // next get the post create non-system permissions the accessor has directly to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreatePersister.getResourceCreatePostCreatePermissions( connection, accessorResource, resourceClassId, domainId)); return resourceCreatePermissions; } @Override public void grantResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourceCreatePermissions); __assertPermissionsSetNotEmpty(resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, resourceCreatePermissions); } finally { __closeConnection(connection); } } @Override public void grantResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourceCreatePermission resourceCreatePermission, ResourceCreatePermission... 
resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourceCreatePermission); __assertVarargPermissionsSpecified(resourceCreatePermissions); final Set<ResourceCreatePermission> requestedResourceCreatePermissions = __getSetWithoutNullsOrDuplicates(resourceCreatePermission, resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, requestedResourceCreatePermissions); } finally { __closeConnection(connection); } } private void __grantDirectResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> requestedResourceCreatePermissions) { // verify that resource class is defined and get its metadata final ResourceClassInternalInfo resourceClassInfo = __getResourceClassInternalInfo(connection, resourceClassName); final Id<ResourceClassId> resourceClassId = Id.from(resourceClassInfo.getResourceClassId()); // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // ensure that the post create permissions are all in the correct resource class __assertUniquePostCreatePermissionsNamesForResourceClass(connection, requestedResourceCreatePermissions, resourceClassInfo); // check if the grantor (=session resource) is authorized to grant the requested permissions if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) { final Set<ResourceCreatePermission> grantorPermissions = __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); final Set<ResourceCreatePermission> unauthorizedAddPermissions = __subtractResourceCreatePermissionsIfGrantableFrom(requestedResourceCreatePermissions, grantorPermissions); if (unauthorizedAddPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "grant the following permission(s): " + unauthorizedAddPermissions); } } // ensure that the *CREATE system permissions was specified final Set<ResourceCreatePermission> directAccessorPermissions = __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassId, domainId); if (directAccessorPermissions.isEmpty()) { // our invariant is that a resource's direct create permissions must include the *CREATE system permission; // if there are no direct create permissions, then the requested permissions to be granted needs to include *CREATE __assertSetContainsResourceCreateSystemPermission(requestedResourceCreatePermissions); } final Set<ResourceCreatePermission> addPermissions = new HashSet<>(requestedResourceCreatePermissions.size()); final Set<ResourceCreatePermission> updatePermissions = new HashSet<>(requestedResourceCreatePermissions.size()); for (ResourceCreatePermission requestedPermission : requestedResourceCreatePermissions) { boolean existingPermission = false; if (requestedPermission.isSystemPermission()) { for (ResourceCreatePermission existingDirectPermission : directAccessorPermissions) { if (existingDirectPermission.isSystemPermission() && 
requestedPermission.getSystemPermissionId() == existingDirectPermission.getSystemPermissionId()) { // we found a match by sysId - now let's see if we need to update existing or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && !requestedPermission.isGrantableFrom(existingDirectPermission)) { // requested permission has higher granting rights than // the already existing direct permission, so we need to update it updatePermissions.add(requestedPermission); } existingPermission = true; break; } } } else { final ResourcePermission requestedPostCreateResourcePermission = requestedPermission.getPostCreateResourcePermission(); for (ResourceCreatePermission existingDirectPermission : directAccessorPermissions) { if (!existingDirectPermission.isSystemPermission()) { final ResourcePermission existingPostCreateResourcePermission = existingDirectPermission.getPostCreateResourcePermission(); if (requestedPostCreateResourcePermission.equalsIgnoreGrant(existingPostCreateResourcePermission)) { // found a match in name - let's check compatibility first if (requestedPermission.isWithGrant() != requestedPostCreateResourcePermission.isWithGrant() && existingDirectPermission.isWithGrant() != existingPostCreateResourcePermission.isWithGrant() && requestedPermission.isWithGrant() != existingDirectPermission.isWithGrant()) { // the requested permission is incompatible to the existing permission because we can't // perform grant operations (a)/G -> (a/G) or (a/G) -> (a)/G without removing either the // create or post-create granting option throw new IllegalArgumentException("Requested create permissions " + requestedResourceCreatePermissions + " are incompatible with existing create permissions " + directAccessorPermissions); } // now let's see if we need to update existing permission or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && ((requestedPermission.isWithGrant() && requestedPostCreateResourcePermission.isWithGrant()) || (!existingDirectPermission.isWithGrant() && !existingPostCreateResourcePermission.isWithGrant()))) { // the two permissions match in name, but the requested has higher granting rights, // so we need to update updatePermissions.add(requestedPermission); } // because we found a match in name, we can skip comparing requested against other existing permissions existingPermission = true; break; } } } } if (!existingPermission) { // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it addPermissions.add(requestedPermission); } } // update *CREATE system permission, if necessary grantResourceCreatePermissionSysPersister.updateResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, updatePermissions, sessionResource); // update any post create system permissions, if necessary grantResourceCreatePermissionPostCreateSysPersister.updateResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, updatePermissions, sessionResource); // update any post create non-system permissions, if necessary grantResourceCreatePermissionPostCreatePersister.updateResourceCreatePostCreatePermissions(connection, accessorResource, resourceClassId, domainId, updatePermissions, sessionResource); // grant the *CREATE system permissions, if necessary grantResourceCreatePermissionSysPersister.addResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, addPermissions, sessionResource); // grant any post create 
system permissions, if necessary grantResourceCreatePermissionPostCreateSysPersister.addResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, addPermissions, sessionResource); // grant any post create non-system permissions, if necessary grantResourceCreatePermissionPostCreatePersister.addResourceCreatePostCreatePermissions(connection, accessorResource, resourceClassId, domainId, addPermissions, sessionResource); } @Override public void revokeResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourceCreatePermissions); __assertPermissionsSetNotEmpty(resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, resourceCreatePermissions); } finally { __closeConnection(connection); } } @Override public void revokeResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourceCreatePermission resourceCreatePermission, ResourceCreatePermission... resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourceCreatePermission); __assertVarargPermissionsSpecified(resourceCreatePermissions); final Set<ResourceCreatePermission> requestedResourceCreatePermissions = __getSetWithoutNullsOrDuplicates(resourceCreatePermission, resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, requestedResourceCreatePermissions); } finally { __closeConnection(connection); } } private void __revokeDirectResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> requestedResourceCreatePermissions) { // verify that resource class is defined and get its metadata final ResourceClassInternalInfo resourceClassInfo = __getResourceClassInternalInfo(connection, resourceClassName); final Id<ResourceClassId> resourceClassId = Id.from(resourceClassInfo.getResourceClassId()); // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } __assertUniquePostCreatePermissionsNamesForResourceClass(connection, requestedResourceCreatePermissions, resourceClassInfo); // check if the grantor (=session resource) is authorized to grant the requested permissions if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) { final Set<ResourceCreatePermission> grantorPermissions = __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); final Set<ResourceCreatePermission> unauthorizedPermissions = 
__subtractResourceCreatePermissionsIfGrantableFrom(requestedResourceCreatePermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "revoke the following permission(s): " + unauthorizedPermissions); } } // ensure that the *CREATE system permissions will remain if not all are cleared final Set<ResourceCreatePermission> directAccessorPermissions = __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassId, domainId); if ((directAccessorPermissions.size() > requestedResourceCreatePermissions.size()) && __setContainsResourceCreateSystemPermission(requestedResourceCreatePermissions)) { // our invariant is that a resource's direct create permissions must include the *CREATE system permission; // if after revoking the requested permissions, the remaining set wouldn't include the *CREATE, we'd have a problem throw new IllegalArgumentException( "Attempt to revoke a subset of resource create permissions that includes the *CREATE system permission: " + requestedResourceCreatePermissions); } final Set<ResourceCreatePermission> removePermissions = new HashSet<>(requestedResourceCreatePermissions.size()); for (ResourceCreatePermission requestedPermission : requestedResourceCreatePermissions) { if (requestedPermission.isSystemPermission()) { for (ResourceCreatePermission existingDirectPermission : directAccessorPermissions) { if (existingDirectPermission.isSystemPermission() && requestedPermission.getSystemPermissionId() == existingDirectPermission.getSystemPermissionId()) { // requested permission has same system Id as an already existing direct permission, so remove it removePermissions.add(requestedPermission); break; } } } else { final ResourcePermission requestedPostCreateResourcePermission = requestedPermission.getPostCreateResourcePermission(); for (ResourceCreatePermission existingDirectPermission : directAccessorPermissions) { if (!existingDirectPermission.isSystemPermission()) { // now let's look at the post-create permissions if (requestedPostCreateResourcePermission .equalsIgnoreGrant(existingDirectPermission.getPostCreateResourcePermission())) { // requested post-create permission has same name as an already existing direct permission, so remove it removePermissions.add(requestedPermission); break; } } } } } // remove *CREATE system permission, if necessary grantResourceCreatePermissionSysPersister.removeResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); // remove any post create system permissions, if necessary grantResourceCreatePermissionPostCreateSysPersister.removeResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); // remove any post create non-system permissions, if necessary grantResourceCreatePermissionPostCreatePersister.removeResourceCreatePostCreatePermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); } private boolean __setContainsResourceCreateSystemPermission(Set<ResourceCreatePermission> resourceCreatePermissions) { for (final ResourceCreatePermission resourceCreatePermission : resourceCreatePermissions) { if (resourceCreatePermission.isSystemPermission() && ResourceCreatePermissions.CREATE.equals(resourceCreatePermission.getPermissionName())) { return true; } } return false; } @Override public Set<ResourceCreatePermission> getResourceCreatePermissions(Resource accessorResource, String resourceClassName, String 
domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName); } finally { __closeConnection(connection); } } private Set<ResourceCreatePermission> __getDirectResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName) { // verify that resource class is defined Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } return __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassId, domainId); } @Override public Map<String, Map<String, Set<ResourceCreatePermission>>> getResourceCreatePermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getDirectResourceCreatePermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Map<String, Set<ResourceCreatePermission>>> __getDirectResourceCreatePermissionsMap(SQLConnection connection, Resource accessorResource) { // collect all the create permissions that the accessor has Map<String, Map<String, Set<ResourceCreatePermission>>> allResourceCreatePermissionsMap = new HashMap<>(); // read the *CREATE system permissions and add to allResourceCreatePermissionsMap allResourceCreatePermissionsMap .putAll(grantResourceCreatePermissionSysPersister.getResourceCreateSysPermissions(connection, accessorResource)); // read the post create system permissions and add to allResourceCreatePermissionsMap __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap( grantResourceCreatePermissionPostCreateSysPersister .getResourceCreatePostCreateSysPermissions(connection, accessorResource), allResourceCreatePermissionsMap); // read the post create non-system permissions and add to allResourceCreatePermissionsMap __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap( grantResourceCreatePermissionPostCreatePersister .getResourceCreatePostCreatePermissions(connection, accessorResource), allResourceCreatePermissionsMap); return __collapseResourceCreatePermissions(allResourceCreatePermissionsMap); } @Override public Set<ResourceCreatePermission> getEffectiveResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); try { connection = __getConnection(); 
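         // Descriptive note: the remaining steps follow the same pattern as the other query methods
         // in this class -- resolve the accessor to its canonical resource, verify the session may
         // query on its behalf, trim the resource class and domain names, and then delegate to
         // __getEffectiveResourceCreatePermissions(), which short-circuits to every applicable
         // create permission when the accessor is a super-user of the domain.
         //
         // Caller-side sketch (hedged; resource class "document" and domain "sales" are made-up
         // examples, and ResourceCreatePermissions.getInstance() is assumed from the public API):
         //
         //    Set<ResourceCreatePermission> effective =
         //          accessControlContext.getEffectiveResourceCreatePermissions(accessor, "document", "sales");
         //    boolean mayCreate = effective.contains(
         //          ResourceCreatePermissions.getInstance(ResourceCreatePermissions.CREATE));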
accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __getEffectiveResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName); } finally { __closeConnection(connection); } } private Set<ResourceCreatePermission> __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName) { // verify that resource class is defined Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // collect the create permissions that this resource has to this resource class Set<ResourceCreatePermission> resourceCreatePermissions = new HashSet<>(); // first read the *CREATE system permission the accessor has to the specified resource class resourceCreatePermissions.addAll( grantResourceCreatePermissionSysPersister.getResourceCreateSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // next read the post create system permissions the accessor has to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreateSysPersister .getResourceCreatePostCreateSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // next read the post create non-system permissions the accessor has to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreatePersister .getResourceCreatePostCreatePermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); return __collapseResourceCreatePermissions(resourceCreatePermissions); } private Set<ResourceCreatePermission> __getEffectiveResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName) { // verify that resource class is defined final ResourceClassInternalInfo resourceClassInternalInfo = __getResourceClassInternalInfo(connection, resourceClassName); // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } if (__isSuperUserOfDomain(connection, accessorResource, domainName)) { return __getApplicableResourceCreatePermissions(connection, resourceClassInternalInfo); } Id<ResourceClassId> resourceClassId = Id.from(resourceClassInternalInfo.getResourceClassId()); // collect the create permissions that this resource has to this resource class Set<ResourceCreatePermission> resourceCreatePermissions = new HashSet<>(); // first read the *CREATE system permission the accessor has to the specified resource class resourceCreatePermissions.addAll( grantResourceCreatePermissionSysPersister.getResourceCreateSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // next read the post create system permissions the accessor has to the specified resource class resourceCreatePermissions 
.addAll(grantResourceCreatePermissionPostCreateSysPersister .getResourceCreatePostCreateSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // next read the post create non-system permissions the accessor has to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreatePersister .getResourceCreatePostCreatePermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); return __collapseResourceCreatePermissions(resourceCreatePermissions); } private Set<ResourceCreatePermission> __collapseResourceCreatePermissions(Set<ResourceCreatePermission> resourceCreatePermissions) { final Set<ResourceCreatePermission> collapsedPermissions = new HashSet<>(resourceCreatePermissions); for (ResourceCreatePermission permission : resourceCreatePermissions) { for (ResourceCreatePermission grantEquivalentPermission : resourceCreatePermissions) { if (permission.isGrantableFrom(grantEquivalentPermission) && !permission.equals(grantEquivalentPermission)) { collapsedPermissions.remove(permission); break; } } } return collapsedPermissions; } @Override public Map<String, Map<String, Set<ResourceCreatePermission>>> getEffectiveResourceCreatePermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveResourceCreatePermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Map<String, Set<ResourceCreatePermission>>> __getEffectiveResourceCreatePermissionsMap( SQLConnection connection, Resource accessorResource) { // collect all the create permissions that the accessor has Map<String, Map<String, Set<ResourceCreatePermission>>> allResourceCreatePermissionsMap = new HashMap<>(); // read the *CREATE system permissions and add to allResourceCreatePermissionsMap allResourceCreatePermissionsMap .putAll(grantResourceCreatePermissionSysPersister .getResourceCreateSysPermissionsIncludeInherited(connection, accessorResource)); // read the post create system permissions and add to allResourceCreatePermissionsMap __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap( grantResourceCreatePermissionPostCreateSysPersister .getResourceCreatePostCreateSysPermissionsIncludeInherited(connection, accessorResource), allResourceCreatePermissionsMap); // read the post create non-system permissions and add to allResourceCreatePermissionsMap __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap( grantResourceCreatePermissionPostCreatePersister .getResourceCreatePostCreatePermissionsIncludeInherited(connection, accessorResource), allResourceCreatePermissionsMap); // finally, collect all applicable create permissions when accessor has super-user privileges to any domain // and add them into the globalALLPermissionsMap final Map<String, Map<String, Set<ResourceCreatePermission>>> allSuperResourceCreatePermissionsMap = new HashMap<>(); Map<String, Set<ResourceCreatePermission>> superResourceCreatePermissionsMap = null; final Map<String, Set<DomainPermission>> effectiveDomainPermissionsMap = __getEffectiveDomainPermissionsMap(connection, accessorResource); for (String domainName : effectiveDomainPermissionsMap.keySet()) { final Set<DomainPermission> effectiveDomainPermissions = 
effectiveDomainPermissionsMap.get(domainName); if (effectiveDomainPermissions.contains(DomainPermission_SUPER_USER) || effectiveDomainPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { if (superResourceCreatePermissionsMap == null) { // lazy-construct super-user-privileged resource-permissions map by resource classes final List<String> resourceClassNames = resourceClassPersister.getResourceClassNames(connection); superResourceCreatePermissionsMap = new HashMap<>(resourceClassNames.size()); for (String resourceClassName : resourceClassNames) { final Set<ResourceCreatePermission> applicableResourceCreatePermissions = __getApplicableResourceCreatePermissions(connection, __getResourceClassInternalInfo(connection, resourceClassName)); superResourceCreatePermissionsMap.put(resourceClassName, applicableResourceCreatePermissions); } } allSuperResourceCreatePermissionsMap.put(domainName, superResourceCreatePermissionsMap); } } __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap(allSuperResourceCreatePermissionsMap, allResourceCreatePermissionsMap); return __collapseResourceCreatePermissions(allResourceCreatePermissionsMap); } private void __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap(Map<String, Map<String, Set<ResourceCreatePermission>>> sourceCreatePermissionsMap, Map<String, Map<String, Set<ResourceCreatePermission>>> targetCreatePermissionsMap) { for (String domainName : sourceCreatePermissionsMap.keySet()) { Map<String, Set<ResourceCreatePermission>> targetCreatePermsForDomainMap; // does the target map have domain? if ((targetCreatePermsForDomainMap = targetCreatePermissionsMap.get(domainName)) == null) { // no, add the domain targetCreatePermissionsMap.put(domainName, targetCreatePermsForDomainMap = new HashMap<>()); } for (String resourceClassName : sourceCreatePermissionsMap.get(domainName).keySet()) { Set<ResourceCreatePermission> targetCreatePermsForClassSet; // does the target map have the resource class? 
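            // Editorial note: the check-then-install idiom below (and the analogous one for the
            // domain level above) lazily creates the nested collection while merging. On Java 8+
            // an equivalent formulation -- a sketch, not the author's code -- would be:
            //
            //    targetCreatePermsForDomainMap
            //          .computeIfAbsent(resourceClassName, k -> new HashSet<>())
            //          .addAll(sourceCreatePermissionsMap.get(domainName).get(resourceClassName));
            //
            // which performs the same merge for the domain + resource class pair.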
            if ((targetCreatePermsForClassSet = targetCreatePermsForDomainMap.get(resourceClassName)) == null) {
               // no, add the resource class
               targetCreatePermsForDomainMap.put(resourceClassName, targetCreatePermsForClassSet = new HashSet<>());
            }

            // get the source permissions for the domain + resource class
            final Set<ResourceCreatePermission> sourceCreatePermsForClassSet
                  = sourceCreatePermissionsMap.get(domainName).get(resourceClassName);

            // add the source permissions above to the target for the respective domain + resource class
            targetCreatePermsForClassSet.addAll(sourceCreatePermsForClassSet);
         }
      }
   }

   private Map<String, Map<String, Set<ResourceCreatePermission>>> __collapseResourceCreatePermissions(
         Map<String, Map<String, Set<ResourceCreatePermission>>> resourceCreatePermissionsMap) {
      for (String domainName : resourceCreatePermissionsMap.keySet()) {
         final Map<String, Set<ResourceCreatePermission>> createPermissionsByDomainMap = resourceCreatePermissionsMap.get(domainName);

         for (String resourceClassName : createPermissionsByDomainMap.keySet()) {
            final Set<ResourceCreatePermission> createPermissionsByResourceClassMap = createPermissionsByDomainMap.get(resourceClassName);
            createPermissionsByDomainMap.put(resourceClassName,
                                             __collapseResourceCreatePermissions(createPermissionsByResourceClassMap));
         }
      }
      return resourceCreatePermissionsMap;
   }

   @Override
   public void setResourcePermissions(Resource accessorResource,
                                      Resource accessedResource,
                                      Set<ResourcePermission> resourcePermissions) {
      SQLConnection connection = null;

      __assertAuthenticated();
      __assertResourceSpecified(accessorResource);
      __assertResourceSpecified(accessedResource);
      __assertPermissionsSpecified(resourcePermissions);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);
         accessedResource = __resolveResource(connection, accessedResource);

         __setDirectResourcePermissions(connection, accessorResource, accessedResource,
                                        resourcePermissions, sessionResource, false);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private void __setDirectResourcePermissions(SQLConnection connection,
                                               Resource accessorResource,
                                               Resource accessedResource,
                                               Set<ResourcePermission> requestedResourcePermissions,
                                               Resource grantorResource,
                                               boolean newResourceMode) {
      final ResourceClassInternalInfo accessedResourceClassInternalInfo
            = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource);

      // next ensure that the requested permissions are all in the correct resource class
      __assertUniqueResourcePermissionsNamesForResourceClass(connection, requestedResourcePermissions, accessedResourceClassInternalInfo);

      // if this method is being called to set the post create permissions on a newly created resource
      // we do not perform the security checks below, since it would be incorrect
      if (!newResourceMode) {
         if (!__isSuperUserOfResource(connection, grantorResource, accessedResource)) {
            // next check if the grantor (i.e. session resource) has permissions to grant the requested permissions
            final Set<ResourcePermission> grantorResourcePermissions
                  = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, grantorResource, accessedResource);
            final Set<ResourcePermission> directAccessorResourcePermissions
                  = __getDirectResourcePermissions(connection, accessorResource, accessedResource);

            final Set<ResourcePermission> requestedAddPermissions = __subtract(requestedResourcePermissions, directAccessorResourcePermissions);

            if (requestedAddPermissions.size() > 0) {
               final Set<ResourcePermission> unauthorizedAddPermissions
                     = __subtractResourcePermissionsIfGrantableFrom(requestedAddPermissions, grantorResourcePermissions);

               if (unauthorizedAddPermissions.size() > 0) {
                  throw NotAuthorizedException.newInstanceForAction(grantorResource,
                                                                    "add the following permission(s): " + unauthorizedAddPermissions);
               }
            }

            final Set<ResourcePermission> requestedRemovePermissions = __subtract(directAccessorResourcePermissions, requestedResourcePermissions);

            if (requestedRemovePermissions.size() > 0) {
               final Set<ResourcePermission> unauthorizedRemovePermissions
                     = __subtractResourcePermissionsIfGrantableFrom(requestedRemovePermissions, grantorResourcePermissions);

               if (unauthorizedRemovePermissions.size() > 0) {
                  throw NotAuthorizedException.newInstanceForAction(grantorResource,
                                                                    "remove the following permission(s): " + unauthorizedRemovePermissions);
               }
            }
         }

         // if inherit permissions are about to be granted, first check for cycles
         if (requestedResourcePermissions.contains(ResourcePermission_INHERIT)
               || requestedResourcePermissions.contains(ResourcePermission_INHERIT_GRANT)) {
            Set<ResourcePermission> reversePathResourcePermissions
                  = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, accessedResource, accessorResource);

            if (reversePathResourcePermissions.contains(ResourcePermission_INHERIT)
                  || reversePathResourcePermissions.contains(ResourcePermission_INHERIT_GRANT)
                  || accessorResource.equals(accessedResource)) {
               throw new OaccException("Granting the requested permission(s): " + requestedResourcePermissions
                                             + " will cause a cycle between: " + accessorResource
                                             + " and: " + accessedResource);
            }
         }

         // revoke any existing direct system permissions between the accessor and the accessed resource
         grantResourcePermissionSysPersister.removeResourceSysPermissions(connection, accessorResource, accessedResource);

         // revoke any existing direct non-system permissions between the accessor and the accessed resource
         grantResourcePermissionPersister.removeResourcePermissions(connection, accessorResource, accessedResource);
      }

      // add the new direct system permissions
      grantResourcePermissionSysPersister.addResourceSysPermissions(connection, accessorResource, accessedResource,
                                                                    Id.<ResourceClassId>from(accessedResourceClassInternalInfo.getResourceClassId()),
                                                                    requestedResourcePermissions, grantorResource);

      // add the new direct non-system permissions
      grantResourcePermissionPersister.addResourcePermissions(connection, accessorResource, accessedResource,
                                                              Id.<ResourceClassId>from(accessedResourceClassInternalInfo.getResourceClassId()),
                                                              requestedResourcePermissions, grantorResource);
   }

   private void __assertUniqueResourcePermissionsNamesForResourceClass(SQLConnection connection,
                                                                       Set<ResourcePermission> resourcePermissions,
                                                                       ResourceClassInternalInfo resourceClassInternalInfo) {
      final List<String> validPermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo);
      final Set<String> uniquePermissionNames = new
HashSet<>(resourcePermissions.size()); for (final ResourcePermission resourcePermission : resourcePermissions) { if (!validPermissionNames.contains(resourcePermission.getPermissionName())) { if (resourcePermission.isSystemPermission()) { // currently the only invalid system permissions are for unauthenticatable resource classes throw new IllegalArgumentException("Permission: " + resourcePermission.getPermissionName() + ", not valid for unauthenticatable resource"); } else { throw new IllegalArgumentException("Permission: " + resourcePermission.getPermissionName() + " is not defined for resource class: " + resourceClassInternalInfo.getResourceClassName()); } } if (uniquePermissionNames.contains(resourcePermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + resourcePermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniquePermissionNames.add(resourcePermission.getPermissionName()); } } } private Set<ResourcePermission> __subtractResourcePermissionsIfGrantableFrom(Set<ResourcePermission> candidatePermissionSet, Set<ResourcePermission> grantorPermissionSet) { Set<ResourcePermission> differenceSet = new HashSet<>(candidatePermissionSet); for (ResourcePermission candidatePermission : candidatePermissionSet) { for (ResourcePermission grantorPermission : grantorPermissionSet) { if (candidatePermission.isGrantableFrom(grantorPermission)) { differenceSet.remove(candidatePermission); break; } } } return differenceSet; } @Override public void grantResourcePermissions(Resource accessorResource, Resource accessedResource, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); __grantDirectResourcePermissions(connection, accessorResource, accessedResource, resourcePermissions); } finally { __closeConnection(connection); } } @Override public void grantResourcePermissions(Resource accessorResource, Resource accessedResource, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); __grantDirectResourcePermissions(connection, accessorResource, accessedResource, requestedResourcePermissions); } finally { __closeConnection(connection); } } private void __grantDirectResourcePermissions(SQLConnection connection, Resource accessorResource, Resource accessedResource, Set<ResourcePermission> requestedResourcePermissions) { final ResourceClassInternalInfo accessedResourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource); // next ensure that the requested permissions are all in the correct resource class __assertUniqueResourcePermissionsNamesForResourceClass(connection, requestedResourcePermissions, accessedResourceClassInternalInfo); // check for authorization if (!__isSuperUserOfResource(connection, sessionResource, accessedResource)) { final Set<ResourcePermission> grantorResourcePermissions = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, accessedResource); final Set<ResourcePermission> unauthorizedPermissions = __subtractResourcePermissionsIfGrantableFrom(requestedResourcePermissions, grantorResourcePermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "grant the following permission(s): " + unauthorizedPermissions); } } final Set<ResourcePermission> directAccessorResourcePermissions = __getDirectResourcePermissions(connection, accessorResource, accessedResource); final Set<ResourcePermission> addPermissions = new HashSet<>(requestedResourcePermissions.size()); final Set<ResourcePermission> updatePermissions = new HashSet<>(requestedResourcePermissions.size()); for (ResourcePermission requestedPermission : requestedResourcePermissions) { boolean existingPermission = false; for (ResourcePermission existingDirectPermission : directAccessorResourcePermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // found a match by name - now let's see if we need to update existing or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && !requestedPermission.isGrantableFrom(existingDirectPermission)) { // requested permission has higher granting rights than the already existing direct permission, // so we need to update it updatePermissions.add(requestedPermission); } existingPermission = true; break; } } if (!existingPermission) { // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it addPermissions.add(requestedPermission); } } // if inherit permissions are about to be granted, first check for cycles if (addPermissions.contains(ResourcePermission_INHERIT) || addPermissions.contains(ResourcePermission_INHERIT_GRANT)) { Set<ResourcePermission> reversePathResourcePermissions = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, accessedResource, accessorResource); if 
(reversePathResourcePermissions.contains(ResourcePermission_INHERIT) || reversePathResourcePermissions.contains(ResourcePermission_INHERIT_GRANT) || accessorResource.equals(accessedResource)) { throw new OaccException("Granting the requested permission(s): " + requestedResourcePermissions + " will cause a cycle between: " + accessorResource + " and: " + accessedResource); } } // update any necessary direct system permissions between the accessor and the accessed resource grantResourcePermissionSysPersister.updateResourceSysPermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from( accessedResourceClassInternalInfo.getResourceClassId()), updatePermissions, sessionResource); // update any necessary direct non-system permissions between the accessor and the accessed resource grantResourcePermissionPersister.updateResourcePermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from( accessedResourceClassInternalInfo.getResourceClassId()), updatePermissions, sessionResource); // add the new direct system permissions grantResourcePermissionSysPersister.addResourceSysPermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from( accessedResourceClassInternalInfo.getResourceClassId()), addPermissions, sessionResource); // add the new direct non-system permissions grantResourcePermissionPersister.addResourcePermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from(accessedResourceClassInternalInfo.getResourceClassId()), addPermissions, sessionResource); } @Override public void revokeResourcePermissions(Resource accessorResource, Resource accessedResource, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); __revokeDirectResourcePermissions(connection, accessorResource, accessedResource, resourcePermissions); } finally { __closeConnection(connection); } } @Override public void revokeResourcePermissions(Resource accessorResource, Resource accessedResource, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> obsoleteResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); __revokeDirectResourcePermissions(connection, accessorResource, accessedResource, obsoleteResourcePermissions); } finally { __closeConnection(connection); } } private void __revokeDirectResourcePermissions(SQLConnection connection, Resource accessorResource, Resource accessedResource, Set<ResourcePermission> obsoleteResourcePermissions) { final ResourceClassInternalInfo accessedResourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource); // next ensure that the requested permissions are unique in name __assertUniqueResourcePermissionsNamesForResourceClass(connection, obsoleteResourcePermissions, accessedResourceClassInternalInfo); // check for authorization if (!__isSuperUserOfResource(connection, sessionResource, accessedResource)) { final Set<ResourcePermission> grantorResourcePermissions = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, accessedResource); final Set<ResourcePermission> unauthorizedPermissions = __subtractResourcePermissionsIfGrantableFrom(obsoleteResourcePermissions, grantorResourcePermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "revoke the following permission(s): " + unauthorizedPermissions); } } final Set<ResourcePermission> directAccessorResourcePermissions = __getDirectResourcePermissions(connection, accessorResource, accessedResource); final Set<ResourcePermission> removePermissions = new HashSet<>(obsoleteResourcePermissions.size()); for (ResourcePermission requestedPermission : obsoleteResourcePermissions) { for (ResourcePermission existingDirectPermission : directAccessorResourcePermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // requested permission has same name and regardless of granting rights we need to remove it removePermissions.add(requestedPermission); break; } } } // update any necessary direct system permissions between the accessor and the accessed resource grantResourcePermissionSysPersister.removeResourceSysPermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from( accessedResourceClassInternalInfo .getResourceClassId()), removePermissions); // update any necessary direct non-system permissions between the accessor and the accessed resource grantResourcePermissionPersister.removeResourcePermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from( accessedResourceClassInternalInfo .getResourceClassId()), removePermissions); } @Override public Set<ResourcePermission> getResourcePermissions(Resource accessorResource, Resource accessedResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = 
__resolveResource(connection, accessedResource);
         __assertQueryAuthorization(connection, accessorResource);

         return __getDirectResourcePermissions(connection, accessorResource, accessedResource);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private Set<ResourcePermission> __getDirectResourcePermissions(SQLConnection connection,
                                                                  Resource accessorResource,
                                                                  Resource accessedResource) {
      Set<ResourcePermission> resourcePermissions = new HashSet<>();

      // collect the system permissions that the accessor resource has to the accessed resource
      resourcePermissions.addAll(grantResourcePermissionSysPersister.getResourceSysPermissions(connection, accessorResource, accessedResource));

      // collect the non-system permissions that the accessor has to the accessed resource
      resourcePermissions.addAll(grantResourcePermissionPersister.getResourcePermissions(connection, accessorResource, accessedResource));

      return resourcePermissions;
   }

   @Override
   public Set<ResourcePermission> getEffectiveResourcePermissions(Resource accessorResource, Resource accessedResource) {
      SQLConnection connection = null;

      __assertAuthenticated();
      __assertResourceSpecified(accessorResource);
      __assertResourceSpecified(accessedResource);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);
         accessedResource = __resolveResource(connection, accessedResource);
         __assertQueryAuthorization(connection, accessorResource);

         return __getEffectiveResourcePermissions(connection, accessorResource, accessedResource);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private Set<ResourcePermission> __getEffectiveResourcePermissions(SQLConnection connection,
                                                                     Resource accessorResource,
                                                                     Resource accessedResource) {
      Set<ResourcePermission> resourcePermissions = new HashSet<>();

      final Id<DomainId> accessedDomainId = resourcePersister.getDomainIdByResource(connection, accessedResource);
      final ResourceClassInternalInfo resourceClassInternalInfo
            = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource);

      if (__isSuperUserOfDomain(connection, accessorResource, accessedDomainId)) {
         return __getApplicableResourcePermissions(connection, resourceClassInternalInfo);
      }

      // collect the system permissions that the accessor resource has to the accessed resource
      resourcePermissions.addAll(grantResourcePermissionSysPersister
                                       .getResourceSysPermissionsIncludeInherited(connection, accessorResource, accessedResource));

      // collect the non-system permissions that the accessor has to the accessed resource
      resourcePermissions.addAll(grantResourcePermissionPersister.getResourcePermissionsIncludeInherited(connection, accessorResource, accessedResource));

      final Id<ResourceClassId> accessedResourceClassId = Id.from(resourceClassInternalInfo.getResourceClassId());

      // collect the global system permissions that the accessor has to the accessed resource's domain
      resourcePermissions.addAll(grantGlobalResourcePermissionSysPersister
                                       .getGlobalSysPermissionsIncludeInherited(connection, accessorResource, accessedResourceClassId, accessedDomainId));

      // collect the global non-system permissions that the accessor resource has to the accessed resource's domain
      resourcePermissions.addAll(grantGlobalResourcePermissionPersister
                                       .getGlobalResourcePermissionsIncludeInherited(connection, accessorResource, accessedResourceClassId, accessedDomainId));

      return __collapseResourcePermissions(resourcePermissions);
   }

   private Set<ResourcePermission> __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(SQLConnection connection,
                                                                                                Resource accessorResource,
                                                                                                Resource accessedResource) {
      Set<ResourcePermission> resourcePermissions = new HashSet<>();

      // collect the system permissions that the accessor resource has to the accessed resource
      resourcePermissions.addAll(grantResourcePermissionSysPersister
                                       .getResourceSysPermissionsIncludeInherited(connection, accessorResource, accessedResource));

      // collect the non-system permissions that the accessor has to the accessed resource
      resourcePermissions.addAll(grantResourcePermissionPersister.getResourcePermissionsIncludeInherited(connection, accessorResource, accessedResource));

      final Id<DomainId> accessedDomainId = resourcePersister.getDomainIdByResource(connection, accessedResource);
      final Id<ResourceClassId> accessedResourceClassId
            = Id.from(resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource).getResourceClassId());

      // collect the global system permissions that the accessor has to the accessed resource's domain
      resourcePermissions.addAll(grantGlobalResourcePermissionSysPersister
                                       .getGlobalSysPermissionsIncludeInherited(connection, accessorResource, accessedResourceClassId, accessedDomainId));

      // collect the global non-system permissions that the accessor resource has to the accessed resource's domain
      resourcePermissions.addAll(grantGlobalResourcePermissionPersister
                                       .getGlobalResourcePermissionsIncludeInherited(connection, accessorResource, accessedResourceClassId, accessedDomainId));

      return __collapseResourcePermissions(resourcePermissions);
   }

   @Override
   public void setGlobalResourcePermissions(Resource accessorResource,
                                            String resourceClassName,
                                            String domainName,
                                            Set<ResourcePermission> resourcePermissions) {
      SQLConnection connection = null;

      __assertAuthenticated();
      __assertResourceSpecified(accessorResource);
      __assertResourceClassSpecified(resourceClassName);
      __assertDomainSpecified(domainName);
      __assertPermissionsSpecified(resourcePermissions);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);
         resourceClassName = resourceClassName.trim();
         domainName = domainName.trim();

         __setDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private void __setDirectGlobalPermissions(SQLConnection connection,
                                             Resource accessorResource,
                                             String resourceClassName,
                                             String domainName,
                                             Set<ResourcePermission> requestedResourcePermissions) {
      // verify that resource class is defined
      final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName);

      if (resourceClassId == null) {
         throw new IllegalArgumentException("Could not find resource class: " + resourceClassName);
      }

      final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfo(connection, resourceClassName);

      // verify the domain
      final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName);

      if (domainId == null) {
         throw new IllegalArgumentException("Could not find domain: " + domainName);
      }

      // next ensure that the requested permissions are all in the correct resource class
      __assertUniqueGlobalResourcePermissionNamesForResourceClass(connection, requestedResourcePermissions, resourceClassInternalInfo);

      if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) {
         // check if the grantor (=session resource) is authorized to grant the requested permissions
         final Set<ResourcePermission> grantorPermissions =
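               // note: this lookup intentionally ignores super-user privileges; the enclosing __isSuperUserOfDomain() check above already handles that case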
__getEffectiveGlobalResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); final Set<ResourcePermission> directAccessorPermissions = __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId); final Set<ResourcePermission> requestedAddPermissions = __subtract(requestedResourcePermissions, directAccessorPermissions); if (!requestedAddPermissions.isEmpty()) { final Set<ResourcePermission> unauthorizedAddPermissions = __subtractResourcePermissionsIfGrantableFrom(requestedAddPermissions, grantorPermissions); if (unauthorizedAddPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "add the following global permission(s): " + unauthorizedAddPermissions); } } final Set<ResourcePermission> requestedRemovePermissions = __subtract(directAccessorPermissions, requestedResourcePermissions); if (!requestedRemovePermissions.isEmpty()) { final Set<ResourcePermission> unauthorizedRemovePermissions = __subtractResourcePermissionsIfGrantableFrom(requestedRemovePermissions, grantorPermissions); if (unauthorizedRemovePermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "remove the following global permission(s): " + unauthorizedRemovePermissions); } } } // revoke any existing system permissions this accessor has to this domain + resource class grantGlobalResourcePermissionSysPersister.removeGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId); // revoke any existing non-system permissions that this grantor gave this accessor to this domain to the resource class grantGlobalResourcePermissionPersister.removeGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId); // add the new system permissions grantGlobalResourcePermissionSysPersister.addGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId, requestedResourcePermissions, sessionResource); // add the new non-system permissions grantGlobalResourcePermissionPersister.addGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId, requestedResourcePermissions, sessionResource); } private Set<ResourcePermission> __getDirectGlobalResourcePermissions(SQLConnection connection, Resource accessorResource, Id<ResourceClassId> resourceClassId, Id<DomainId> domainId) { Set<ResourcePermission> resourcePermissions = new HashSet<>(); // collect the global system permissions that the accessor resource has to the accessed resource class & domain directly resourcePermissions.addAll(grantGlobalResourcePermissionSysPersister.getGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId)); // collect the global non-system permissions that the accessor has to the accessed resource class & domain directly resourcePermissions.addAll(grantGlobalResourcePermissionPersister.getGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId)); return resourcePermissions; } private void __assertUniqueGlobalResourcePermissionNamesForResourceClass(SQLConnection connection, Set<ResourcePermission> requestedResourcePermissions, ResourceClassInternalInfo resourceClassInternalInfo) { final List<String> validPermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo); final HashSet<String> uniquePermissionNames = new HashSet<>(requestedResourcePermissions.size()); for (ResourcePermission resourcePermission : requestedResourcePermissions) { if 
(resourcePermission.isSystemPermission() && ResourcePermission_INHERIT.equals(resourcePermission)) { // we prohibit granting the system INHERIT permission, since cycle checking may be prohibitively compute intensive throw new IllegalArgumentException("Permission: " + String.valueOf(resourcePermission) + ", not valid in this context"); } if (!validPermissionNames.contains(resourcePermission.getPermissionName())) { if (resourcePermission.isSystemPermission()) { // currently the only invalid system permissions are for unauthenticatable resource classes throw new IllegalArgumentException("Permission " + resourcePermission.getPermissionName() + " not valid for unauthenticatable resource of class " + resourceClassInternalInfo.getResourceClassName()); } else { throw new IllegalArgumentException("Permission: " + resourcePermission.getPermissionName() + " is not defined for resource class: " + resourceClassInternalInfo.getResourceClassName()); } } if (uniquePermissionNames.contains(resourcePermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + resourcePermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniquePermissionNames.add(resourcePermission.getPermissionName()); } } } @Override public void grantGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); __grantDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public void grantGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) {
      SQLConnection connection = null;

      __assertAuthenticated();
      __assertResourceSpecified(accessorResource);
      __assertResourceClassSpecified(resourceClassName);
      __assertDomainSpecified(domainName);
      __assertPermissionSpecified(resourcePermission);
      __assertVarargPermissionsSpecified(resourcePermissions);

      final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);
         resourceClassName = resourceClassName.trim();
         domainName = domainName.trim();

         __grantDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private void __grantDirectGlobalPermissions(SQLConnection connection,
                                               Resource accessorResource,
                                               String resourceClassName,
                                               String domainName,
                                               Set<ResourcePermission> requestedResourcePermissions) {
      // verify that resource class is defined
      final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName);

      if (resourceClassId == null) {
         throw new IllegalArgumentException("Could not find resource class: " + resourceClassName);
      }

      final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfo(connection, resourceClassName);

      // verify the domain
      final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName);

      if (domainId == null) {
         throw new IllegalArgumentException("Could not find domain: " + domainName);
      }

      // next ensure that the requested permissions are all in the correct resource class
      __assertUniqueGlobalResourcePermissionNamesForResourceClass(connection, requestedResourcePermissions, resourceClassInternalInfo);

      // check for authorization
      if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) {
         final Set<ResourcePermission> grantorPermissions
               = __getEffectiveGlobalResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName);
         final Set<ResourcePermission> unauthorizedPermissions
               = __subtractResourcePermissionsIfGrantableFrom(requestedResourcePermissions, grantorPermissions);

         if (unauthorizedPermissions.size() > 0) {
            throw NotAuthorizedException.newInstanceForAction(sessionResource,
                                                              "grant the following global permission(s): " + unauthorizedPermissions);
         }
      }

      final Set<ResourcePermission> directAccessorPermissions
            = __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId);

      final Set<ResourcePermission> addPermissions = new HashSet<>(requestedResourcePermissions.size());
      final Set<ResourcePermission> updatePermissions = new HashSet<>(requestedResourcePermissions.size());

      for (ResourcePermission requestedPermission : requestedResourcePermissions) {
         boolean existingPermission = false;

         for (ResourcePermission existingDirectPermission : directAccessorPermissions) {
            if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) {
               // found a match by name - now let's check if we need to update existing or leave it unchanged
               if (!requestedPermission.equals(existingDirectPermission)
                     && !requestedPermission.isGrantableFrom(existingDirectPermission)) {
                  // requested permission has higher granting rights than the already existing direct permission,
                  // so we need to update it
                  updatePermissions.add(requestedPermission);
               }
               existingPermission = true;
               break;
            }
         }

         if (!existingPermission) {
            // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it
            addPermissions.add(requestedPermission);
         }
      }

      // update any necessary direct global system permissions the accessor has on the resource class and domain
      grantGlobalResourcePermissionSysPersister.updateGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId,
                                                                           updatePermissions, sessionResource);

      // update any necessary direct global non-system permissions the accessor has on the resource class and domain
      grantGlobalResourcePermissionPersister.updateGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId,
                                                                             updatePermissions, sessionResource);

      // add the new system permissions
      grantGlobalResourcePermissionSysPersister.addGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId,
                                                                        addPermissions, sessionResource);

      // add the new non-system permissions
      grantGlobalResourcePermissionPersister.addGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId,
                                                                          addPermissions, sessionResource);
   }

   @Override
   public void revokeGlobalResourcePermissions(Resource accessorResource,
                                               String resourceClassName,
                                               String domainName,
                                               Set<ResourcePermission> resourcePermissions) {
      SQLConnection connection = null;

      __assertAuthenticated();
      __assertResourceSpecified(accessorResource);
      __assertResourceClassSpecified(resourceClassName);
      __assertDomainSpecified(domainName);
      __assertPermissionsSpecified(resourcePermissions);
      __assertPermissionsSetNotEmpty(resourcePermissions);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);
         resourceClassName = resourceClassName.trim();
         domainName = domainName.trim();

         __revokeDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions);
      }
      finally {
         __closeConnection(connection);
      }
   }

   @Override
   public void revokeGlobalResourcePermissions(Resource accessorResource,
                                               String resourceClassName,
                                               String domainName,
                                               ResourcePermission resourcePermission,
                                               ResourcePermission...
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); __revokeDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private void __revokeDirectGlobalPermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { // verify that resource class is defined final ResourceClassInternalInfo resourceClassInfo = __getResourceClassInternalInfo(connection, resourceClassName); final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); // next ensure that the requested permissions are valid and unique in name __assertUniqueResourcePermissionsNamesForResourceClass(connection, requestedResourcePermissions, resourceClassInfo); // verify the domain final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // check for authorization if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) { final Set<ResourcePermission> grantorPermissions = __getEffectiveGlobalResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); final Set<ResourcePermission> unauthorizedPermissions = __subtractResourcePermissionsIfGrantableFrom(requestedResourcePermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "revoke the following global permission(s): " + unauthorizedPermissions); } } final Set<ResourcePermission> directAccessorPermissions = __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId); final Set<ResourcePermission> removePermissions = new HashSet<>(requestedResourcePermissions.size()); for (ResourcePermission requestedPermission : requestedResourcePermissions) { for (ResourcePermission existingDirectPermission : directAccessorPermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // requested permission has same name and regardless of granting rights we need to remove it removePermissions.add(requestedPermission); break; } } } // remove any necessary direct system permissions grantGlobalResourcePermissionSysPersister.removeGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); // remove any necessary direct non-system permissions grantGlobalResourcePermissionPersister.removeGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); } @Override public Set<ResourcePermission> getGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName) { SQLConnection connection = null; __assertAuthenticated(); 
__assertResourceSpecified(accessorResource);
      __assertResourceClassSpecified(resourceClassName);
      __assertDomainSpecified(domainName);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);
         __assertQueryAuthorization(connection, accessorResource);
         resourceClassName = resourceClassName.trim();
         domainName = domainName.trim();

         return __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private Set<ResourcePermission> __getDirectGlobalResourcePermissions(SQLConnection connection,
                                                                        Resource accessorResource,
                                                                        String resourceClassName,
                                                                        String domainName) {
      // verify that resource class is defined
      final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName);

      if (resourceClassId == null) {
         throw new IllegalArgumentException("Could not find resource class: " + resourceClassName);
      }

      // verify the domain
      final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName);

      if (domainId == null) {
         throw new IllegalArgumentException("Could not find domain: " + domainName);
      }

      return __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId);
   }

   @Override
   public Set<ResourcePermission> getEffectiveGlobalResourcePermissions(Resource accessorResource,
                                                                        String resourceClassName,
                                                                        String domainName) {
      SQLConnection connection = null;

      __assertAuthenticated();
      __assertResourceSpecified(accessorResource);
      __assertResourceClassSpecified(resourceClassName);
      __assertDomainSpecified(domainName);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);
         __assertQueryAuthorization(connection, accessorResource);
         resourceClassName = resourceClassName.trim();
         domainName = domainName.trim();

         return __getEffectiveGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private Set<ResourcePermission> __getEffectiveGlobalResourcePermissionsIgnoringSuperUserPrivileges(SQLConnection connection,
                                                                                                      Resource accessorResource,
                                                                                                      String resourceClassName,
                                                                                                      String domainName) {
      // verify that resource class is defined
      final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName);

      if (resourceClassId == null) {
         throw new IllegalArgumentException("Could not find resource class: " + resourceClassName);
      }

      // verify the domain
      final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName);

      if (domainId == null) {
         throw new IllegalArgumentException("Could not find domain: " + domainName);
      }

      Set<ResourcePermission> resourcePermissions = new HashSet<>();

      // collect the global system permissions that the accessor has for the resource class in the domain
      resourcePermissions.addAll(grantGlobalResourcePermissionSysPersister
                                       .getGlobalSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId));

      // collect the global non-system permissions that the accessor has for the resource class in the domain
      resourcePermissions.addAll(grantGlobalResourcePermissionPersister
                                       .getGlobalResourcePermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId));

      return __collapseResourcePermissions(resourcePermissions);
   }

   private Set<ResourcePermission> __getEffectiveGlobalResourcePermissions(SQLConnection connection,
                                                                           Resource accessorResource,
                                                                           String resourceClassName,
                                                                           String domainName) {
      // verify that resource class is defined
      final ResourceClassInternalInfo resourceClassInternalInfo = __getResourceClassInternalInfo(connection, resourceClassName);

      // verify the domain
      final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName);

      if (domainId == null) {
         throw new IllegalArgumentException("Could not find domain: " + domainName);
      }

      if (__isSuperUserOfDomain(connection, accessorResource, domainName)) {
         return __getApplicableResourcePermissions(connection, resourceClassInternalInfo);
      }

      final Id<ResourceClassId> resourceClassId = Id.from(resourceClassInternalInfo.getResourceClassId());
      Set<ResourcePermission> resourcePermissions = new HashSet<>();

      // collect the global system permissions that the accessor has for the resource class in the domain
      resourcePermissions.addAll(grantGlobalResourcePermissionSysPersister
                                       .getGlobalSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId));

      // collect the global non-system permissions that the accessor has for the resource class in the domain
      resourcePermissions.addAll(grantGlobalResourcePermissionPersister
                                       .getGlobalResourcePermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId));

      return __collapseResourcePermissions(resourcePermissions);
   }

   private Set<ResourcePermission> __getApplicableResourcePermissions(SQLConnection connection,
                                                                      ResourceClassInternalInfo resourceClassInternalInfo) {
      final List<String> resourcePermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo);
      Set<ResourcePermission> superResourcePermissions = new HashSet<>(resourcePermissionNames.size());

      for (String permissionName : resourcePermissionNames) {
         superResourcePermissions.add(ResourcePermissions.getInstance(permissionName, true));
      }

      return superResourcePermissions;
   }

   private Set<ResourceCreatePermission> __getApplicableResourceCreatePermissions(SQLConnection connection,
                                                                                  ResourceClassInternalInfo resourceClassInternalInfo) {
      final List<String> resourcePermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo);
      Set<ResourceCreatePermission> superResourceCreatePermissions = new HashSet<>(resourcePermissionNames.size() + 1);

      superResourceCreatePermissions.add(ResourceCreatePermissions.getInstance(ResourceCreatePermissions.CREATE, true));

      for (String permissionName : resourcePermissionNames) {
         superResourceCreatePermissions.add(ResourceCreatePermissions.getInstance(ResourcePermissions.getInstance(permissionName, true), true));
      }

      return superResourceCreatePermissions;
   }

   private Set<ResourcePermission> __collapseResourcePermissions(Set<ResourcePermission> resourcePermissions) {
      final Set<ResourcePermission> collapsedPermissions = new HashSet<>(resourcePermissions);

      for (ResourcePermission permission : resourcePermissions) {
         for (ResourcePermission grantEquivalentPermission : resourcePermissions) {
            if (permission.isGrantableFrom(grantEquivalentPermission) && !permission.equals(grantEquivalentPermission)) {
               collapsedPermissions.remove(permission);
               break;
            }
         }
      }

      return collapsedPermissions;
   }

   @Override
   public Map<String, Map<String, Set<ResourcePermission>>> getGlobalResourcePermissionsMap(Resource accessorResource) {
      SQLConnection connection = null;

      __assertAuthenticated();
      __assertResourceSpecified(accessorResource);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);
         __assertQueryAuthorization(connection, accessorResource);

         return
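               // note: the map returned below reflects only directly-granted global permissions (no inherited or super-user derived entries)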
__getDirectGlobalResourcePermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Map<String, Set<ResourcePermission>>> __getDirectGlobalResourcePermissionsMap(SQLConnection connection, Resource accessorResource) { final Map<String, Map<String, Set<ResourcePermission>>> globalALLPermissionsMap = new HashMap<>(); // collect the system permissions that the accessor has and add it into the globalALLPermissionsMap globalALLPermissionsMap .putAll(grantGlobalResourcePermissionSysPersister.getGlobalSysPermissions(connection, accessorResource)); // next collect the non-system permissions that the accessor has and add it into the globalALLPermissionsMap __mergeSourcePermissionsMapIntoTargetPermissionsMap(grantGlobalResourcePermissionPersister .getGlobalResourcePermissions(connection, accessorResource), globalALLPermissionsMap); return __collapseResourcePermissions(globalALLPermissionsMap); } @Override public Map<String, Map<String, Set<ResourcePermission>>> getEffectiveGlobalResourcePermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveGlobalResourcePermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Map<String, Set<ResourcePermission>>> __getEffectiveGlobalResourcePermissionsMap(SQLConnection connection, Resource accessorResource) { final Map<String, Map<String, Set<ResourcePermission>>> globalALLPermissionsMap = new HashMap<>(); // collect the system permissions that the accessor has and add it into the globalALLPermissionsMap globalALLPermissionsMap .putAll(grantGlobalResourcePermissionSysPersister .getGlobalSysPermissionsIncludeInherited(connection, accessorResource)); // next collect the non-system permissions that the accessor has and add it into the globalALLPermissionsMap __mergeSourcePermissionsMapIntoTargetPermissionsMap( grantGlobalResourcePermissionPersister.getGlobalResourcePermissionsIncludeInherited(connection, accessorResource), globalALLPermissionsMap); // finally, collect all applicable permissions when accessor has super-user privileges to any domain // and add them into the globalALLPermissionsMap final Map<String, Map<String, Set<ResourcePermission>>> superGlobalResourcePermissionsMap = new HashMap<>(); Map<String, Set<ResourcePermission>> superResourcePermissionsMap = null; final Map<String, Set<DomainPermission>> effectiveDomainPermissionsMap = __getEffectiveDomainPermissionsMap(connection, accessorResource); for (String domainName : effectiveDomainPermissionsMap.keySet()) { final Set<DomainPermission> effectiveDomainPermissions = effectiveDomainPermissionsMap.get(domainName); if (effectiveDomainPermissions.contains(DomainPermission_SUPER_USER) || effectiveDomainPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { if (superResourcePermissionsMap == null) { // lazy-construct super-user-privileged resource-permissions map by resource classes final List<String> resourceClassNames = resourceClassPersister.getResourceClassNames(connection); superResourcePermissionsMap = new HashMap<>(resourceClassNames.size()); for (String resourceClassName : resourceClassNames) { final Set<ResourcePermission> applicableResourcePermissions = __getApplicableResourcePermissions(connection, 
__getResourceClassInternalInfo(connection, resourceClassName)); superResourcePermissionsMap.put(resourceClassName, applicableResourcePermissions); } } superGlobalResourcePermissionsMap.put(domainName, superResourcePermissionsMap); } } __mergeSourcePermissionsMapIntoTargetPermissionsMap(superGlobalResourcePermissionsMap, globalALLPermissionsMap); return __collapseResourcePermissions(globalALLPermissionsMap); } private void __mergeSourcePermissionsMapIntoTargetPermissionsMap(Map<String, Map<String, Set<ResourcePermission>>> sourcePermissionsMap, Map<String, Map<String, Set<ResourcePermission>>> targetPermissionsMap) { for (String domainName : sourcePermissionsMap.keySet()) { Map<String, Set<ResourcePermission>> targetPermsForDomainMap; // does the target map have domain? if ((targetPermsForDomainMap = targetPermissionsMap.get(domainName)) == null) { // no, add the domain targetPermissionsMap.put(domainName, targetPermsForDomainMap = new HashMap<>()); } for (String resourceClassName : sourcePermissionsMap.get(domainName).keySet()) { Set<ResourcePermission> targetPermsForClassSet; // does the target map have the resource class? if ((targetPermsForClassSet = targetPermsForDomainMap.get(resourceClassName)) == null) { // no, add the resource class targetPermsForDomainMap.put(resourceClassName, targetPermsForClassSet = new HashSet<>()); } // get the source permissions for the domain + resource class final Set<ResourcePermission> sourcePermissionsForClassSet = sourcePermissionsMap.get(domainName).get(resourceClassName); // add the source permissions above to the target for the respective domain + resource class targetPermsForClassSet.addAll(sourcePermissionsForClassSet); } } } private Map<String, Map<String, Set<ResourcePermission>>> __collapseResourcePermissions(Map<String, Map<String, Set<ResourcePermission>>> resourcePermissionsMap) { for (String domainName : resourcePermissionsMap.keySet()) { final Map<String, Set<ResourcePermission>> createPermissionsByDomainMap = resourcePermissionsMap.get(domainName); for (String resourceClassName : createPermissionsByDomainMap.keySet()) { final Set<ResourcePermission> createPermissionsByResourceClassMap = createPermissionsByDomainMap.get(resourceClassName); createPermissionsByDomainMap.put(resourceClassName, __collapseResourcePermissions(createPermissionsByResourceClassMap)); } } return resourcePermissionsMap; } @Override public String getDomainNameByResource(Resource resource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(resource); if (sessionResource.equals(resource)) { return sessionResourceDomainName; } else { try { connection = __getConnection(); resource = __resolveResource(connection, resource); return domainPersister.getResourceDomainNameByResourceId(connection, resource); } finally { __closeConnection(connection); } } } @Override public Set<String> getDomainDescendants(String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertDomainSpecified(domainName); try { connection = __getConnection(); domainName = domainName.trim(); return domainPersister.getResourceDomainNameDescendants(connection, domainName); } finally { __closeConnection(connection); } } @Override public ResourceClassInfo getResourceClassInfo(String resourceClassName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceClassSpecified(resourceClassName); try { connection = __getConnection(); final ResourceClassInternalInfo resourceClassInternalInfo = __getResourceClassInternalInfo(connection, 
resourceClassName); return new ResourceClassInfo(resourceClassInternalInfo.getResourceClassName(), resourceClassInternalInfo.isAuthenticatable(), resourceClassInternalInfo.isUnauthenticatedCreateAllowed()); } finally { __closeConnection(connection); } } @Override public ResourceClassInfo getResourceClassInfoByResource(Resource resource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(resource); try { connection = __getConnection(); resource = __resolveResource(connection, resource); final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, resource); return new ResourceClassInfo(resourceClassInternalInfo.getResourceClassName(), resourceClassInternalInfo.isAuthenticatable(), resourceClassInternalInfo.isUnauthenticatedCreateAllowed()); } finally { __closeConnection(connection); } } @Override public Resource getAuthenticatedResource() { __assertAuthenticated(); return authenticatedResource; } @Override public Resource getSessionResource() { __assertAuthenticated(); return sessionResource; } @Override public void assertPostCreateDomainPermissions(Resource accessorResource, Set<DomainPermission> domainPermissions) { if (!hasPostCreateDomainPermissions(accessorResource, domainPermissions)) { throw NotAuthorizedException.newInstanceForPostCreateDomainPermissions(accessorResource, domainPermissions); } } @Override public void assertPostCreateDomainPermissions(Resource accessorResource, DomainPermission domainPermission, DomainPermission... domainPermissions) { if (!hasPostCreateDomainPermissions(accessorResource, domainPermission, domainPermissions)) { throw NotAuthorizedException.newInstanceForPostCreateDomainPermissions(accessorResource, domainPermission, domainPermissions); } } @Override public boolean hasPostCreateDomainPermissions(Resource accessorResource, Set<DomainPermission> domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainPermissions); __assertPermissionsSetNotEmpty(domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasPostCreateDomainPermissions(connection, accessorResource, domainPermissions); } finally { __closeConnection(connection); } } @Override public boolean hasPostCreateDomainPermissions(Resource accessorResource, DomainPermission domainPermission, DomainPermission... 
domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionSpecified(domainPermission); __assertVarargPermissionsSpecified(domainPermissions); final Set<DomainPermission> requestedDomainPermissions = __getSetWithoutNullsOrDuplicates(domainPermission, domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasPostCreateDomainPermissions(connection, accessorResource, requestedDomainPermissions); } finally { __closeConnection(connection); } } private boolean __hasPostCreateDomainPermissions(SQLConnection connection, Resource accessorResource, Set<DomainPermission> requestedDomainPermissions) { __assertQueryAuthorization(connection, accessorResource); boolean hasPermission = false; // first check if the accessor even has *CREATE permission for domains final Set<DomainCreatePermission> effectiveDomainCreatePermissions = __getEffectiveDomainCreatePermissions(connection, accessorResource); for (DomainCreatePermission domainCreatePermission : effectiveDomainCreatePermissions) { if (domainCreatePermission.isSystemPermission() && DomainCreatePermissions.CREATE.equals(domainCreatePermission.getPermissionName())) { hasPermission = true; break; } } if (hasPermission) { // check if the requested permissions are permissible from the set of effective post-create permissions final Set<DomainPermission> postCreateDomainPermissions = __getPostCreateDomainPermissions(effectiveDomainCreatePermissions); for (DomainPermission requestedDomainPermission : requestedDomainPermissions) { if (!__isPermissible(requestedDomainPermission, postCreateDomainPermissions)) { hasPermission = false; break; } } if (!hasPermission) { hasPermission = postCreateDomainPermissions.contains(DomainPermission_SUPER_USER) || postCreateDomainPermissions.contains(DomainPermission_SUPER_USER_GRANT); } } return hasPermission; } private boolean __isPermissible(DomainPermission queriedDomainPermission, Set<DomainPermission> domainPermissions) { for (DomainPermission domainPermission : domainPermissions) { if (queriedDomainPermission.equals(domainPermission) || queriedDomainPermission.isGrantableFrom(domainPermission)) { return true; } } return false; } @Override public void assertDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> domainPermissions) { if (!hasDomainPermissions(accessorResource, domainName, domainPermissions)) { throw NotAuthorizedException.newInstanceForDomainPermissions(accessorResource, domainName, domainPermissions); } } @Override public void assertDomainPermissions(Resource accessorResource, String domainName, DomainPermission domainPermission, DomainPermission... 
domainPermissions) { if (!hasDomainPermissions(accessorResource, domainName, domainPermission, domainPermissions)) { throw NotAuthorizedException.newInstanceForDomainPermissions(accessorResource, domainName, domainPermission, domainPermissions); } } @Override public boolean hasDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionsSpecified(domainPermissions); __assertPermissionsSetNotEmpty(domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasDomainPermissions(connection, accessorResource, domainName, domainPermissions); } finally { __closeConnection(connection); } } @Override public boolean hasDomainPermissions(Resource accessorResource, String domainName, DomainPermission domainPermission, DomainPermission... domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionSpecified(domainPermission); __assertVarargPermissionsSpecified(domainPermissions); final Set<DomainPermission> requestedDomainPermissions = __getSetWithoutNullsOrDuplicates(domainPermission, domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasDomainPermissions(connection, accessorResource, domainName, requestedDomainPermissions); } finally { __closeConnection(connection); } } private boolean __hasDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName, Set<DomainPermission> requestedDomainPermissions) { __assertQueryAuthorization(connection, accessorResource); // first check for effective permissions final Set<DomainPermission> effectiveDomainPermissions = __getEffectiveDomainPermissions(connection, accessorResource, domainName); boolean hasPermission = true; for (DomainPermission domainPermission : requestedDomainPermissions) { if (!__isPermissible(domainPermission, effectiveDomainPermissions)) { hasPermission = false; break; } } // next check super-user permissions to the domain of the accessed resource if (!hasPermission) { hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } @Override public void assertDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { if (!hasDomainCreatePermissions(accessorResource, domainCreatePermissions)) { throw NotAuthorizedException.newInstanceForDomainCreatePermissions(accessorResource, domainCreatePermissions); } } @Override public void assertDomainCreatePermissions(Resource accessorResource, DomainCreatePermission domainCreatePermission, DomainCreatePermission... 
domainCreatePermissions) { if (!hasDomainCreatePermissions(accessorResource, domainCreatePermission, domainCreatePermissions)) { throw NotAuthorizedException.newInstanceForDomainCreatePermissions(accessorResource, domainCreatePermission, domainCreatePermissions); } } @Override public boolean hasDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainCreatePermissions); __assertPermissionsSetNotEmpty(domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasDomainCreatePermissions(connection, accessorResource, domainCreatePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasDomainCreatePermissions(Resource accessorResource, DomainCreatePermission domainCreatePermission, DomainCreatePermission... domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionSpecified(domainCreatePermission); __assertVarargPermissionsSpecified(domainCreatePermissions); final Set<DomainCreatePermission> requestedDomainCreatePermissions = __getSetWithoutNullsOrDuplicates(domainCreatePermission, domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasDomainCreatePermissions(connection, accessorResource, requestedDomainCreatePermissions); } finally { __closeConnection(connection); } } private boolean __hasDomainCreatePermissions(SQLConnection connection, Resource accessorResource, Set<DomainCreatePermission> queriedDomainCreatePermissions) { __assertQueryAuthorization(connection, accessorResource); final Set<DomainCreatePermission> effectiveDomainCreatePermissions = __getEffectiveDomainCreatePermissions(connection, accessorResource); for (DomainCreatePermission domainCreatePermission : queriedDomainCreatePermissions) { if (!__isPermissible(domainCreatePermission, effectiveDomainCreatePermissions)) { return false; } } return true; } private boolean __isPermissible(DomainCreatePermission queriedDomainCreatePermission, Set<DomainCreatePermission> domainCreatePermissions) { for (DomainCreatePermission domainCreatePermission : domainCreatePermissions) { if (queriedDomainCreatePermission.equals(domainCreatePermission) || queriedDomainCreatePermission.isGrantableFrom(domainCreatePermission)) { return true; } } return false; } @Override public void assertPostCreateResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { if (!hasPostCreateResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermissions)) { throw NotAuthorizedException.newInstanceForPostCreateResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermissions); } } @Override public void assertPostCreateResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { if (!hasPostCreateResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermission, resourcePermissions)) { throw NotAuthorizedException.newInstanceForPostCreateResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermission, resourcePermissions); } } @Override public boolean hasPostCreateResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasPostCreateResourcePermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasPostCreateResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasPostCreateResourcePermissions(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private boolean __hasPostCreateResourcePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions); __assertQueryAuthorization(connection, accessorResource); boolean hasPermission = false; // first check if the accessor even has *CREATE permission for the resource class and domain final Set<ResourceCreatePermission> effectiveResourceCreatePermissions = __getEffectiveResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName); for (ResourceCreatePermission resourceCreatePermission : effectiveResourceCreatePermissions) { if (resourceCreatePermission.isSystemPermission() && ResourceCreatePermissions.CREATE.equals(resourceCreatePermission.getPermissionName())) { hasPermission = true; break; } } if (hasPermission) { // check if the requested permission is permissible from the set of effective post-create permissions final Set<ResourcePermission> postCreateResourcePermissions = __getPostCreateResourcePermissions(effectiveResourceCreatePermissions); final Set<ResourcePermission> nonPostCreateResourcePermissions = new HashSet<>(requestedResourcePermissions.size()); for (ResourcePermission requestedResourcePermission : requestedResourcePermissions) { if 
(!__isPermissible(requestedResourcePermission, postCreateResourcePermissions)) { nonPostCreateResourcePermissions.add(requestedResourcePermission); } } if (!nonPostCreateResourcePermissions.isEmpty()) { // check if the requested permission is permissible from the set of effective global permissions final Set<ResourcePermission> globalResourcePermissions = __getEffectiveGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName); for (ResourcePermission requestedResourcePermission : nonPostCreateResourcePermissions) { if (!__isPermissible(requestedResourcePermission, globalResourcePermissions)) { hasPermission = false; break; } } } } if (!hasPermission) { hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } private boolean __isPermissible(ResourcePermission queriedResourcePermission, Set<ResourcePermission> resourcePermissions) { for (ResourcePermission resourcePermission : resourcePermissions) { if (queriedResourcePermission.equals(resourcePermission) || queriedResourcePermission.isGrantableFrom(resourcePermission)) { return true; } } return false; } @Override public void assertGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { if (!hasGlobalResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermissions)) { throw NotAuthorizedException.newInstanceForGlobalResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermissions); } } @Override public void assertGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... resourcePermissions) { if (!hasGlobalResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermission, resourcePermissions)) { throw NotAuthorizedException.newInstanceForGlobalResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermission, resourcePermissions); } } @Override public boolean hasGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private boolean __hasGlobalResourcePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions); __assertQueryAuthorization(connection, accessorResource); final Set<ResourcePermission> globalResourcePermissions = __getEffectiveGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName); boolean hasPermission = true; for (ResourcePermission requestedResourcePermission : requestedResourcePermissions) { if (!__isPermissible(requestedResourcePermission, globalResourcePermissions)) { hasPermission = false; break; } } if (!hasPermission) { hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } @Override public void assertResourcePermissions(Resource accessorResource, Resource accessedResource, Set<ResourcePermission> resourcePermissions) { if (!hasResourcePermissions(accessorResource, accessedResource, resourcePermissions)) { throw NotAuthorizedException.newInstanceForResourcePermissions(accessorResource, accessedResource, resourcePermissions); } } @Override public void assertResourcePermissions(Resource accessorResource, Resource accessedResource, ResourcePermission resourcePermission, ResourcePermission... resourcePermissions) { if (!hasResourcePermissions(accessorResource, accessedResource, resourcePermission, resourcePermissions)) { throw NotAuthorizedException.newInstanceForResourcePermissions(accessorResource, accessedResource, resourcePermission, resourcePermissions); } } @Override public boolean hasResourcePermissions(Resource accessorResource, Resource accessedResource, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); return __hasResourcePermissions(connection, accessorResource, accessedResource, resourcePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasResourcePermissions(Resource accessorResource, Resource accessedResource, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); return __hasResourcePermissions(connection, accessorResource, accessedResource, requestedResourcePermissions); } finally { __closeConnection(connection); } } private boolean __hasResourcePermissions(SQLConnection connection, Resource accessorResource, Resource accessedResource, Set<ResourcePermission> requestedResourcePermissions) { __assertQueryAuthorization(connection, accessorResource); final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource); __assertPermissionsValid(connection, resourceClassInternalInfo.getResourceClassName(), requestedResourcePermissions); // first check for effective permissions final Set<ResourcePermission> effectiveResourcePermissions = __getEffectiveResourcePermissions(connection, accessorResource, accessedResource); boolean hasPermission = true; for (ResourcePermission requestedResourcePermission : requestedResourcePermissions) { if (!__isPermissible(requestedResourcePermission, effectiveResourcePermissions)) { hasPermission = false; break; } } // next check super-user permissions to the domain of the accessed resource if (!hasPermission) { final String domainName = domainPersister.getResourceDomainNameByResourceId(connection, accessedResource); hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } @Override public void assertResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { if (!hasResourceCreatePermissions(accessorResource, resourceClassName, domainName, resourceCreatePermissions)) { throw NotAuthorizedException.newInstanceForResourceCreatePermissions(accessorResource, resourceCreatePermissions); } } @Override public void assertResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourceCreatePermission resourceCreatePermission, ResourceCreatePermission... 
resourceCreatePermissions) { if (!hasResourceCreatePermissions(accessorResource, resourceClassName, domainName, resourceCreatePermission, resourceCreatePermissions)) { throw NotAuthorizedException.newInstanceForResourceCreatePermissions(accessorResource, resourceCreatePermission, resourceCreatePermissions); } } @Override public boolean hasResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourceCreatePermissions); __assertPermissionsSetNotEmpty(resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, resourceCreatePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourceCreatePermission resourceCreatePermission, ResourceCreatePermission... resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourceCreatePermission); __assertVarargPermissionsSpecified(resourceCreatePermissions); final Set<ResourceCreatePermission> requestedResourceCreatePermissions = __getSetWithoutNullsOrDuplicates(resourceCreatePermission, resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, requestedResourceCreatePermissions); } finally { __closeConnection(connection); } } private boolean __hasResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> requestedResourceCreatePermissions) { __assertPermissionsValid(connection, resourceClassName, __getPostCreateResourcePermissions(requestedResourceCreatePermissions)); __assertQueryAuthorization(connection, accessorResource); final Set<ResourceCreatePermission> effectiveResourceCreatePermissions = __getEffectiveResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName); boolean hasPermission = true; // first check for effective create permissions for (ResourceCreatePermission resourceCreatePermission : requestedResourceCreatePermissions) { if (!__isPermissible(resourceCreatePermission, effectiveResourceCreatePermissions)) { hasPermission = false; break; } } // next check super-user permissions to the domain if (!hasPermission) { hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } private boolean __isPermissible(ResourceCreatePermission queriedResourceCreatePermission, Set<ResourceCreatePermission> resourceCreatePermissions) { for (ResourceCreatePermission resourceCreatePermission : resourceCreatePermissions) { if 
(queriedResourceCreatePermission.equals(resourceCreatePermission) || queriedResourceCreatePermission.isGrantableFrom(resourceCreatePermission)) { return true; } } return false; } @Override public Set<Resource> getResourcesByResourcePermissions(Resource accessorResource, String resourceClassName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); return __getResourcesByPermissions(connection, accessorResource, resourceClassName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public Set<Resource> getResourcesByResourcePermissions(Resource accessorResource, String resourceClassName, ResourcePermission resourcePermission, ResourcePermission... resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); return __getResourcesByPermissions(connection, accessorResource, resourceClassName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private Set<Resource> __getResourcesByPermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, Set<ResourcePermission> requestedResourcePermissions) { // first verify that resource class is defined Id<ResourceClassId> resourceClassId; Id<ResourcePermissionId> permissionId; resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // verify permissions are valid for resource class __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions); Set<Resource> resources = new HashSet<>(); for (ResourcePermission resourcePermission : requestedResourcePermissions) { Set<Resource> currentResources = new HashSet<>(); if (resourcePermission.isSystemPermission()) { // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionSysPersister .getResourcesByResourceSysPermission(connection, accessorResource, resourceClassId, resourcePermission)); // get the list of objects of the specified type that the session has access to via global permissions currentResources.addAll(grantGlobalResourcePermissionSysPersister .getResourcesByGlobalSysPermission(connection, accessorResource, resourceClassId, resourcePermission)); } else { // check if the non-system permission name is valid permissionId = resourceClassPermissionPersister.getResourceClassPermissionId(connection, resourceClassId, resourcePermission 
.getPermissionName()); if (permissionId == null) { throw new IllegalArgumentException("Permission: " + resourcePermission + " is not defined for resource class: " + resourceClassName); } // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionPersister .getResourcesByResourcePermission(connection, accessorResource, resourceClassId, resourcePermission, permissionId)); // get the list of objects of the specified type that the session has access to via global permissions currentResources.addAll(grantGlobalResourcePermissionPersister .getResourcesByGlobalResourcePermission(connection, accessorResource, resourceClassId, resourcePermission, permissionId)); } if (currentResources.isEmpty()) { // we got an empty set for a permission, we are done since this and all future intersects will be empty resources = currentResources; break; } else { // the only way resources will be empty below is if we never entered this else clause before if (resources.isEmpty()) { resources = currentResources; } else { // compute the intersection of previous iterations and the current resources resources.retainAll(currentResources); if (resources.isEmpty()) { // if intersection with previous results is empty, then all future intersections will be empty, as well break; } } } } // finally get the list of objects of the specified type that the session has access to via super user permissions resources.addAll(grantDomainPermissionSysPersister.getResourcesByDomainSuperUserPermission(connection, accessorResource, resourceClassId)); return resources; } @Override public Set<Resource> getResourcesByResourcePermissionsAndDomain(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); return __getResourcesByPermissionsAndDomain(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public Set<Resource> getResourcesByResourcePermissionsAndDomain(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); return __getResourcesByPermissionsAndDomain(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private Set<Resource> __getResourcesByPermissionsAndDomain(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { // first verify that resource class and domain is defined Id<ResourceClassId> resourceClassId; Id<DomainId> domainId; Id<ResourcePermissionId> permissionId; resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // verify permissions are valid for resource class __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions); Set<Resource> resources = new HashSet<>(); for (ResourcePermission resourcePermission : requestedResourcePermissions) { Set<Resource> currentResources = new HashSet<>(); if (resourcePermission.isSystemPermission()) { // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionSysPersister .getResourcesByResourceSysPermission(connection, accessorResource, resourceClassId, domainId, resourcePermission)); // get the list of objects of the specified type that the session has access to via global permissions currentResources.addAll(grantGlobalResourcePermissionSysPersister .getResourcesByGlobalSysPermission(connection, accessorResource, resourceClassId, domainId, resourcePermission)); } else { // check if the non-system permission name is valid permissionId = resourceClassPermissionPersister.getResourceClassPermissionId(connection, resourceClassId, resourcePermission .getPermissionName()); if (permissionId == null) { throw new IllegalArgumentException("Permission: " + resourcePermission + " is not defined for resource class: " + resourceClassName); } // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionPersister .getResourcesByResourcePermission(connection, accessorResource, resourceClassId, domainId, resourcePermission, permissionId)); // get the list of objects of the specified type that the session has access to via global permissions currentResources.addAll(grantGlobalResourcePermissionPersister .getResourcesByGlobalResourcePermission(connection, accessorResource, resourceClassId, domainId, resourcePermission, permissionId)); } if (currentResources.isEmpty()) { // we got an empty set for a 
permission, we are done since this and all future intersects will be empty resources = currentResources; break; } else { // the only way resources will be empty below is if we never entered this else clause before if (resources.isEmpty()) { resources = currentResources; } else { // compute the intersection of previous iterations and the current resources resources.retainAll(currentResources); if (resources.isEmpty()) { // if intersection with previous results is empty, then all future intersections will be empty, as well break; } } } } // finally get the list of objects of the specified type that the session has access to via super user permissions resources.addAll(grantDomainPermissionSysPersister.getResourcesByDomainSuperUserPermission(connection, accessorResource, resourceClassId, domainId)); return resources; } @Override public Set<Resource> getAccessorResourcesByResourcePermissions(Resource accessedResource, String resourceClassName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessedResource); __assertResourceClassSpecified(resourceClassName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessedResource = __resolveResource(connection, accessedResource); __assertQueryAuthorization(connection, accessedResource); resourceClassName = resourceClassName.trim(); return __getAccessorResourcesByResourcePermissions(connection, accessedResource, resourceClassName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public Set<Resource> getAccessorResourcesByResourcePermissions(Resource accessedResource, String resourceClassName, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) {
      SQLConnection connection = null;

      __assertAuthenticated();
      __assertResourceSpecified(accessedResource);
      __assertResourceClassSpecified(resourceClassName);
      __assertPermissionSpecified(resourcePermission);
      __assertVarargPermissionsSpecified(resourcePermissions);

      final Set<ResourcePermission> requestedResourcePermissions
            = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions);

      try {
         connection = __getConnection();

         accessedResource = __resolveResource(connection, accessedResource);
         __assertQueryAuthorization(connection, accessedResource);

         resourceClassName = resourceClassName.trim();

         return __getAccessorResourcesByResourcePermissions(connection,
                                                            accessedResource,
                                                            resourceClassName,
                                                            requestedResourcePermissions);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private Set<Resource> __getAccessorResourcesByResourcePermissions(SQLConnection connection,
                                                                     Resource accessedResource,
                                                                     String resourceClassName,
                                                                     Set<ResourcePermission> requestedResourcePermissions) {
      // first verify that resource class is defined
      Id<ResourceClassId> resourceClassId;
      Id<ResourcePermissionId> permissionId;

      resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName);

      if (resourceClassId == null) {
         throw new IllegalArgumentException("Could not find resource class: " + resourceClassName);
      }

      // verify permissions are valid for the resource class
      __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions);

      Set<Resource> resources = new HashSet<>();

      for (ResourcePermission resourcePermission : requestedResourcePermissions) {
         Set<Resource> currentResources = new HashSet<>();

         if (resourcePermission.isSystemPermission()) {
            // get the accessor resources that hold the system permission directly on the accessed resource
            currentResources.addAll(grantResourcePermissionSysPersister
                                          .getAccessorResourcesByResourceSysPermission(connection,
                                                                                       accessedResource,
                                                                                       resourceClassId,
                                                                                       resourcePermission));
         }
         else {
            // check if the non-system permission name is valid
            permissionId = resourceClassPermissionPersister.getResourceClassPermissionId(connection,
                                                                                         resourceClassId,
                                                                                         resourcePermission.getPermissionName());

            if (permissionId == null) {
               throw new IllegalArgumentException("Permission: " + resourcePermission
                                                        + " is not defined for resource class: " + resourceClassName);
            }

            // get the accessor resources that hold the permission directly on the accessed resource
            currentResources.addAll(grantResourcePermissionPersister
                                          .getAccessorResourcesByResourcePermission(connection,
                                                                                    accessedResource,
                                                                                    resourceClassId,
                                                                                    resourcePermission,
                                                                                    permissionId));
         }

         if (currentResources.isEmpty()) {
            // we got an empty set for a permission, we are done since this and all future intersects will be empty
            resources = currentResources;
            break;
         }
         else {
            // the only way resources will be empty below is if we never entered this else clause before
            if (resources.isEmpty()) {
               resources = currentResources;
            }
            else {
               // compute the intersection of previous iterations and the current resources
               resources.retainAll(currentResources);

               if (resources.isEmpty()) {
                  // if intersection with previous results is empty, then all future intersections will be empty, as well
                  break;
               }
            }
         }
      }

      return resources;
   }

   @Override
   public List<String> getResourceClassNames() {
      SQLConnection connection = null;

      __assertAuthenticated();

      try {
         connection = __getConnection();

         return resourceClassPersister.getResourceClassNames(connection);
      }
      finally {
         __closeConnection(connection);
      }
   }

   @Override
   public
List<String> getResourcePermissionNames(String resourceClassName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceClassSpecified(resourceClassName); try { connection = __getConnection(); resourceClassName = resourceClassName.trim(); return __getApplicableResourcePermissionNames(connection, resourceClassName); } finally { __closeConnection(connection); } } // private shared helper methods private Resource __resolveResource(SQLConnection connection, Resource resource) { final Resource resolvedResource; if (resource.getId() != null) { if (resource.getExternalId() != null) { // the resource has both internal and external Ids, so let's see if they match resolvedResource = resourcePersister.resolveResourceByExternalId(connection, resource.getExternalId()); if (resolvedResource == null || !resource.equals(resolvedResource)) { throw new IllegalArgumentException("Resource " + resource + "'s id does not resolve to the specified externalId!"); } } else { // ensure that we have a valid internal resource id, so we might as well also fully resolve it, UNLESS // we already have a resolved instance of this resource cached in authenticatedResource/sessionResource if (sessionResource != null) { // this context is authenticated, so let's compare against our resolved cached resources if (sessionResource.equals(resource)) { resolvedResource = sessionResource; } else if (authenticatedResource.equals(resource)) { resolvedResource = authenticatedResource; } else { resolvedResource = resourcePersister.resolveResourceByResourceId(connection, resource); } } else { resolvedResource = resourcePersister.resolveResourceByResourceId(connection, resource); } if (resolvedResource == null) { throw new IllegalArgumentException("Resource " + resource + " not found!"); } } } else if (resource.getExternalId() != null) { // there is no internal resource Id, so we need to look it up, UNLESS // we already have a resolved instance of this resource cached in authenticatedResource/sessionResource if (sessionResource != null) { // this context is authenticated, so let's compare against our resolved cached resources if (resource.getExternalId().equals(sessionResource.getExternalId())) { resolvedResource = sessionResource; } else if (resource.getExternalId().equals(authenticatedResource.getExternalId())) { resolvedResource = authenticatedResource; } else { resolvedResource = resourcePersister.resolveResourceByExternalId(connection, resource.getExternalId()); } } else { resolvedResource = resourcePersister.resolveResourceByExternalId(connection, resource.getExternalId()); } if (resolvedResource == null) { throw new IllegalArgumentException("Resource " + resource + " not found!"); } } else { throw new IllegalArgumentException("A resource id and/or external id is required, but neither was specified"); } return resolvedResource; } private List<String> __getApplicableResourcePermissionNames(SQLConnection connection, String resourceClassName) { return __getApplicableResourcePermissionNames(connection, __getResourceClassInternalInfo(connection, resourceClassName)); } private List<String> __getApplicableResourcePermissionNames(SQLConnection connection, ResourceClassInternalInfo resourceClassInternalInfo) { final List<String> permissionNames = resourceClassPermissionPersister.getPermissionNames(connection, resourceClassInternalInfo.getResourceClassName()); permissionNames.add(ResourcePermissions.INHERIT); permissionNames.add(ResourcePermissions.DELETE); permissionNames.add(ResourcePermissions.QUERY); if 
(resourceClassInternalInfo.isAuthenticatable()) { permissionNames.add(ResourcePermissions.IMPERSONATE); permissionNames.add(ResourcePermissions.RESET_CREDENTIALS); } return permissionNames; } private ResourceClassInternalInfo __getResourceClassInternalInfo(SQLConnection connection, String resourceClassName) { final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfo(connection, resourceClassName); // check if the resource class is valid if (resourceClassInternalInfo == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } return resourceClassInternalInfo; } private boolean __isSuperUserOfResource(SQLConnection connection, Resource accessorResource, Resource accessedResource) { return __isSuperUserOfDomain(connection, accessorResource, domainPersister.getResourceDomainNameByResourceId(connection, accessedResource)); } private boolean __isSuperUserOfDomain(SQLConnection connection, Resource accessorResource, String queriedDomain) { Set<DomainPermission> domainPermissions = __getEffectiveDomainPermissions(connection, accessorResource, queriedDomain); return domainPermissions.contains(DomainPermission_SUPER_USER) || domainPermissions.contains(DomainPermission_SUPER_USER_GRANT); } private boolean __isSuperUserOfDomain(SQLConnection connection, Resource accessorResource, Id<DomainId> queriedDomainId) { Set<DomainPermission> domainPermissions = __getEffectiveDomainPermissions(connection, accessorResource, queriedDomainId); return domainPermissions.contains(DomainPermission_SUPER_USER) || domainPermissions.contains(DomainPermission_SUPER_USER_GRANT); } private Set<DomainPermission> __getPostCreateDomainPermissions(Set<DomainCreatePermission> domainCreatePermissions) { Set<DomainPermission> domainPermissions = new HashSet<>(); for (DomainCreatePermission domainCreatePermission : domainCreatePermissions) { if (!domainCreatePermission.isSystemPermission()) { domainPermissions.add(domainCreatePermission.getPostCreateDomainPermission()); } } return domainPermissions; } private Set<ResourcePermission> __getPostCreateResourcePermissions(Set<ResourceCreatePermission> resourceCreatePermissions) { Set<ResourcePermission> resourcePermissions = new HashSet<>(); for (ResourceCreatePermission resourceCreatePermission : resourceCreatePermissions) { if (!resourceCreatePermission.isSystemPermission()) { resourcePermissions.add(resourceCreatePermission.getPostCreateResourcePermission()); } } return resourcePermissions; } // helper methods private void __assertResourceSpecified(Resource resource) { if (resource == null) { throw new NullPointerException("Resource required, none specified"); } } private void __assertCredentialsSpecified(Credentials credentials) { if (credentials == null) { throw new NullPointerException("Credentials required, none specified"); } } private void __assertCredentialsNotSpecified(Credentials credentials) { if (credentials != null) { throw new IllegalArgumentException("Credentials not supported, but specified for unauthenticatable resource class"); } } private void __assertExternalIdSpecified(String externalId) { if (externalId == null) { throw new NullPointerException("External id required, none specified"); } else if (externalId.trim().isEmpty()) { throw new IllegalArgumentException("External id required, none specified"); } } private void __assertDomainSpecified(String domainName) { if (domainName == null) { throw new NullPointerException("Domain required, none specified"); } else if 
(domainName.trim().isEmpty()) { throw new IllegalArgumentException("Domain required, none specified"); } } private void __assertParentDomainSpecified(String domainName) { if (domainName == null) { throw new NullPointerException("Parent domain required, none specified"); } else if (domainName.trim().isEmpty()) { throw new IllegalArgumentException("Parent domain required, none specified"); } } private void __assertAuthenticatedAsSystemResource() { if (sessionResource == null || !SYSTEM_RESOURCE_ID.equals(sessionResource.getId())) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "perform operation reserved for the system resource"); } } private void __assertAuthenticated() { if (sessionResource == null) { throw new NotAuthenticatedException("Session not authenticated"); } } private void __assertResourceClassSpecified(String resourceClassName) { if (resourceClassName == null) { throw new NullPointerException("Resource class required, none specified"); } else if (resourceClassName.trim().isEmpty()) { throw new IllegalArgumentException("Resource class required, none specified"); } } private void __assertPermissionSpecified(ResourcePermission resourcePermission) { if (resourcePermission == null) { throw new NullPointerException("Resource permission required, none specified"); } } private void __assertVarargPermissionsSpecified(ResourcePermission... resourcePermissions) { if (resourcePermissions == null) { throw new NullPointerException("An array or a sequence of resource permissions are required, but the null value was specified"); } } private void __assertPermissionSpecified(ResourceCreatePermission resourceCreatePermission) { if (resourceCreatePermission == null) { throw new NullPointerException("Resource create permission required, none specified"); } } private void __assertVarargPermissionsSpecified(ResourceCreatePermission... resourceCreatePermissions) { if (resourceCreatePermissions == null) { throw new NullPointerException("An array or a sequence of resource create permissions are required, but the null value was specified"); } } private void __assertPermissionSpecified(DomainCreatePermission domainCreatePermission) { if (domainCreatePermission == null) { throw new NullPointerException("Domain create permission required, none specified"); } } private void __assertVarargPermissionsSpecified(DomainCreatePermission... domainCreatePermissions) { if (domainCreatePermissions == null) { throw new NullPointerException("An array or a sequence of domain create permissions are required, but the null value was specified"); } } private void __assertPermissionSpecified(DomainPermission domainPermission) { if (domainPermission == null) { throw new NullPointerException("Domain permission required, none specified"); } } private void __assertVarargPermissionsSpecified(DomainPermission... 
domainPermissions) { if (domainPermissions == null) { throw new NullPointerException("An array or a sequence of domain permissions are required, but the null value was specified"); } } private void __assertPermissionsSpecified(Set permissionSet) { if (permissionSet == null) { throw new NullPointerException("Set of permissions required, none specified"); } if (permissionSet.contains(null)) { throw new NullPointerException("Set of permissions contains null element"); } } private void __assertPermissionsSetNotEmpty(Set permissionSet) { if (permissionSet.isEmpty()) { throw new IllegalArgumentException("Set of permissions required, empty set specified"); } } private void __assertPermissionNameValid(String permissionName) { if (permissionName == null) { throw new NullPointerException("Permission name may not be null"); } else if (permissionName.trim().isEmpty()) { throw new IllegalArgumentException("Permission name may not be blank"); } if (permissionName.trim().startsWith("*")) { throw new IllegalArgumentException("Permission name may not start with asterisk '*'"); } } private void __assertResourceClassNameValid(String resourceClassName) { if (resourceClassName == null) { throw new NullPointerException("Resource class name may not be null"); } else if (resourceClassName.trim().isEmpty()) { throw new IllegalArgumentException("Resource class name may not be blank"); } } private void __assertPermissionsValid(SQLConnection connection, String resourceClassName, Set<ResourcePermission> resourcePermissions) { final List<String> permissionNames = __getApplicableResourcePermissionNames(connection, resourceClassName); for (ResourcePermission resourcePermission : resourcePermissions) { if (!permissionNames.contains(resourcePermission.getPermissionName())) { if (resourcePermission.isSystemPermission()) { // currently the only invalid system permissions are for unauthenticatable resource classes throw new IllegalArgumentException("Permission " + resourcePermission.getPermissionName() + " not valid for unauthenticatable resource class " + resourceClassName); } else { throw new IllegalArgumentException("Permission: " + resourcePermission.getPermissionName() + " is not defined for resource class: " + resourceClassName); } } } } private void __assertQueryAuthorization(SQLConnection connection, Resource accessorResource) { if (!sessionResource.equals(accessorResource)) { final Set<ResourcePermission> effectiveResourcePermissions = __getEffectiveResourcePermissions(connection, sessionResource, accessorResource); if (!effectiveResourcePermissions.contains(ResourcePermission_QUERY) && !effectiveResourcePermissions.contains(ResourcePermission_QUERY_GRANT) && !effectiveResourcePermissions.contains(ResourcePermission_IMPERSONATE) && !effectiveResourcePermissions.contains(ResourcePermission_IMPERSONATE_GRANT)) { throw NotAuthorizedException.newInstanceForActionOnResource(sessionResource, "query", accessorResource); } } } @SafeVarargs private static <T> Set<T> __getSetWithoutNullsOrDuplicates(T firstElement, T... 
elements) {
      // not null constraint
      if (elements == null) {
         throw new NullPointerException("An array or a sequence of arguments are required, but none were specified");
      }

      final HashSet<T> resultSet = new HashSet<>(elements.length + 1);
      resultSet.add(firstElement);

      for (T element : elements) {
         // non-null elements constraint
         if (element == null) {
            throw new NullPointerException("A " + elements.getClass().getSimpleName()
                                                 + " argument (or sequence of varargs) without null elements is required, but received: "
                                                 + Arrays.asList(elements));
         }

         // duplicate elements are not permitted
         if (!resultSet.add(element)) {
            throw new IllegalArgumentException("Duplicate element: " + element);
         }
      }

      return resultSet;
   }

   // private connection management helper methods

   private SQLConnection __getConnection() {
      if (dataSource != null) {
         try {
            return new SQLConnection(dataSource.getConnection());
         }
         catch (SQLException e) {
            throw new RuntimeException(e);
         }
      }
      else if (connection != null) {
         return new SQLConnection(connection);
      }
      else {
         throw new IllegalStateException("Not initialized! No data source or connection, perhaps missing call to postDeserialize()?");
      }
   }

   private void __closeConnection(SQLConnection connection) {
      // only close the connection if we got it from a pool, otherwise just leave the connection open
      if (dataSource != null) {
         if (connection != null) {
            try {
               connection.close();
            }
            catch (SQLException e) {
               throw new RuntimeException(e);
            }
         }
      }
   }
}
src/main/java/com/acciente/oacc/sql/internal/SQLAccessControlContext.java
/* * Copyright 2009-2015, Acciente LLC * * Acciente LLC licenses this file to you under the * Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in * writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES * OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing * permissions and limitations under the License. */ package com.acciente.oacc.sql.internal; import com.acciente.oacc.AccessControlContext; import com.acciente.oacc.AuthenticationProvider; import com.acciente.oacc.Credentials; import com.acciente.oacc.DomainCreatePermission; import com.acciente.oacc.DomainCreatePermissions; import com.acciente.oacc.DomainPermission; import com.acciente.oacc.DomainPermissions; import com.acciente.oacc.NotAuthenticatedException; import com.acciente.oacc.NotAuthorizedException; import com.acciente.oacc.OaccException; import com.acciente.oacc.Resource; import com.acciente.oacc.ResourceClassInfo; import com.acciente.oacc.ResourceCreatePermission; import com.acciente.oacc.ResourceCreatePermissions; import com.acciente.oacc.ResourcePermission; import com.acciente.oacc.ResourcePermissions; import com.acciente.oacc.sql.SQLProfile; import com.acciente.oacc.sql.internal.persister.DomainPersister; import com.acciente.oacc.sql.internal.persister.GrantDomainCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.GrantDomainCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.GrantDomainPermissionSysPersister; import com.acciente.oacc.sql.internal.persister.GrantGlobalResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.GrantGlobalResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.GrantResourceCreatePermissionPostCreatePersister; import com.acciente.oacc.sql.internal.persister.GrantResourceCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.GrantResourceCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.GrantResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.GrantResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveDomainPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantDomainCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantDomainCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantDomainPermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantGlobalResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantGlobalResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourceCreatePermissionPostCreatePersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourceCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourceCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.NonRecursiveGrantResourcePermissionSysPersister; import 
com.acciente.oacc.sql.internal.persister.NonRecursiveResourcePersister; import com.acciente.oacc.sql.internal.persister.RecursiveDomainPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantDomainCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantDomainCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantDomainPermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantGlobalResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantGlobalResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourceCreatePermissionPostCreatePersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourceCreatePermissionPostCreateSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourceCreatePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourcePermissionPersister; import com.acciente.oacc.sql.internal.persister.RecursiveGrantResourcePermissionSysPersister; import com.acciente.oacc.sql.internal.persister.RecursiveResourcePersister; import com.acciente.oacc.sql.internal.persister.ResourceClassPermissionPersister; import com.acciente.oacc.sql.internal.persister.ResourceClassPersister; import com.acciente.oacc.sql.internal.persister.ResourcePersister; import com.acciente.oacc.sql.internal.persister.SQLConnection; import com.acciente.oacc.sql.internal.persister.SQLStrings; import com.acciente.oacc.sql.internal.persister.id.DomainId; import com.acciente.oacc.sql.internal.persister.id.Id; import com.acciente.oacc.sql.internal.persister.id.ResourceClassId; import com.acciente.oacc.sql.internal.persister.id.ResourcePermissionId; import javax.sql.DataSource; import java.io.Serializable; import java.sql.Connection; import java.sql.SQLException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @SuppressWarnings({"UnusedAssignment", "ThrowFromFinallyBlock"}) public class SQLAccessControlContext implements AccessControlContext, Serializable { // services private DataSource dataSource; private Connection connection; // state private AuthenticationProvider authenticationProvider; private boolean hasDefaultAuthenticationProvider; // The resource that authenticated in this session with a call to one of the authenticate() methods private Resource authenticatedResource; private String authenticatedResourceDomainName; // The resource as which the session's credentials are checked. 
// This would be the same as the resource that initially authenticated - UNLESS another resource is being IMPERSONATED
private Resource sessionResource;
private String sessionResourceDomainName;

// resource ID constants
private static final Long SYSTEM_RESOURCE_ID = Long.valueOf(0);

// domain permissions constants
private static final DomainPermission DomainPermission_CREATE_CHILD_DOMAIN = DomainPermissions.getInstance(DomainPermissions.CREATE_CHILD_DOMAIN, false);
private static final DomainPermission DomainPermission_CREATE_CHILD_DOMAIN_GRANT = DomainPermissions.getInstance(DomainPermissions.CREATE_CHILD_DOMAIN, true);
private static final DomainPermission DomainPermission_DELETE = DomainPermissions.getInstance(DomainPermissions.DELETE, false);
private static final DomainPermission DomainPermission_DELETE_GRANT = DomainPermissions.getInstance(DomainPermissions.DELETE, true);
private static final DomainPermission DomainPermission_SUPER_USER = DomainPermissions.getInstance(DomainPermissions.SUPER_USER, false);
private static final DomainPermission DomainPermission_SUPER_USER_GRANT = DomainPermissions.getInstance(DomainPermissions.SUPER_USER, true);

// resource permissions constants
private static final ResourcePermission ResourcePermission_INHERIT = ResourcePermissions.getInstance(ResourcePermissions.INHERIT, false);
private static final ResourcePermission ResourcePermission_INHERIT_GRANT = ResourcePermissions.getInstance(ResourcePermissions.INHERIT, true);
private static final ResourcePermission ResourcePermission_IMPERSONATE = ResourcePermissions.getInstance(ResourcePermissions.IMPERSONATE, false);
private static final ResourcePermission ResourcePermission_IMPERSONATE_GRANT = ResourcePermissions.getInstance(ResourcePermissions.IMPERSONATE, true);
private static final ResourcePermission ResourcePermission_RESET_CREDENTIALS = ResourcePermissions.getInstance(ResourcePermissions.RESET_CREDENTIALS, false);
private static final ResourcePermission ResourcePermission_RESET_CREDENTIALS_GRANT = ResourcePermissions.getInstance(ResourcePermissions.RESET_CREDENTIALS, true);
private static final ResourcePermission ResourcePermission_DELETE = ResourcePermissions.getInstance(ResourcePermissions.DELETE, false);
private static final ResourcePermission ResourcePermission_DELETE_GRANT = ResourcePermissions.getInstance(ResourcePermissions.DELETE, true);
private static final ResourcePermission ResourcePermission_QUERY = ResourcePermissions.getInstance(ResourcePermissions.QUERY, false);
private static final ResourcePermission ResourcePermission_QUERY_GRANT = ResourcePermissions.getInstance(ResourcePermissions.QUERY, true);

// persisters
private final ResourceClassPersister resourceClassPersister;
private final ResourceClassPermissionPersister resourceClassPermissionPersister;
private final DomainPersister domainPersister;
private final GrantDomainCreatePermissionSysPersister grantDomainCreatePermissionSysPersister;
private final GrantDomainCreatePermissionPostCreateSysPersister grantDomainCreatePermissionPostCreateSysPersister;
private final GrantDomainPermissionSysPersister grantDomainPermissionSysPersister;
private final ResourcePersister resourcePersister;
private final GrantResourceCreatePermissionSysPersister grantResourceCreatePermissionSysPersister;
private final GrantResourceCreatePermissionPostCreateSysPersister grantResourceCreatePermissionPostCreateSysPersister;
private final GrantResourceCreatePermissionPostCreatePersister grantResourceCreatePermissionPostCreatePersister;
private final
GrantResourcePermissionSysPersister grantResourcePermissionSysPersister; private final GrantGlobalResourcePermissionSysPersister grantGlobalResourcePermissionSysPersister; private final GrantResourcePermissionPersister grantResourcePermissionPersister; private final GrantGlobalResourcePermissionPersister grantGlobalResourcePermissionPersister; public static AccessControlContext getAccessControlContext(Connection connection, String schemaName, SQLProfile sqlProfile) { return new SQLAccessControlContext(connection, schemaName, sqlProfile); } public static AccessControlContext getAccessControlContext(DataSource dataSource, String schemaName, SQLProfile sqlProfile) { return new SQLAccessControlContext(dataSource, schemaName, sqlProfile); } public static AccessControlContext getAccessControlContext(Connection connection, String schemaName, SQLProfile sqlProfile, AuthenticationProvider authenticationProvider) { return new SQLAccessControlContext(connection, schemaName, sqlProfile, authenticationProvider); } public static AccessControlContext getAccessControlContext(DataSource dataSource, String schemaName, SQLProfile sqlProfile, AuthenticationProvider authenticationProvider) { return new SQLAccessControlContext(dataSource, schemaName, sqlProfile, authenticationProvider); } public static void preSerialize(AccessControlContext accessControlContext) { if (accessControlContext instanceof SQLAccessControlContext) { SQLAccessControlContext sqlAccessControlContext = (SQLAccessControlContext) accessControlContext; sqlAccessControlContext.__preSerialize(); } } public static void postDeserialize(AccessControlContext accessControlContext, Connection connection) { if (accessControlContext instanceof SQLAccessControlContext) { SQLAccessControlContext sqlAccessControlContext = (SQLAccessControlContext) accessControlContext; sqlAccessControlContext.__postDeserialize(connection); } } public static void postDeserialize(AccessControlContext accessControlContext, DataSource dataSource) { if (accessControlContext instanceof SQLAccessControlContext) { SQLAccessControlContext sqlAccessControlContext = (SQLAccessControlContext) accessControlContext; sqlAccessControlContext.__postDeserialize(dataSource); } } private SQLAccessControlContext(Connection connection, String schemaName, SQLProfile sqlProfile) { this(schemaName, sqlProfile); this.connection = connection; // use the built-in authentication provider when no custom implementation is provided this.authenticationProvider = new SQLPasswordAuthenticationProvider(connection, schemaName, sqlProfile.getSqlDialect()); this.hasDefaultAuthenticationProvider = true; } private SQLAccessControlContext(Connection connection, String schemaName, SQLProfile sqlProfile, AuthenticationProvider authenticationProvider) { this(schemaName, sqlProfile); this.connection = connection; this.authenticationProvider = authenticationProvider; this.hasDefaultAuthenticationProvider = false; } private SQLAccessControlContext(DataSource dataSource, String schemaName, SQLProfile sqlProfile) { this(schemaName, sqlProfile); this.dataSource = dataSource; // use the built-in authentication provider when no custom implementation is provided this.authenticationProvider = new SQLPasswordAuthenticationProvider(dataSource, schemaName, sqlProfile.getSqlDialect()); this.hasDefaultAuthenticationProvider = true; } private SQLAccessControlContext(DataSource dataSource, String schemaName, SQLProfile sqlProfile, AuthenticationProvider authenticationProvider) { this(schemaName, sqlProfile); this.dataSource = 
dataSource; this.authenticationProvider = authenticationProvider; this.hasDefaultAuthenticationProvider = false; } private SQLAccessControlContext(String schemaName, SQLProfile sqlProfile) { // generate all the SQLs the persisters need based on the database dialect SQLStrings sqlStrings = SQLStrings.getSQLStrings(schemaName, sqlProfile); // setup persisters resourceClassPersister = new ResourceClassPersister(sqlProfile, sqlStrings); resourceClassPermissionPersister = new ResourceClassPermissionPersister(sqlProfile, sqlStrings); if (sqlProfile.isRecursiveCTEEnabled()) { grantDomainCreatePermissionSysPersister = new RecursiveGrantDomainCreatePermissionSysPersister(sqlProfile, sqlStrings); grantDomainCreatePermissionPostCreateSysPersister = new RecursiveGrantDomainCreatePermissionPostCreateSysPersister(sqlProfile, sqlStrings); grantDomainPermissionSysPersister = new RecursiveGrantDomainPermissionSysPersister(sqlProfile, sqlStrings); domainPersister = new RecursiveDomainPersister(sqlProfile, sqlStrings); resourcePersister = new RecursiveResourcePersister(sqlProfile, sqlStrings); grantResourceCreatePermissionSysPersister = new RecursiveGrantResourceCreatePermissionSysPersister(sqlProfile, sqlStrings); grantResourceCreatePermissionPostCreateSysPersister = new RecursiveGrantResourceCreatePermissionPostCreateSysPersister(sqlProfile, sqlStrings); grantResourceCreatePermissionPostCreatePersister = new RecursiveGrantResourceCreatePermissionPostCreatePersister(sqlProfile, sqlStrings); grantResourcePermissionSysPersister = new RecursiveGrantResourcePermissionSysPersister(sqlProfile, sqlStrings); grantGlobalResourcePermissionSysPersister = new RecursiveGrantGlobalResourcePermissionSysPersister(sqlProfile, sqlStrings); grantResourcePermissionPersister = new RecursiveGrantResourcePermissionPersister(sqlProfile, sqlStrings); grantGlobalResourcePermissionPersister = new RecursiveGrantGlobalResourcePermissionPersister(sqlProfile, sqlStrings); } else { grantDomainCreatePermissionSysPersister = new NonRecursiveGrantDomainCreatePermissionSysPersister(sqlProfile, sqlStrings); grantDomainCreatePermissionPostCreateSysPersister = new NonRecursiveGrantDomainCreatePermissionPostCreateSysPersister(sqlProfile, sqlStrings); grantDomainPermissionSysPersister = new NonRecursiveGrantDomainPermissionSysPersister(sqlProfile, sqlStrings); domainPersister = new NonRecursiveDomainPersister(sqlProfile, sqlStrings); resourcePersister = new NonRecursiveResourcePersister(sqlProfile, sqlStrings); grantResourceCreatePermissionSysPersister = new NonRecursiveGrantResourceCreatePermissionSysPersister(sqlProfile, sqlStrings); grantResourceCreatePermissionPostCreateSysPersister = new NonRecursiveGrantResourceCreatePermissionPostCreateSysPersister(sqlProfile, sqlStrings); grantResourceCreatePermissionPostCreatePersister = new NonRecursiveGrantResourceCreatePermissionPostCreatePersister(sqlProfile, sqlStrings); grantResourcePermissionSysPersister = new NonRecursiveGrantResourcePermissionSysPersister(sqlProfile, sqlStrings); grantGlobalResourcePermissionSysPersister = new NonRecursiveGrantGlobalResourcePermissionSysPersister(sqlProfile, sqlStrings); grantResourcePermissionPersister = new NonRecursiveGrantResourcePermissionPersister(sqlProfile, sqlStrings); grantGlobalResourcePermissionPersister = new NonRecursiveGrantGlobalResourcePermissionPersister(sqlProfile, sqlStrings); } } private void __preSerialize() { this.dataSource = null; this.connection = null; if (hasDefaultAuthenticationProvider) { ((SQLPasswordAuthenticationProvider) 
authenticationProvider).preSerialize();
   }
}

private void __postDeserialize(DataSource dataSource) {
   this.dataSource = dataSource;
   this.connection = null;
   if (hasDefaultAuthenticationProvider) {
      ((SQLPasswordAuthenticationProvider) authenticationProvider).postDeserialize(dataSource);
   }
}

private void __postDeserialize(Connection connection) {
   this.dataSource = null;
   this.connection = connection;
   if (hasDefaultAuthenticationProvider) {
      ((SQLPasswordAuthenticationProvider) authenticationProvider).postDeserialize(connection);
   }
}

@Override
public void authenticate(Resource resource, Credentials credentials) {
   __assertResourceSpecified(resource);
   __assertCredentialsSpecified(credentials);

   // we deliberately don't resolve the resource before calling the common handler method, to avoid having
   // to keep the connection open across a potentially long call to a third-party authenticationProvider or
   // to avoid having to get a connection twice
   __authenticate(resource, credentials);
}

@Override
public void authenticate(Resource resource) {
   __assertResourceSpecified(resource);

   // we deliberately don't resolve the resource before calling the common handler method, to avoid having
   // to keep the connection open across a potentially long call to a third-party authenticationProvider or
   // to avoid having to get a connection twice
   __authenticate(resource, null);
}

private void __authenticate(Resource resource, Credentials credentials) {
   // before delegating to the authentication provider we do some basic validation
   SQLConnection connection = null;

   final String resourceDomainForResource;
   try {
      connection = __getConnection();

      // resolve the resource here - instead of outside this method - to avoid having
      // to keep the connection open across a potentially long call to a third-party authenticationProvider or
      // to avoid having to get a connection twice
      resource = __resolveResource(connection, resource);

      final ResourceClassInternalInfo resourceClassInternalInfo
            = resourceClassPersister.getResourceClassInfoByResourceId(connection, resource);

      // complain if the resource is not marked as supporting authentication
      if (!resourceClassInternalInfo.isAuthenticatable()) {
         throw new IllegalArgumentException("Resource " + resource
                                                  + " is not of an authenticatable resource class: "
                                                  + resourceClassInternalInfo.getResourceClassName());
      }

      resourceDomainForResource = domainPersister.getResourceDomainNameByResourceId(connection, resource);
   }
   finally {
      __closeConnection(connection);
   }

   // now we delegate to the authentication provider
   if (credentials != null) {
      authenticationProvider.authenticate(resource, credentials);
   }
   else {
      authenticationProvider.authenticate(resource);
   }

   authenticatedResource = resource;
   authenticatedResourceDomainName = resourceDomainForResource;

   sessionResource = authenticatedResource;
   sessionResourceDomainName = authenticatedResourceDomainName;
}

@Override
public void unauthenticate() {
   sessionResource = authenticatedResource = null;
   sessionResourceDomainName = authenticatedResourceDomainName = null;
}

@Override
public void impersonate(Resource resource) {
   SQLConnection connection = null;

   __assertAuthenticated();
   __assertResourceSpecified(resource);

   try {
      connection = __getConnection();

      resource = __resolveResource(connection, resource);
      __assertImpersonatePermission(connection, resource);

      // switch the session credentials to the new resource
      sessionResource = resource;
      sessionResourceDomainName = domainPersister.getResourceDomainNameByResourceId(connection, resource);
   }
   finally {
__closeConnection(connection); } } private void __assertImpersonatePermission(SQLConnection connection, Resource resource) { __assertResourceExists(connection, resource); final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, resource); // complain if the resource is not of an authenticatable resource-class if (!resourceClassInternalInfo.isAuthenticatable()) { throw new IllegalArgumentException("Resource " + resource + " is not of an authenticatable resource class: " + resourceClassInternalInfo.getResourceClassName()); } boolean impersonatePermissionOK = false; // first check direct permissions final Set<ResourcePermission> resourcePermissions = __getEffectiveResourcePermissions(connection, authenticatedResource, resource); if (resourcePermissions.contains(ResourcePermission_IMPERSONATE) || resourcePermissions.contains(ResourcePermission_IMPERSONATE_GRANT)) { impersonatePermissionOK = true; } if (!impersonatePermissionOK) { // next check global direct permissions final String domainName = domainPersister.getResourceDomainNameByResourceId(connection, resource); final Set<ResourcePermission> globalResourcePermissions = __getEffectiveGlobalResourcePermissions(connection, authenticatedResource, resourceClassInternalInfo.getResourceClassName(), domainName); if (globalResourcePermissions.contains(ResourcePermission_IMPERSONATE) || globalResourcePermissions.contains(ResourcePermission_IMPERSONATE_GRANT)) { impersonatePermissionOK = true; } } if (!impersonatePermissionOK) { // finally check for super user permissions if (__isSuperUserOfResource(connection, authenticatedResource, resource)) { impersonatePermissionOK = true; } } if (!impersonatePermissionOK) { throw NotAuthorizedException.newInstanceForActionOnResource(authenticatedResource, "impersonate", resource); } } @Override public void unimpersonate() { sessionResource = authenticatedResource; sessionResourceDomainName = authenticatedResourceDomainName; } @Override public void setCredentials(Resource resource, Credentials newCredentials) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(resource); if (!authenticatedResource.equals(sessionResource)) { throw new IllegalStateException("Calling setCredentials while impersonating another resource is not valid"); } __assertCredentialsSpecified(newCredentials); final ResourceClassInternalInfo resourceClassInfo; final String domainName; try { connection = __getConnection(); resource = __resolveResource(connection, resource); resourceClassInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, resource); if (!resourceClassInfo.isAuthenticatable()) { throw new IllegalArgumentException("Calling setCredentials for an unauthenticatable resource is not valid"); } domainName = domainPersister.getResourceDomainNameByResourceId(connection, resource); // skip permission checks if the authenticated resource is trying to set its own credentials if (!authenticatedResource.equals(resource)) { __assertResetCredentialsResourcePermission(connection, resource, resourceClassInfo.getResourceClassName(), domainName); } } finally { __closeConnection(connection); } authenticationProvider.validateCredentials(resourceClassInfo.getResourceClassName(), domainName, newCredentials); authenticationProvider.setCredentials(resource, newCredentials); } private void __assertResetCredentialsResourcePermission(SQLConnection connection, Resource resource, String resourceClassName, String domainName) { // first 
check direct permissions boolean hasResetCredentialsPermission = false; final Set<ResourcePermission> resourcePermissions = __getEffectiveResourcePermissions(connection, authenticatedResource, resource); if (resourcePermissions.contains(ResourcePermission_RESET_CREDENTIALS) || resourcePermissions.contains(ResourcePermission_RESET_CREDENTIALS_GRANT)) { hasResetCredentialsPermission = true; } if (!hasResetCredentialsPermission) { // next check global direct permissions final Set<ResourcePermission> globalResourcePermissions = __getEffectiveGlobalResourcePermissions(connection, authenticatedResource, resourceClassName, domainName); if (globalResourcePermissions.contains(ResourcePermission_RESET_CREDENTIALS) || globalResourcePermissions.contains(ResourcePermission_RESET_CREDENTIALS_GRANT)) { hasResetCredentialsPermission = true; } } if (!hasResetCredentialsPermission) { // finally check for super user permissions if (__isSuperUserOfResource(connection, authenticatedResource, resource)) { hasResetCredentialsPermission = true; } } if (!hasResetCredentialsPermission) { throw NotAuthorizedException.newInstanceForActionOnResource(authenticatedResource, "reset credentials", resource); } } @Override public void createResourceClass(String resourceClassName, boolean authenticatable, boolean unauthenticatedCreateAllowed) { SQLConnection connection = null; __assertAuthenticated(); __assertAuthenticatedAsSystemResource(); // check if the auth resource is permitted to create resource classes __assertResourceClassNameValid(resourceClassName); try { connection = __getConnection(); resourceClassName = resourceClassName.trim(); // check if this resource class already exists if (resourceClassPersister.getResourceClassId(connection, resourceClassName) != null) { throw new IllegalArgumentException("Duplicate resource class: " + resourceClassName); } resourceClassPersister.addResourceClass(connection, resourceClassName, authenticatable, unauthenticatedCreateAllowed); } finally { __closeConnection(connection); } } @Override public void createResourcePermission(String resourceClassName, String permissionName) { SQLConnection connection = null; __assertAuthenticated(); __assertAuthenticatedAsSystemResource(); // check if the auth resource is permitted to create resource classes __assertResourceClassSpecified(resourceClassName); __assertPermissionNameValid(permissionName); try { connection = __getConnection(); resourceClassName = resourceClassName.trim(); permissionName = permissionName.trim(); // first verify that resource class is defined Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // check if the permission name is already defined! 
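/*
 * Illustrative usage sketch (not part of the original class): how a caller might register a resource
 * class and its permissions through the createResourceClass()/createResourcePermission() methods
 * defined nearby. The schema name, the SQLProfile constant, the "DOCUMENT"/"EDIT"/"VIEW" names and
 * the Resources/PasswordCredentials helpers below are assumptions for the example, not values taken
 * from this file; only the system resource (resource id 0) is authorized to make these calls.
 *
 *    AccessControlContext oacc = SQLAccessControlContext
 *          .getAccessControlContext(dataSource, "OACC", SQLProfile.PostgreSQL_9_3_RECURSIVE);
 *
 *    oacc.authenticate(Resources.getInstance(0), PasswordCredentials.newInstance(oaccRootPwd));
 *
 *    oacc.createResourceClass("DOCUMENT", false, false);   // not authenticatable, no unauthenticated create
 *    oacc.createResourcePermission("DOCUMENT", "EDIT");
 *    oacc.createResourcePermission("DOCUMENT", "VIEW");
 */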
Id<ResourcePermissionId> permissionId = resourceClassPermissionPersister.getResourceClassPermissionId(connection, resourceClassId, permissionName); if (permissionId != null) { throw new IllegalArgumentException("Duplicate permission: " + permissionName + " for resource class: " + resourceClassName); } resourceClassPermissionPersister.addResourceClassPermission(connection, resourceClassId, permissionName); } finally { __closeConnection(connection); } } @Override public void createDomain(String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertDomainSpecified(domainName); try { connection = __getConnection(); domainName = domainName.trim(); __createDomain(connection, domainName, null); } finally { __closeConnection(connection); } } @Override public void createDomain(String domainName, String parentDomainName) { SQLConnection connection = null; __assertAuthenticated(); __assertDomainSpecified(domainName); __assertParentDomainSpecified(parentDomainName); try { connection = __getConnection(); domainName = domainName.trim(); parentDomainName = parentDomainName.trim(); __createDomain(connection, domainName, parentDomainName); } finally { __closeConnection(connection); } } private void __createDomain(SQLConnection connection, String domainName, String parentDomainName) { // we need to check if the currently authenticated resource is allowed to create domains final Set<DomainCreatePermission> domainCreatePermissions = grantDomainCreatePermissionSysPersister.getDomainCreateSysPermissionsIncludeInherited(connection, sessionResource); // if there is at least one permission, then it implies that this resource is allowed to create domains if (domainCreatePermissions.isEmpty()) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "create domain"); } // determine the post create permissions on the new domain final Set<DomainPermission> newDomainPermissions = __getPostCreateDomainPermissions(grantDomainCreatePermissionPostCreateSysPersister .getDomainCreatePostCreateSysPermissionsIncludeInherited(connection, sessionResource)); // check to ensure that the requested domain name does not already exist if (domainPersister.getResourceDomainId(connection, domainName) != null) { throw new IllegalArgumentException("Duplicate domain: " + domainName); } if (parentDomainName == null) { // create the new root domain domainPersister.addResourceDomain(connection, domainName); } else { // check to ensure that the parent domain name exists Id<DomainId> parentDomainId = domainPersister.getResourceDomainId(connection, parentDomainName); if (parentDomainId == null) { throw new IllegalArgumentException("Parent domain: " + parentDomainName + " not found!"); } // we need to check if the currently authenticated resource is allowed to create child domains in the parent Set<DomainPermission> parentDomainPermissions; parentDomainPermissions = __getEffectiveDomainPermissions(connection, sessionResource, parentDomainName); if (!parentDomainPermissions.contains(DomainPermission_CREATE_CHILD_DOMAIN) && !parentDomainPermissions.contains(DomainPermission_CREATE_CHILD_DOMAIN_GRANT) && !parentDomainPermissions.contains(DomainPermission_SUPER_USER) && !parentDomainPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "create child domain in domain: " + parentDomainName); } // create the new child domain domainPersister.addResourceDomain(connection, domainName, parentDomainId); } if (newDomainPermissions.size() > 0) { // 
grant the currently authenticated resource the privileges to the new domain __setDirectDomainPermissions(connection, sessionResource, domainName, newDomainPermissions, true); } } @Override public boolean deleteDomain(String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertDomainSpecified(domainName); try { connection = __getConnection(); return __deleteDomain(connection, domainName); } finally { __closeConnection(connection); } } private boolean __deleteDomain(SQLConnection connection, String domainName) { // short-circuit out of this call if the specified resource does not exist final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { return false; } // check for authorization (using internal has-permission method is ok because querying for session resource) if (!__hasDomainPermissions(connection, sessionResource, domainName, Collections.singleton(DomainPermission_DELETE))) { throw NotAuthorizedException.newInstanceForDomainPermissions(sessionResource, domainName, DomainPermission_DELETE); } // check if the domain is empty (=domain must not contain any resources, and none in any descendant domains) if (!resourcePersister.isDomainEmpty(connection, domainId)) { throw new IllegalArgumentException("Deleting a domain (" + domainName + ") that contains resources directly or in a descendant domain is invalid"); } // remove any permissions the obsolete resource has as an accessor resource grantDomainPermissionSysPersister.removeAllDomainSysPermissions(connection, domainId); grantResourceCreatePermissionPostCreatePersister.removeAllResourceCreatePostCreatePermissions(connection, domainId); grantResourceCreatePermissionPostCreateSysPersister.removeAllResourceCreatePostCreateSysPermissions(connection, domainId); grantResourceCreatePermissionSysPersister.removeAllResourceCreateSysPermissions(connection, domainId); grantGlobalResourcePermissionPersister.removeAllGlobalResourcePermissions(connection, domainId); grantGlobalResourcePermissionSysPersister.removeAllGlobalSysPermissions(connection, domainId); // remove the domain domainPersister.deleteDomain(connection, domainId); return true; } @Override public Resource createResource(String resourceClassName, String domainName) { SQLConnection connection = null; try { connection = __getConnection(); return __createResource(connection, resourceClassName, domainName, null, null); } finally { __closeConnection(connection); } } @Override public Resource createResource(String resourceClassName, String domainName, Credentials credentials) { SQLConnection connection = null; __assertCredentialsSpecified(credentials); try { connection = __getConnection(); return __createResource(connection, resourceClassName, domainName, null, credentials); } finally { __closeConnection(connection); } } @Override public Resource createResource(String resourceClassName, String domainName, String externalId) { SQLConnection connection = null; __assertExternalIdSpecified(externalId); try { connection = __getConnection(); return __createResource(connection, resourceClassName, domainName, externalId, null); } finally { __closeConnection(connection); } } @Override public Resource createResource(String resourceClassName, String domainName, String externalId, Credentials credentials) { SQLConnection connection = null; __assertExternalIdSpecified(externalId); __assertCredentialsSpecified(credentials); try { connection = __getConnection(); return __createResource(connection, resourceClassName, 
domainName, externalId, credentials); } finally { __closeConnection(connection); } } private Resource __createResource(SQLConnection connection, String resourceClassName, String domainName, String externalId, Credentials credentials) { __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); // validate the resource class resourceClassName = resourceClassName.trim(); final ResourceClassInternalInfo resourceClassInternalInfo = __getResourceClassInternalInfo(connection, resourceClassName); if (!resourceClassInternalInfo.isUnauthenticatedCreateAllowed()) { __assertAuthenticated(); } if (resourceClassInternalInfo.isAuthenticatable()) { // if this resource class is authenticatable, then validate the credentials authenticationProvider.validateCredentials(resourceClassName, domainName, credentials); } else { // if this resource class is NOT authenticatable, then specifying credentials is invalid __assertCredentialsNotSpecified(credentials); } // validate the domain final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // check to ensure that the specified external id does not already exist if (externalId != null && resourcePersister.resolveResourceByExternalId(connection, externalId) != null) { throw new IllegalArgumentException("External id is not unique: " + externalId); } // we first check the create permissions final Set<ResourcePermission> newResourcePermissions; // the only way we can have come here with _sessionResource == null is // when non-authenticated create is allowed for this resource class if (sessionResource == null) { // if this session is unauthenticated then give the new resource all available // permissions to itself newResourcePermissions = new HashSet<>(); for (String permissionName : resourceClassPermissionPersister.getPermissionNames(connection, resourceClassName)) { newResourcePermissions.add(ResourcePermissions.getInstance(permissionName, true)); } newResourcePermissions.add(ResourcePermissions.getInstance(ResourcePermissions.DELETE, true)); newResourcePermissions.add(ResourcePermissions.getInstance(ResourcePermissions.QUERY, true)); if (resourceClassInternalInfo.isAuthenticatable()) { newResourcePermissions.add(ResourcePermissions.getInstance(ResourcePermissions.RESET_CREDENTIALS, true)); newResourcePermissions.add(ResourcePermissions.getInstance(ResourcePermissions.IMPERSONATE, true)); } } else { final Set<ResourceCreatePermission> resourceCreatePermissions; boolean createPermissionOK = false; resourceCreatePermissions = __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); newResourcePermissions = __getPostCreateResourcePermissions(resourceCreatePermissions); if (resourceCreatePermissions.size() > 0) { createPermissionOK = true; } // if that did not work, next we check the session resource has super user permissions // to the domain of the new resource if (!createPermissionOK) { createPermissionOK = __isSuperUserOfDomain(connection, sessionResource, domainName); } if (!createPermissionOK) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "create resource of resource class " + resourceClassName); } } // create the new resource final Resource newResource = resourcePersister.createResource(connection, Id.<ResourceClassId>from(resourceClassInternalInfo .getResourceClassId()), domainId, externalId); // set 
permissions on the new resource, if applicable if (newResourcePermissions != null && newResourcePermissions.size() > 0) { if (sessionResource != null) { __setDirectResourcePermissions(connection, sessionResource, newResource, newResourcePermissions, sessionResource, true); } else { // if this session is unauthenticated the permissions are granted to the newly created resource __setDirectResourcePermissions(connection, newResource, newResource, newResourcePermissions, newResource, true); } } if (credentials != null) { authenticationProvider.setCredentials(newResource, credentials); } return newResource; } @Override public boolean deleteResource(Resource obsoleteResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(obsoleteResource); try { connection = __getConnection(); // we deliberately don't resolve the resource before calling the handler method, because the // delete operation should be idempotent and return false if the resource does not resolve/exist return __deleteResource(connection, obsoleteResource); } finally { __closeConnection(connection); } } private boolean __deleteResource(SQLConnection connection, Resource obsoleteResource) { try { obsoleteResource = __resolveResource(connection, obsoleteResource); } catch (IllegalArgumentException e) { // short-circuit out of this call if the specified resource does not exist/resolve // NOTE that this will still throw an exception if a resource does not match its // specified external id if (e.getMessage().toLowerCase().contains("not found")) { return false; } throw e; } // check for authorization if (!__isSuperUserOfResource(connection, sessionResource, obsoleteResource)) { final Set<ResourcePermission> sessionResourcePermissions = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, obsoleteResource); if (!sessionResourcePermissions.contains(ResourcePermission_DELETE) && !sessionResourcePermissions.contains(ResourcePermission_DELETE_GRANT)) { throw NotAuthorizedException.newInstanceForActionOnResource(sessionResource, "delete", obsoleteResource); } } // remove the resource's credentials, if necessary final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, obsoleteResource); if (resourceClassInternalInfo.isAuthenticatable()) { authenticationProvider.deleteCredentials(obsoleteResource); } // remove any permissions the obsolete resource has as an accessor resource grantDomainCreatePermissionPostCreateSysPersister.removeDomainCreatePostCreateSysPermissions(connection, obsoleteResource); grantDomainCreatePermissionSysPersister.removeDomainCreateSysPermissions(connection, obsoleteResource); grantDomainPermissionSysPersister.removeAllDomainSysPermissions(connection, obsoleteResource); grantResourceCreatePermissionPostCreatePersister.removeAllResourceCreatePostCreatePermissions(connection, obsoleteResource); grantResourceCreatePermissionPostCreateSysPersister.removeAllResourceCreatePostCreateSysPermissions(connection, obsoleteResource); grantResourceCreatePermissionSysPersister.removeAllResourceCreateSysPermissions(connection, obsoleteResource); grantGlobalResourcePermissionPersister.removeAllGlobalResourcePermissions(connection, obsoleteResource); grantGlobalResourcePermissionSysPersister.removeAllGlobalSysPermissions(connection, obsoleteResource); // remove any permissions the obsolete resource has as an accessor resource OR as an accessed resource 
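/*
 * Illustrative usage sketch (not part of the original class): creating and deleting a resource via the
 * public createResource()/deleteResource() overloads that wrap __createResource() and __deleteResource().
 * The "USER" class, "sales" domain, external id and the PasswordCredentials helper are assumptions for
 * the example; "USER" is presumed to have been registered as an authenticatable resource class beforehand.
 *
 *    Resource user = oacc.createResource("USER", "sales", "jane@example.com",
 *                                        PasswordCredentials.newInstance(janePwd));
 *
 *    // later: delete is idempotent - it returns false if the resource no longer exists
 *    boolean removed = oacc.deleteResource(user);
 */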
grantResourcePermissionPersister.removeAllResourcePermissionsAsAccessorOrAccessed(connection, obsoleteResource); grantResourcePermissionSysPersister.removeAllResourceSysPermissionsAsAccessorOrAccessed(connection, obsoleteResource); // remove the resource resourcePersister.deleteResource(connection, obsoleteResource); // handle special case where deleted resource is the session or authenticated resource if (authenticatedResource.equals(obsoleteResource)) { unauthenticate(); } else if (sessionResource.equals(obsoleteResource)) { unimpersonate(); } return true; } @Override public void setDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> permissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionsSpecified(permissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __setDirectDomainPermissions(connection, accessorResource, domainName, permissions, false); } finally { __closeConnection(connection); } } private void __setDirectDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName, Set<DomainPermission> requestedDomainPermissions, boolean newDomainMode) { // determine the domain ID of the domain, for use in the grant below Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // validate requested set is not null; empty set is valid and would remove any direct domain permissions if (requestedDomainPermissions == null) { throw new IllegalArgumentException("Set of requested domain permissions may not be null"); } if (!newDomainMode) { __assertResourceExists(connection, accessorResource); // check if the grantor (=session resource) has permissions to grant the requested permissions final Set<DomainPermission> grantorPermissions = __getEffectiveDomainPermissions(connection, sessionResource, domainName); // check if the grantor (=session resource) has super user permissions to the target domain if (!grantorPermissions.contains(DomainPermission_SUPER_USER) && !grantorPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { final Set<DomainPermission> directAccessorPermissions = __getDirectDomainPermissions(connection, accessorResource, domainId); final Set<DomainPermission> requestedAddPermissions = __subtract(requestedDomainPermissions, directAccessorPermissions); if (!requestedAddPermissions.isEmpty()) { final Set<DomainPermission> unauthorizedAddPermissions; unauthorizedAddPermissions = __subtractDomainPermissionsIfGrantableFrom(requestedAddPermissions, grantorPermissions); if (unauthorizedAddPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "add the following domain permission(s): " + unauthorizedAddPermissions); } } final Set<DomainPermission> requestedRemovePermissions = __subtract(directAccessorPermissions, requestedDomainPermissions); if (!requestedRemovePermissions.isEmpty()) { final Set<DomainPermission> unauthorizedRemovePermissions; unauthorizedRemovePermissions = __subtractDomainPermissionsIfGrantableFrom(requestedRemovePermissions, grantorPermissions); if (unauthorizedRemovePermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "remove the following domain permission(s): " + unauthorizedRemovePermissions); } } } // revoke any existing 
permissions that accessor to has to this domain directly grantDomainPermissionSysPersister.removeDomainSysPermissions(connection, accessorResource, domainId); } // add the new permissions grantDomainPermissionSysPersister.addDomainSysPermissions(connection, accessorResource, sessionResource, domainId, requestedDomainPermissions); } private Set<DomainPermission> __getDirectDomainPermissions(SQLConnection connection, Resource accessorResource, Id<DomainId> domainId) { // only system permissions are possible on a domain return grantDomainPermissionSysPersister.getDomainSysPermissions(connection, accessorResource, domainId); } private Set<DomainPermission> __subtractDomainPermissionsIfGrantableFrom(Set<DomainPermission> candidatePermissionSet, Set<DomainPermission> grantorPermissionSet) { Set<DomainPermission> differenceSet = new HashSet<>(candidatePermissionSet); for (DomainPermission candidatePermission : candidatePermissionSet) { for (DomainPermission grantorPermission : grantorPermissionSet) { if (candidatePermission.isGrantableFrom(grantorPermission)) { differenceSet.remove(candidatePermission); break; } } } return differenceSet; } @Override public void grantDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionsSpecified(domainPermissions); __assertPermissionsSetNotEmpty(domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectDomainPermissions(connection, accessorResource, domainName, domainPermissions); } finally { __closeConnection(connection); } } @Override public void grantDomainPermissions(Resource accessorResource, String domainName, DomainPermission domainPermission, DomainPermission... 
domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionSpecified(domainPermission); __assertVarargPermissionsSpecified(domainPermissions); final Set<DomainPermission> requestedDomainPermissions = __getSetWithoutNullsOrDuplicates(domainPermission, domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectDomainPermissions(connection, accessorResource, domainName, requestedDomainPermissions); } finally { __closeConnection(connection); } } private void __grantDirectDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName, Set<DomainPermission> requestedDomainPermissions) { __assertUniqueDomainPermissionsNames(requestedDomainPermissions); // determine the domain ID of the domain, for use in the grant below Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // validate requested set is not null; empty set is valid and would remove any direct domain permissions if (requestedDomainPermissions == null) { throw new IllegalArgumentException("Set of requested domain permissions may not be null"); } __assertResourceExists(connection, accessorResource); // check if the grantor (=session resource) has permissions to grant the requested permissions final Set<DomainPermission> grantorPermissions = __getEffectiveDomainPermissions(connection, sessionResource, domainName); // check if the grantor (=session resource) has super user permissions to the target domain if (!grantorPermissions.contains(DomainPermission_SUPER_USER) && !grantorPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { final Set<DomainPermission> unauthorizedPermissions = __subtractDomainPermissionsIfGrantableFrom(requestedDomainPermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "grant the following domain permission(s): " + unauthorizedPermissions); } } final Set<DomainPermission> directAccessorPermissions = __getDirectDomainPermissions(connection, accessorResource, domainId); final Set<DomainPermission> addPermissions = new HashSet<>(requestedDomainPermissions.size()); final Set<DomainPermission> updatePermissions = new HashSet<>(requestedDomainPermissions.size()); for (DomainPermission requestedPermission : requestedDomainPermissions) { boolean existingPermission = false; for (DomainPermission existingDirectPermission : directAccessorPermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // we found a match by permission name - now let's see if we need to update existing or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && !requestedPermission.isGrantableFrom(existingDirectPermission)) { // requested permission has higher granting rights than the already existing direct permission, // so we need to update it updatePermissions.add(requestedPermission); } existingPermission = true; break; } } if (!existingPermission) { // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it addPermissions.add(requestedPermission); } } // update any existing permissions that accessor to has to this domain directly 
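/*
 * Illustrative usage sketch (not part of the original class): the additive grantDomainPermissions()
 * path handled above versus the replace-all setDomainPermissions() path handled by
 * __setDirectDomainPermissions(). The accessor resource and the "sales" domain name are assumptions
 * for the example.
 *
 *    // adds to (or upgrades) the accessor's existing direct permissions on the domain
 *    oacc.grantDomainPermissions(user, "sales",
 *                                DomainPermissions.getInstance(DomainPermissions.CREATE_CHILD_DOMAIN, false));
 *
 *    // replaces the accessor's direct permissions on the domain with exactly this set
 *    oacc.setDomainPermissions(user, "sales",
 *                              Collections.singleton(DomainPermissions.getInstance(DomainPermissions.DELETE, true)));
 */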
grantDomainPermissionSysPersister.updateDomainSysPermissions(connection, accessorResource, sessionResource, domainId, updatePermissions); // add the new permissions grantDomainPermissionSysPersister.addDomainSysPermissions(connection, accessorResource, sessionResource, domainId, addPermissions); } private void __assertUniqueDomainPermissionsNames(Set<DomainPermission> domainPermissions) { final Set<String> uniquePermissionNames = new HashSet<>(domainPermissions.size()); for (final DomainPermission domainPermissionPermission : domainPermissions) { if (uniquePermissionNames.contains(domainPermissionPermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + domainPermissionPermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniquePermissionNames.add(domainPermissionPermission.getPermissionName()); } } } @Override public void revokeDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionsSpecified(domainPermissions); __assertPermissionsSetNotEmpty(domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectDomainPermissions(connection, accessorResource, domainName, domainPermissions); } finally { __closeConnection(connection); } } @Override public void revokeDomainPermissions(Resource accessorResource, String domainName, DomainPermission domainPermission, DomainPermission... domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionSpecified(domainPermission); __assertVarargPermissionsSpecified(domainPermissions); final Set<DomainPermission> requestedDomainPermissions = __getSetWithoutNullsOrDuplicates(domainPermission, domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectDomainPermissions(connection, accessorResource, domainName, requestedDomainPermissions); } finally { __closeConnection(connection); } } private void __revokeDirectDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName, Set<DomainPermission> requestedDomainPermissions) { __assertUniqueDomainPermissionsNames(requestedDomainPermissions); // determine the domain ID of the domain, for use in the revocation below Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // validate requested set is not null if (requestedDomainPermissions == null) { throw new IllegalArgumentException("Set of requested domain permissions to be revoked may not be null"); } __assertResourceExists(connection, accessorResource); final Set<DomainPermission> grantorPermissions = __getEffectiveDomainPermissions(connection, sessionResource, domainName); // check if the grantor (=session resource) has super user permissions to the target domain or // has permissions to grant the requested permissions if (!grantorPermissions.contains(DomainPermission_SUPER_USER) && !grantorPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { final Set<DomainPermission> unauthorizedPermissions = 
__subtractDomainPermissionsIfGrantableFrom(requestedDomainPermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "revoke the following domain permission(s): " + unauthorizedPermissions); } } final Set<DomainPermission> directAccessorPermissions = __getDirectDomainPermissions(connection, accessorResource, domainId); final Set<DomainPermission> removePermissions = new HashSet<>(requestedDomainPermissions.size()); for (DomainPermission requestedPermission : requestedDomainPermissions) { for (DomainPermission existingDirectPermission : directAccessorPermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // requested permission has same name and regardless of granting rights we need to remove it removePermissions.add(requestedPermission); break; } } } // remove any existing permissions that accessor has to this domain directly grantDomainPermissionSysPersister.removeDomainSysPermissions(connection, accessorResource, domainId, removePermissions); } @Override public Set<DomainPermission> getDomainPermissions(Resource accessorResource, String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } return __getDirectDomainPermissions(connection, accessorResource, domainId); } finally { __closeConnection(connection); } } @Override public Map<String, Set<DomainPermission>> getDomainPermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __collapseDomainPermissions(grantDomainPermissionSysPersister.getDomainSysPermissions(connection, accessorResource)); } finally { __closeConnection(connection); } } @Override public Set<DomainPermission> getEffectiveDomainPermissions(Resource accessorResource, String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveDomainPermissions(connection, accessorResource, domainName); } finally { __closeConnection(connection); } } private Set<DomainPermission> __getEffectiveDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName) { Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } return __getEffectiveDomainPermissions(connection, accessorResource, domainId); } private Set<DomainPermission> __getEffectiveDomainPermissions(SQLConnection connection, Resource accessorResource, Id<DomainId> domainId) { // only system permissions are possible on a domain final Set<DomainPermission> 
domainSysPermissionsIncludingInherited = grantDomainPermissionSysPersister.getDomainSysPermissionsIncludeInherited(connection, accessorResource, domainId); for (DomainPermission permission : domainSysPermissionsIncludingInherited) { // check if super-user privileges apply and construct set of all possible permissions, if necessary if (DomainPermissions.SUPER_USER.equals(permission.getPermissionName())) { return __getApplicableDomainPermissions(); } } return __collapseDomainPermissions(domainSysPermissionsIncludingInherited); } private Set<DomainPermission> __collapseDomainPermissions(Set<DomainPermission> domainPermissions) { final Set<DomainPermission> collapsedPermissions = new HashSet<>(domainPermissions); for (DomainPermission permission : domainPermissions) { for (DomainPermission grantEquivalentPermission : domainPermissions) { if (permission.isGrantableFrom(grantEquivalentPermission) && !permission.equals(grantEquivalentPermission)) { collapsedPermissions.remove(permission); break; } } } return collapsedPermissions; } @Override public Map<String, Set<DomainPermission>> getEffectiveDomainPermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveDomainPermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Set<DomainPermission>> __getEffectiveDomainPermissionsMap(SQLConnection connection, Resource accessorResource) { final Map<String, Set<DomainPermission>> domainSysPermissionsIncludingInherited = grantDomainPermissionSysPersister.getDomainSysPermissionsIncludeInherited(connection, accessorResource); for (String domainName : domainSysPermissionsIncludingInherited.keySet()) { final Set<DomainPermission> domainPermissions = domainSysPermissionsIncludingInherited.get(domainName); if (domainPermissions.contains(DomainPermission_SUPER_USER) || domainPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { domainSysPermissionsIncludingInherited.put(domainName, __getApplicableDomainPermissions()); } } return __collapseDomainPermissions(domainSysPermissionsIncludingInherited); } private static Set<DomainPermission> __getApplicableDomainPermissions() { Set<DomainPermission> superDomainPermissions = new HashSet<>(3); superDomainPermissions.add(DomainPermission_SUPER_USER_GRANT); superDomainPermissions.add(DomainPermission_CREATE_CHILD_DOMAIN_GRANT); superDomainPermissions.add(DomainPermission_DELETE_GRANT); return superDomainPermissions; } private Map<String, Set<DomainPermission>> __collapseDomainPermissions(Map<String, Set<DomainPermission>> domainPermissionsMap) { Map<String, Set<DomainPermission>> collapsedDomainPermissionsMap = new HashMap<>(domainPermissionsMap.size()); for (String domainName : domainPermissionsMap.keySet()) { collapsedDomainPermissionsMap.put(domainName, __collapseDomainPermissions(domainPermissionsMap.get(domainName))); } return collapsedDomainPermissionsMap; } @Override public void setDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, 
accessorResource); __setDirectDomainCreatePermissions(connection, accessorResource, domainCreatePermissions); } finally { __closeConnection(connection); } } private void __setDirectDomainCreatePermissions(SQLConnection connection, Resource accessorResource, Set<DomainCreatePermission> requestedDomainCreatePermissions) { __assertSetContainsDomainCreateSystemPermission(requestedDomainCreatePermissions); __assertUniqueSystemOrPostCreateDomainPermissionNames(requestedDomainCreatePermissions); __assertResourceExists(connection, accessorResource); // check if grantor (=session resource) is authorized to add/remove requested permissions final Set<DomainCreatePermission> grantorPermissions = __getEffectiveDomainCreatePermissions(connection, sessionResource); final Set<DomainCreatePermission> directAccessorPermissions = __getDirectDomainCreatePermissions(connection, accessorResource); final Set<DomainCreatePermission> requestedAddPermissions = __subtract(requestedDomainCreatePermissions, directAccessorPermissions); if (!requestedAddPermissions.isEmpty()) { final Set<DomainCreatePermission> unauthorizedAddPermissions = __subtractDomainCreatePermissionsIfGrantableFrom(requestedAddPermissions, grantorPermissions); if (unauthorizedAddPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "add the following domain create permission(s): " + unauthorizedAddPermissions); } } final Set<DomainCreatePermission> requestedRemovePermissions = __subtract(directAccessorPermissions, requestedDomainCreatePermissions); if (!requestedRemovePermissions.isEmpty()) { final Set<DomainCreatePermission> unauthorizedRemovePermissions = __subtractDomainCreatePermissionsIfGrantableFrom(requestedRemovePermissions, grantorPermissions); if (unauthorizedRemovePermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "remove the following domain create permission(s): " + unauthorizedRemovePermissions); } } // NOTE: our current data model only support system permissions for domains // revoke any existing domain system permission (*CREATE) this accessor has to this domain grantDomainCreatePermissionSysPersister.removeDomainCreateSysPermissions(connection, accessorResource); // revoke any existing domain post create system permissions this accessor has to this domain grantDomainCreatePermissionPostCreateSysPersister.removeDomainCreatePostCreateSysPermissions(connection, accessorResource); // add the domain system permissions (*CREATE) grantDomainCreatePermissionSysPersister.addDomainCreateSysPermissions(connection, accessorResource, sessionResource, requestedDomainCreatePermissions); // add the domain post create system permissions grantDomainCreatePermissionPostCreateSysPersister .addDomainCreatePostCreateSysPermissions(connection, accessorResource, sessionResource, requestedDomainCreatePermissions); } private void __assertSetContainsDomainCreateSystemPermission(Set<DomainCreatePermission> domainCreatePermissions) { if (!domainCreatePermissions.isEmpty()) { // if at least one permission is specified, then there must be a *CREATE permission in the set if (!__setContainsDomainCreateSystemPermission(domainCreatePermissions)) { throw new IllegalArgumentException("Domain create permission *CREATE must be specified"); } } } private boolean __setContainsDomainCreateSystemPermission(Set<DomainCreatePermission> domainCreatePermissions) { for (final DomainCreatePermission domainCreatePermission : domainCreatePermissions) { if (domainCreatePermission.isSystemPermission() 
&& DomainCreatePermissions.CREATE.equals(domainCreatePermission.getPermissionName())) { return true; } } return false; } private Set<DomainCreatePermission> __getDirectDomainCreatePermissions(SQLConnection connection, Resource accessorResource) { final Set<DomainCreatePermission> domainCreatePermissions = new HashSet<>(); domainCreatePermissions .addAll(grantDomainCreatePermissionSysPersister.getDomainCreateSysPermissions(connection, accessorResource)); domainCreatePermissions .addAll(grantDomainCreatePermissionPostCreateSysPersister.getDomainCreatePostCreateSysPermissions( connection, accessorResource)); return domainCreatePermissions; } private Set<DomainCreatePermission> __subtractDomainCreatePermissionsIfGrantableFrom(Set<DomainCreatePermission> candidatePermissionSet, Set<DomainCreatePermission> grantorPermissionSet) { Set<DomainCreatePermission> differenceSet = new HashSet<>(candidatePermissionSet); for (DomainCreatePermission candidatePermission : candidatePermissionSet) { for (DomainCreatePermission grantorPermission : grantorPermissionSet) { if (candidatePermission.isGrantableFrom(grantorPermission)) { differenceSet.remove(candidatePermission); break; } } } return differenceSet; } @Override public void grantDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainCreatePermissions); __assertPermissionsSetNotEmpty(domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectDomainCreatePermissions(connection, accessorResource, domainCreatePermissions); } finally { __closeConnection(connection); } } @Override public void grantDomainCreatePermissions(Resource accessorResource, DomainCreatePermission domainCreatePermission, DomainCreatePermission... 
domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionSpecified(domainCreatePermission); __assertVarargPermissionsSpecified(domainCreatePermissions); final Set<DomainCreatePermission> requestedDomainCreatePermissions = __getSetWithoutNullsOrDuplicates(domainCreatePermission, domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectDomainCreatePermissions(connection, accessorResource, requestedDomainCreatePermissions); } finally { __closeConnection(connection); } } private void __grantDirectDomainCreatePermissions(SQLConnection connection, Resource accessorResource, Set<DomainCreatePermission> requestedDomainCreatePermissions) { __assertUniqueSystemOrPostCreateDomainPermissionNames(requestedDomainCreatePermissions); __assertResourceExists(connection, accessorResource); // check if grantor (=session resource) is authorized to add requested permissions final Set<DomainCreatePermission> grantorPermissions = __getEffectiveDomainCreatePermissions(connection, sessionResource); final Set<DomainCreatePermission> unauthorizedPermissions = __subtractDomainCreatePermissionsIfGrantableFrom(requestedDomainCreatePermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "grant the following domain create permission(s): " + unauthorizedPermissions); } final Set<DomainCreatePermission> directAccessorPermissions = __getDirectDomainCreatePermissions(connection, accessorResource); if (directAccessorPermissions.isEmpty()) { // our invariant is that a resource's direct create permissions must include the *CREATE system permission; // if there are no direct create permissions, then the requested permissions to be granted need to include *CREATE __assertSetContainsDomainCreateSystemPermission(requestedDomainCreatePermissions); } final Set<DomainCreatePermission> addPermissions = new HashSet<>(requestedDomainCreatePermissions.size()); final Set<DomainCreatePermission> updatePermissions = new HashSet<>(requestedDomainCreatePermissions.size()); for (DomainCreatePermission requestedPermission : requestedDomainCreatePermissions) { boolean existingPermission = false; if (requestedPermission.isSystemPermission()) { for (DomainCreatePermission existingDirectPermission : directAccessorPermissions) { if (existingDirectPermission.isSystemPermission() && requestedPermission.getSystemPermissionId() == existingDirectPermission.getSystemPermissionId()) { // we found a match by sysId - now let's see if we need to update existing or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && !requestedPermission.isGrantableFrom(existingDirectPermission)) { // requested permission has higher granting rights than // the already existing direct permission, so we need to update it updatePermissions.add(requestedPermission); } existingPermission = true; break; } } } else { final DomainPermission requestedPostCreateDomainPermission = requestedPermission.getPostCreateDomainPermission(); for (DomainCreatePermission existingDirectPermission : directAccessorPermissions) { if (!existingDirectPermission.isSystemPermission()) { final DomainPermission existingPostCreateDomainPermission = existingDirectPermission.getPostCreateDomainPermission(); if (requestedPostCreateDomainPermission.equalsIgnoreGrant(existingPostCreateDomainPermission)) { // found a match in 
name - let's check compatibility first
                     if (requestedPermission.isWithGrant() != requestedPostCreateDomainPermission.isWithGrant()
                           && existingDirectPermission.isWithGrant() != existingPostCreateDomainPermission.isWithGrant()
                           && requestedPermission.isWithGrant() != existingDirectPermission.isWithGrant()) {
                        // the requested permission is incompatible with the existing permission because we can't
                        // perform grant operations (a)/G -> (a/G) or (a/G) -> (a)/G without removing either the
                        // create or post-create granting option
                        throw new IllegalArgumentException("Requested create permissions " + requestedDomainCreatePermissions
                                                                 + " are incompatible with existing create permissions "
                                                                 + directAccessorPermissions);
                     }

                     // now let's see if we need to update existing permission or leave it unchanged
                     if (!requestedPermission.equals(existingDirectPermission)
                           && ((requestedPermission.isWithGrant() && requestedPostCreateDomainPermission.isWithGrant())
                                 || (!existingDirectPermission.isWithGrant() && !existingPostCreateDomainPermission.isWithGrant()))) {
                        // the two permissions match in name, but the requested has higher granting rights,
                        // so we need to update
                        updatePermissions.add(requestedPermission);
                     }

                     // because we found a match in name, we can skip comparing requested against other existing permissions
                     existingPermission = true;
                     break;
                  }
               }
            }
         }

         if (!existingPermission) {
            // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it
            addPermissions.add(requestedPermission);
         }
      }

      // update the domain system permissions (*CREATE), if necessary
      grantDomainCreatePermissionSysPersister.updateDomainCreateSysPermissions(connection,
                                                                               accessorResource,
                                                                               sessionResource,
                                                                               updatePermissions);

      // update the domain post create system permissions, if necessary
      grantDomainCreatePermissionPostCreateSysPersister
            .updateDomainCreatePostCreateSysPermissions(connection, accessorResource, sessionResource, updatePermissions);

      // add any new domain system permissions (*CREATE)
      grantDomainCreatePermissionSysPersister.addDomainCreateSysPermissions(connection,
                                                                            accessorResource,
                                                                            sessionResource,
                                                                            addPermissions);

      // add any new domain post create system permissions
      grantDomainCreatePermissionPostCreateSysPersister
            .addDomainCreatePostCreateSysPermissions(connection, accessorResource, sessionResource, addPermissions);
   }

   private void __assertUniqueSystemOrPostCreateDomainPermissionNames(Set<DomainCreatePermission> domainCreatePermissions) {
      final Set<String> uniqueSystemPermissionNames = new HashSet<>(domainCreatePermissions.size());
      final Set<String> uniquePostCreatePermissionNames = new HashSet<>(domainCreatePermissions.size());

      for (final DomainCreatePermission domainCreatePermission : domainCreatePermissions) {
         if (domainCreatePermission.isSystemPermission()) {
            if (uniqueSystemPermissionNames.contains(domainCreatePermission.getPermissionName())) {
               throw new IllegalArgumentException("Duplicate permission: " + domainCreatePermission.getPermissionName()
                                                        + " that only differs in 'withGrant' option");
            }
            else {
               uniqueSystemPermissionNames.add(domainCreatePermission.getPermissionName());
            }
         }
         else {
            final DomainPermission postCreateDomainPermission = domainCreatePermission.getPostCreateDomainPermission();

            if (uniquePostCreatePermissionNames.contains(postCreateDomainPermission.getPermissionName())) {
               throw new IllegalArgumentException("Duplicate permission: " + postCreateDomainPermission.getPermissionName()
                                                        + " that only differs in 'withGrant' option");
            }
            else {
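               // first occurrence of this post-create permission name - record it so a later duplicate
               // that only differs in the grant option is rejected above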
uniquePostCreatePermissionNames.add(postCreateDomainPermission.getPermissionName()); } } } } @Override public void revokeDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainCreatePermissions); __assertPermissionsSetNotEmpty(domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectDomainCreatePermissions(connection, accessorResource, domainCreatePermissions); } finally { __closeConnection(connection); } } @Override public void revokeDomainCreatePermissions(Resource accessorResource, DomainCreatePermission domainCreatePermission, DomainCreatePermission... domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionSpecified(domainCreatePermission); __assertVarargPermissionsSpecified(domainCreatePermissions); final Set<DomainCreatePermission> requestedDomainCreatePermissions = __getSetWithoutNullsOrDuplicates(domainCreatePermission, domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectDomainCreatePermissions(connection, accessorResource, requestedDomainCreatePermissions); } finally { __closeConnection(connection); } } private void __revokeDirectDomainCreatePermissions(SQLConnection connection, Resource accessorResource, Set<DomainCreatePermission> requestedDomainCreatePermissions) { __assertUniqueSystemOrPostCreateDomainPermissionNames(requestedDomainCreatePermissions); __assertResourceExists(connection, accessorResource); // check if grantor (=session resource) is authorized to revoke requested permissions final Set<DomainCreatePermission> grantorPermissions = __getEffectiveDomainCreatePermissions(connection, sessionResource); final Set<DomainCreatePermission> unauthorizedPermissions = __subtractDomainCreatePermissionsIfGrantableFrom(requestedDomainCreatePermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "revoke the following domain create permission(s): " + unauthorizedPermissions); } final Set<DomainCreatePermission> directAccessorPermissions = __getDirectDomainCreatePermissions(connection, accessorResource); if ((directAccessorPermissions.size() > requestedDomainCreatePermissions.size()) && __setContainsDomainCreateSystemPermission(requestedDomainCreatePermissions)) { // our invariant is that a resource's direct create permissions must include the *CREATE system permission; // if after revoking the requested permissions, the remaining set wouldn't include the *CREATE, we'd have a problem throw new IllegalArgumentException( "Attempt to revoke a subset of domain create permissions that includes the *CREATE system permission: " + requestedDomainCreatePermissions); } final Set<DomainCreatePermission> removePermissions = new HashSet<>(requestedDomainCreatePermissions.size()); for (DomainCreatePermission requestedPermission : requestedDomainCreatePermissions) { if (requestedPermission.isSystemPermission()) { for (DomainCreatePermission existingDirectPermission : directAccessorPermissions) { if (existingDirectPermission.isSystemPermission() && requestedPermission.getSystemPermissionId() == existingDirectPermission.getSystemPermissionId()) { // 
requested permission has same system Id as an already existing direct permission, so remove it removePermissions.add(requestedPermission); break; } } } else { final DomainPermission requestedPostCreateDomainPermission = requestedPermission.getPostCreateDomainPermission(); for (DomainCreatePermission existingDirectPermission : directAccessorPermissions) { if (!existingDirectPermission.isSystemPermission()) { // now let's look at the post-create permissions if (requestedPostCreateDomainPermission.equalsIgnoreGrant(existingDirectPermission.getPostCreateDomainPermission())) { // requested post-create permission has same name as an already existing direct permission, so remove it removePermissions.add(requestedPermission); break; } } } } } // remove the domain system permissions (*CREATE), if necessary grantDomainCreatePermissionSysPersister.removeDomainCreateSysPermissions(connection, accessorResource, removePermissions); // remove the domain post create system permissions, if necessary grantDomainCreatePermissionPostCreateSysPersister.removeDomainCreatePostCreateSysPermissions(connection, accessorResource, removePermissions); } @Override public Set<DomainCreatePermission> getDomainCreatePermissions(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getDirectDomainCreatePermissions(connection, accessorResource); } finally { __closeConnection(connection); } } @Override public Set<DomainCreatePermission> getEffectiveDomainCreatePermissions(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveDomainCreatePermissions(connection, accessorResource); } finally { __closeConnection(connection); } } private Set<DomainCreatePermission> __getEffectiveDomainCreatePermissions(SQLConnection connection, Resource accessorResource) { final Set<DomainCreatePermission> domainCreatePermissions = new HashSet<>(); domainCreatePermissions .addAll(grantDomainCreatePermissionSysPersister.getDomainCreateSysPermissionsIncludeInherited(connection, accessorResource)); domainCreatePermissions .addAll(grantDomainCreatePermissionPostCreateSysPersister .getDomainCreatePostCreateSysPermissionsIncludeInherited(connection, accessorResource)); return __collapseDomainCreatePermissions(domainCreatePermissions); } private Set<DomainCreatePermission> __collapseDomainCreatePermissions(Set<DomainCreatePermission> domainCreatePermissions) { final Set<DomainCreatePermission> collapsedPermissions = new HashSet<>(domainCreatePermissions); for (DomainCreatePermission permission : domainCreatePermissions) { for (DomainCreatePermission grantEquivalentPermission : domainCreatePermissions) { if (permission.isGrantableFrom(grantEquivalentPermission) && !permission.equals(grantEquivalentPermission)) { collapsedPermissions.remove(permission); break; } } } return collapsedPermissions; } @Override public void setResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); 
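      // Illustrative usage sketch (not part of this class; the factory calls and names below are
      // assumptions for illustration only), roughly along these lines:
      //
      //    Set<ResourceCreatePermission> createPerms = new HashSet<>();
      //    createPerms.add(ResourceCreatePermissions.getInstance(ResourceCreatePermissions.CREATE));
      //    createPerms.add(ResourceCreatePermissions.getInstance(ResourcePermissions.getInstance(ResourcePermissions.INHERIT)));
      //    accessControlContext.setResourceCreatePermissions(accessor, "document", "media-domain", createPerms);
      //
      // Set-semantics apply: the accessor's direct create permissions for the resource class/domain pair
      // are replaced in full by the requested set, which - unless empty - must contain the *CREATE system permission.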
      __assertResourceSpecified(accessorResource);
      __assertResourceClassSpecified(resourceClassName);
      __assertDomainSpecified(domainName);
      __assertPermissionsSpecified(resourceCreatePermissions);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);

         __setDirectResourceCreatePermissions(connection,
                                              accessorResource,
                                              resourceClassName,
                                              domainName,
                                              resourceCreatePermissions);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private void __setDirectResourceCreatePermissions(SQLConnection connection,
                                                     Resource accessorResource,
                                                     String resourceClassName,
                                                     String domainName,
                                                     Set<ResourceCreatePermission> requestedResourceCreatePermissions) {
      __assertResourceExists(connection, accessorResource);

      // verify that resource class is defined and get its metadata
      final ResourceClassInternalInfo resourceClassInfo = __getResourceClassInternalInfo(connection, resourceClassName);
      final Id<ResourceClassId> resourceClassId = Id.from(resourceClassInfo.getResourceClassId());

      // verify that domain is defined
      final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName);
      if (domainId == null) {
         throw new IllegalArgumentException("Could not find domain: " + domainName);
      }

      // ensure that the *CREATE system permission was specified
      __assertSetContainsResourceCreateSystemPermission(requestedResourceCreatePermissions);

      // ensure that the post create permissions are all in the correct resource class
      __assertUniquePostCreatePermissionsNamesForResourceClass(connection,
                                                               requestedResourceCreatePermissions,
                                                               resourceClassInfo);

      // check if the grantor (=session resource) is authorized to add/remove the requested permissions
      if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) {
         final Set<ResourceCreatePermission> grantorPermissions
               = __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(connection,
                                                                                    sessionResource,
                                                                                    resourceClassName,
                                                                                    domainName);
         final Set<ResourceCreatePermission> directAccessorPermissions
               = __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassId, domainId);

         final Set<ResourceCreatePermission> requestedAddPermissions
               = __subtract(requestedResourceCreatePermissions, directAccessorPermissions);

         if (!requestedAddPermissions.isEmpty()) {
            final Set<ResourceCreatePermission> unauthorizedAddPermissions
                  = __subtractResourceCreatePermissionsIfGrantableFrom(requestedAddPermissions, grantorPermissions);

            if (unauthorizedAddPermissions.size() > 0) {
               throw NotAuthorizedException.newInstanceForAction(sessionResource,
                                                                 "add the following permission(s): " + unauthorizedAddPermissions);
            }
         }

         final Set<ResourceCreatePermission> requestedRemovePermissions
               = __subtract(directAccessorPermissions, requestedResourceCreatePermissions);

         if (!requestedRemovePermissions.isEmpty()) {
            final Set<ResourceCreatePermission> unauthorizedRemovePermissions
                  = __subtractResourceCreatePermissionsIfGrantableFrom(requestedRemovePermissions, grantorPermissions);

            if (unauthorizedRemovePermissions.size() > 0) {
               throw NotAuthorizedException.newInstanceForAction(sessionResource,
                                                                 "remove the following permission(s): " + unauthorizedRemovePermissions);
            }
         }
      }

      // revoke any existing *CREATE system permissions this accessor has to this resource class
      grantResourceCreatePermissionSysPersister.removeResourceCreateSysPermissions(connection,
                                                                                   accessorResource,
                                                                                   resourceClassId,
                                                                                   domainId);

      // revoke any existing post create system permissions this accessor has to this resource class
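      // (these removals, together with the *CREATE removal above and the adds further down,
      //  are what implement the replace-semantics of this method)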
grantResourceCreatePermissionPostCreateSysPersister.removeResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId); // revoke any existing post create non-system permissions this accessor has to this resource class grantResourceCreatePermissionPostCreatePersister.removeResourceCreatePostCreatePermissions(connection, accessorResource, resourceClassId, domainId); // grant the *CREATE system permissions grantResourceCreatePermissionSysPersister.addResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, requestedResourceCreatePermissions, sessionResource); // grant the post create system permissions grantResourceCreatePermissionPostCreateSysPersister.addResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, requestedResourceCreatePermissions, sessionResource); // grant the post create non-system permissions grantResourceCreatePermissionPostCreatePersister.addResourceCreatePostCreatePermissions(connection, accessorResource, resourceClassId, domainId, requestedResourceCreatePermissions, sessionResource); } private void __assertSetContainsResourceCreateSystemPermission(Set<ResourceCreatePermission> resourceCreatePermissions) { if (!resourceCreatePermissions.isEmpty()) { boolean createSysPermissionFound = false; for (final ResourceCreatePermission resourceCreatePermission : resourceCreatePermissions) { if (resourceCreatePermission.isSystemPermission() && ResourceCreatePermissions.CREATE.equals(resourceCreatePermission.getPermissionName())) { createSysPermissionFound = true; break; } } // if at least one permission is specified, then there must be a *CREATE permission in the set if (!createSysPermissionFound) { throw new IllegalArgumentException("Permission: *CREATE must be specified"); } } } private void __assertUniquePostCreatePermissionsNamesForResourceClass(SQLConnection connection, Set<ResourceCreatePermission> resourceCreatePermissions, ResourceClassInternalInfo resourceClassInternalInfo) { final List<String> validPermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo); final Set<String> uniqueSystemPermissionNames = new HashSet<>(resourceCreatePermissions.size()); final Set<String> uniquePostCreatePermissionNames = new HashSet<>(resourceCreatePermissions.size()); for (final ResourceCreatePermission resourceCreatePermission : resourceCreatePermissions) { if (resourceCreatePermission.isSystemPermission()) { if (uniqueSystemPermissionNames.contains(resourceCreatePermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + resourceCreatePermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniqueSystemPermissionNames.add(resourceCreatePermission.getPermissionName()); } } else { final ResourcePermission postCreateResourcePermission = resourceCreatePermission.getPostCreateResourcePermission(); if (!validPermissionNames.contains(postCreateResourcePermission.getPermissionName())) { if (postCreateResourcePermission.isSystemPermission()) { // currently the only invalid system permissions are for unauthenticatable resource classes throw new IllegalArgumentException("Permission: " + postCreateResourcePermission.getPermissionName() + ", not valid for unauthenticatable resource"); } else { throw new IllegalArgumentException("Permission: " + postCreateResourcePermission.getPermissionName() + " is not defined for resource class: " + resourceClassInternalInfo.getResourceClassName()); } } if 
(uniquePostCreatePermissionNames.contains(postCreateResourcePermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + postCreateResourcePermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniquePostCreatePermissionNames.add(postCreateResourcePermission.getPermissionName()); } } } } private Set<ResourceCreatePermission> __subtractResourceCreatePermissionsIfGrantableFrom(Set<ResourceCreatePermission> candidatePermissionSet, Set<ResourceCreatePermission> grantorPermissionSet) { Set<ResourceCreatePermission> differenceSet = new HashSet<>(candidatePermissionSet); for (ResourceCreatePermission candidatePermission : candidatePermissionSet) { for (ResourceCreatePermission grantorPermission : grantorPermissionSet) { if (candidatePermission.isGrantableFrom(grantorPermission)) { differenceSet.remove(candidatePermission); break; } } } return differenceSet; } private <T> Set<T> __subtract(Set<T> minuendSet, Set<T> subtrahendSet) { Set<T> differenceSet = new HashSet<>(minuendSet); differenceSet.removeAll(subtrahendSet); return differenceSet; } private Set<ResourceCreatePermission> __getDirectResourceCreatePermissions(SQLConnection connection, Resource accessorResource, Id<ResourceClassId> resourceClassId, Id<DomainId> domainId) { Set<ResourceCreatePermission> resourceCreatePermissions = new HashSet<>(); // first get the *CREATE system permission the accessor has directly to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionSysPersister.getResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId)); // next get the post create system permissions the accessor has directly to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreateSysPersister.getResourceCreatePostCreateSysPermissions( connection, accessorResource, resourceClassId, domainId)); // next get the post create non-system permissions the accessor has directly to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreatePersister.getResourceCreatePostCreatePermissions( connection, accessorResource, resourceClassId, domainId)); return resourceCreatePermissions; } @Override public void grantResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourceCreatePermissions); __assertPermissionsSetNotEmpty(resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __grantDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, resourceCreatePermissions); } finally { __closeConnection(connection); } } @Override public void grantResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourceCreatePermission resourceCreatePermission, ResourceCreatePermission... 
                                              resourceCreatePermissions) {
      SQLConnection connection = null;

      __assertAuthenticated();
      __assertResourceSpecified(accessorResource);
      __assertResourceClassSpecified(resourceClassName);
      __assertDomainSpecified(domainName);
      __assertPermissionSpecified(resourceCreatePermission);
      __assertVarargPermissionsSpecified(resourceCreatePermissions);

      final Set<ResourceCreatePermission> requestedResourceCreatePermissions
            = __getSetWithoutNullsOrDuplicates(resourceCreatePermission, resourceCreatePermissions);

      try {
         connection = __getConnection();
         accessorResource = __resolveResource(connection, accessorResource);

         __grantDirectResourceCreatePermissions(connection,
                                                accessorResource,
                                                resourceClassName,
                                                domainName,
                                                requestedResourceCreatePermissions);
      }
      finally {
         __closeConnection(connection);
      }
   }

   private void __grantDirectResourceCreatePermissions(SQLConnection connection,
                                                       Resource accessorResource,
                                                       String resourceClassName,
                                                       String domainName,
                                                       Set<ResourceCreatePermission> requestedResourceCreatePermissions) {
      __assertResourceExists(connection, accessorResource);

      // verify that resource class is defined and get its metadata
      final ResourceClassInternalInfo resourceClassInfo = __getResourceClassInternalInfo(connection, resourceClassName);
      final Id<ResourceClassId> resourceClassId = Id.from(resourceClassInfo.getResourceClassId());

      // verify that domain is defined
      final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName);
      if (domainId == null) {
         throw new IllegalArgumentException("Could not find domain: " + domainName);
      }

      // ensure that the post create permissions are all in the correct resource class
      __assertUniquePostCreatePermissionsNamesForResourceClass(connection,
                                                               requestedResourceCreatePermissions,
                                                               resourceClassInfo);

      // check if the grantor (=session resource) is authorized to grant the requested permissions
      if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) {
         final Set<ResourceCreatePermission> grantorPermissions
               = __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(connection,
                                                                                    sessionResource,
                                                                                    resourceClassName,
                                                                                    domainName);

         final Set<ResourceCreatePermission> unauthorizedAddPermissions
               = __subtractResourceCreatePermissionsIfGrantableFrom(requestedResourceCreatePermissions, grantorPermissions);

         if (unauthorizedAddPermissions.size() > 0) {
            throw NotAuthorizedException.newInstanceForAction(sessionResource,
                                                              "grant the following permission(s): " + unauthorizedAddPermissions);
         }
      }

      // ensure that the *CREATE system permission is specified if the accessor has no direct create permissions yet
      final Set<ResourceCreatePermission> directAccessorPermissions
            = __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassId, domainId);

      if (directAccessorPermissions.isEmpty()) {
         // our invariant is that a resource's direct create permissions must include the *CREATE system permission;
         // if there are no direct create permissions, then the requested permissions to be granted need to include *CREATE
         __assertSetContainsResourceCreateSystemPermission(requestedResourceCreatePermissions);
      }

      final Set<ResourceCreatePermission> addPermissions = new HashSet<>(requestedResourceCreatePermissions.size());
      final Set<ResourceCreatePermission> updatePermissions = new HashSet<>(requestedResourceCreatePermissions.size());

      for (ResourceCreatePermission requestedPermission : requestedResourceCreatePermissions) {
         boolean existingPermission = false;

         if (requestedPermission.isSystemPermission()) {
            for (ResourceCreatePermission existingDirectPermission : directAccessorPermissions) {
               if
(existingDirectPermission.isSystemPermission() && requestedPermission.getSystemPermissionId() == existingDirectPermission.getSystemPermissionId()) { // we found a match by sysId - now let's see if we need to update existing or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && !requestedPermission.isGrantableFrom(existingDirectPermission)) { // requested permission has higher granting rights than // the already existing direct permission, so we need to update it updatePermissions.add(requestedPermission); } existingPermission = true; break; } } } else { final ResourcePermission requestedPostCreateResourcePermission = requestedPermission.getPostCreateResourcePermission(); for (ResourceCreatePermission existingDirectPermission : directAccessorPermissions) { if (!existingDirectPermission.isSystemPermission()) { final ResourcePermission existingPostCreateResourcePermission = existingDirectPermission.getPostCreateResourcePermission(); if (requestedPostCreateResourcePermission.equalsIgnoreGrant(existingPostCreateResourcePermission)) { // found a match in name - let's check compatibility first if (requestedPermission.isWithGrant() != requestedPostCreateResourcePermission.isWithGrant() && existingDirectPermission.isWithGrant() != existingPostCreateResourcePermission.isWithGrant() && requestedPermission.isWithGrant() != existingDirectPermission.isWithGrant()) { // the requested permission is incompatible to the existing permission because we can't // perform grant operations (a)/G -> (a/G) or (a/G) -> (a)/G without removing either the // create or post-create granting option throw new IllegalArgumentException("Requested create permissions " + requestedResourceCreatePermissions + " are incompatible with existing create permissions " + directAccessorPermissions); } // now let's see if we need to update existing permission or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && ((requestedPermission.isWithGrant() && requestedPostCreateResourcePermission.isWithGrant()) || (!existingDirectPermission.isWithGrant() && !existingPostCreateResourcePermission.isWithGrant()))) { // the two permissions match in name, but the requested has higher granting rights, // so we need to update updatePermissions.add(requestedPermission); } // because we found a match in name, we can skip comparing requested against other existing permissions existingPermission = true; break; } } } } if (!existingPermission) { // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it addPermissions.add(requestedPermission); } } // update *CREATE system permission, if necessary grantResourceCreatePermissionSysPersister.updateResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, updatePermissions, sessionResource); // update any post create system permissions, if necessary grantResourceCreatePermissionPostCreateSysPersister.updateResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, updatePermissions, sessionResource); // update any post create non-system permissions, if necessary grantResourceCreatePermissionPostCreatePersister.updateResourceCreatePostCreatePermissions(connection, accessorResource, resourceClassId, domainId, updatePermissions, sessionResource); // grant the *CREATE system permissions, if necessary grantResourceCreatePermissionSysPersister.addResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, 
addPermissions, sessionResource); // grant any post create system permissions, if necessary grantResourceCreatePermissionPostCreateSysPersister.addResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, addPermissions, sessionResource); // grant any post create non-system permissions, if necessary grantResourceCreatePermissionPostCreatePersister.addResourceCreatePostCreatePermissions(connection, accessorResource, resourceClassId, domainId, addPermissions, sessionResource); } @Override public void revokeResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourceCreatePermissions); __assertPermissionsSetNotEmpty(resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, resourceCreatePermissions); } finally { __closeConnection(connection); } } @Override public void revokeResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourceCreatePermission resourceCreatePermission, ResourceCreatePermission... resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourceCreatePermission); __assertVarargPermissionsSpecified(resourceCreatePermissions); final Set<ResourceCreatePermission> requestedResourceCreatePermissions = __getSetWithoutNullsOrDuplicates(resourceCreatePermission, resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __revokeDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, requestedResourceCreatePermissions); } finally { __closeConnection(connection); } } private void __revokeDirectResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> requestedResourceCreatePermissions) { __assertResourceExists(connection, accessorResource); // verify that resource class is defined and get its metadata final ResourceClassInternalInfo resourceClassInfo = __getResourceClassInternalInfo(connection, resourceClassName); final Id<ResourceClassId> resourceClassId = Id.from(resourceClassInfo.getResourceClassId()); // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } __assertUniquePostCreatePermissionsNamesForResourceClass(connection, requestedResourceCreatePermissions, resourceClassInfo); // check if the grantor (=session resource) is authorized to grant the requested permissions if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) { final Set<ResourceCreatePermission> grantorPermissions = __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); final 
Set<ResourceCreatePermission> unauthorizedPermissions = __subtractResourceCreatePermissionsIfGrantableFrom(requestedResourceCreatePermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "revoke the following permission(s): " + unauthorizedPermissions); } } // ensure that the *CREATE system permissions will remain if not all are cleared final Set<ResourceCreatePermission> directAccessorPermissions = __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassId, domainId); if ((directAccessorPermissions.size() > requestedResourceCreatePermissions.size()) && __setContainsResourceCreateSystemPermission(requestedResourceCreatePermissions)) { // our invariant is that a resource's direct create permissions must include the *CREATE system permission; // if after revoking the requested permissions, the remaining set wouldn't include the *CREATE, we'd have a problem throw new IllegalArgumentException( "Attempt to revoke a subset of resource create permissions that includes the *CREATE system permission: " + requestedResourceCreatePermissions); } final Set<ResourceCreatePermission> removePermissions = new HashSet<>(requestedResourceCreatePermissions.size()); for (ResourceCreatePermission requestedPermission : requestedResourceCreatePermissions) { if (requestedPermission.isSystemPermission()) { for (ResourceCreatePermission existingDirectPermission : directAccessorPermissions) { if (existingDirectPermission.isSystemPermission() && requestedPermission.getSystemPermissionId() == existingDirectPermission.getSystemPermissionId()) { // requested permission has same system Id as an already existing direct permission, so remove it removePermissions.add(requestedPermission); break; } } } else { final ResourcePermission requestedPostCreateResourcePermission = requestedPermission.getPostCreateResourcePermission(); for (ResourceCreatePermission existingDirectPermission : directAccessorPermissions) { if (!existingDirectPermission.isSystemPermission()) { // now let's look at the post-create permissions if (requestedPostCreateResourcePermission .equalsIgnoreGrant(existingDirectPermission.getPostCreateResourcePermission())) { // requested post-create permission has same name as an already existing direct permission, so remove it removePermissions.add(requestedPermission); break; } } } } } // remove *CREATE system permission, if necessary grantResourceCreatePermissionSysPersister.removeResourceCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); // remove any post create system permissions, if necessary grantResourceCreatePermissionPostCreateSysPersister.removeResourceCreatePostCreateSysPermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); // remove any post create non-system permissions, if necessary grantResourceCreatePermissionPostCreatePersister.removeResourceCreatePostCreatePermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); } private boolean __setContainsResourceCreateSystemPermission(Set<ResourceCreatePermission> resourceCreatePermissions) { for (final ResourceCreatePermission resourceCreatePermission : resourceCreatePermissions) { if (resourceCreatePermission.isSystemPermission() && ResourceCreatePermissions.CREATE.equals(resourceCreatePermission.getPermissionName())) { return true; } } return false; } @Override public Set<ResourceCreatePermission> getResourceCreatePermissions(Resource 
accessorResource, String resourceClassName, String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName); } finally { __closeConnection(connection); } } private Set<ResourceCreatePermission> __getDirectResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName) { // verify that resource class is defined Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } return __getDirectResourceCreatePermissions(connection, accessorResource, resourceClassId, domainId); } @Override public Map<String, Map<String, Set<ResourceCreatePermission>>> getResourceCreatePermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getDirectResourceCreatePermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Map<String, Set<ResourceCreatePermission>>> __getDirectResourceCreatePermissionsMap(SQLConnection connection, Resource accessorResource) { // collect all the create permissions that the accessor has Map<String, Map<String, Set<ResourceCreatePermission>>> allResourceCreatePermissionsMap = new HashMap<>(); // read the *CREATE system permissions and add to allResourceCreatePermissionsMap allResourceCreatePermissionsMap .putAll(grantResourceCreatePermissionSysPersister.getResourceCreateSysPermissions(connection, accessorResource)); // read the post create system permissions and add to allResourceCreatePermissionsMap __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap( grantResourceCreatePermissionPostCreateSysPersister .getResourceCreatePostCreateSysPermissions(connection, accessorResource), allResourceCreatePermissionsMap); // read the post create non-system permissions and add to allResourceCreatePermissionsMap __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap( grantResourceCreatePermissionPostCreatePersister .getResourceCreatePostCreatePermissions(connection, accessorResource), allResourceCreatePermissionsMap); return __collapseResourceCreatePermissions(allResourceCreatePermissionsMap); } @Override public Set<ResourceCreatePermission> getEffectiveResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); 
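      // note: unlike getResourceCreatePermissions(...) above, the private helper invoked below also includes
      // permissions obtained through inheritance, expands the result to all applicable create permissions when
      // the accessor has super-user privileges on the domain, and collapses grant-equivalent duplicates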
__assertDomainSpecified(domainName); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __getEffectiveResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName); } finally { __closeConnection(connection); } } private Set<ResourceCreatePermission> __getEffectiveResourceCreatePermissionsIgnoringSuperUserPrivileges(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName) { // verify that resource class is defined Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // collect the create permissions that this resource has to this resource class Set<ResourceCreatePermission> resourceCreatePermissions = new HashSet<>(); // first read the *CREATE system permission the accessor has to the specified resource class resourceCreatePermissions.addAll( grantResourceCreatePermissionSysPersister.getResourceCreateSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // next read the post create system permissions the accessor has to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreateSysPersister .getResourceCreatePostCreateSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // next read the post create non-system permissions the accessor has to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreatePersister .getResourceCreatePostCreatePermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); return __collapseResourceCreatePermissions(resourceCreatePermissions); } private Set<ResourceCreatePermission> __getEffectiveResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName) { // verify that resource class is defined final ResourceClassInternalInfo resourceClassInternalInfo = __getResourceClassInternalInfo(connection, resourceClassName); // verify that domain is defined final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } if (__isSuperUserOfDomain(connection, accessorResource, domainName)) { return __getApplicableResourceCreatePermissions(connection, resourceClassInternalInfo); } Id<ResourceClassId> resourceClassId = Id.from(resourceClassInternalInfo.getResourceClassId()); // collect the create permissions that this resource has to this resource class Set<ResourceCreatePermission> resourceCreatePermissions = new HashSet<>(); // first read the *CREATE system permission the accessor has to the specified resource class resourceCreatePermissions.addAll( grantResourceCreatePermissionSysPersister.getResourceCreateSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // next read the post create system permissions the 
accessor has to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreateSysPersister .getResourceCreatePostCreateSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // next read the post create non-system permissions the accessor has to the specified resource class resourceCreatePermissions .addAll(grantResourceCreatePermissionPostCreatePersister .getResourceCreatePostCreatePermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); return __collapseResourceCreatePermissions(resourceCreatePermissions); } private Set<ResourceCreatePermission> __collapseResourceCreatePermissions(Set<ResourceCreatePermission> resourceCreatePermissions) { final Set<ResourceCreatePermission> collapsedPermissions = new HashSet<>(resourceCreatePermissions); for (ResourceCreatePermission permission : resourceCreatePermissions) { for (ResourceCreatePermission grantEquivalentPermission : resourceCreatePermissions) { if (permission.isGrantableFrom(grantEquivalentPermission) && !permission.equals(grantEquivalentPermission)) { collapsedPermissions.remove(permission); break; } } } return collapsedPermissions; } @Override public Map<String, Map<String, Set<ResourceCreatePermission>>> getEffectiveResourceCreatePermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveResourceCreatePermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Map<String, Set<ResourceCreatePermission>>> __getEffectiveResourceCreatePermissionsMap( SQLConnection connection, Resource accessorResource) { // collect all the create permissions that the accessor has Map<String, Map<String, Set<ResourceCreatePermission>>> allResourceCreatePermissionsMap = new HashMap<>(); // read the *CREATE system permissions and add to allResourceCreatePermissionsMap allResourceCreatePermissionsMap .putAll(grantResourceCreatePermissionSysPersister .getResourceCreateSysPermissionsIncludeInherited(connection, accessorResource)); // read the post create system permissions and add to allResourceCreatePermissionsMap __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap( grantResourceCreatePermissionPostCreateSysPersister .getResourceCreatePostCreateSysPermissionsIncludeInherited(connection, accessorResource), allResourceCreatePermissionsMap); // read the post create non-system permissions and add to allResourceCreatePermissionsMap __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap( grantResourceCreatePermissionPostCreatePersister .getResourceCreatePostCreatePermissionsIncludeInherited(connection, accessorResource), allResourceCreatePermissionsMap); // finally, collect all applicable create permissions when accessor has super-user privileges to any domain // and add them into the globalALLPermissionsMap final Map<String, Map<String, Set<ResourceCreatePermission>>> allSuperResourceCreatePermissionsMap = new HashMap<>(); Map<String, Set<ResourceCreatePermission>> superResourceCreatePermissionsMap = null; final Map<String, Set<DomainPermission>> effectiveDomainPermissionsMap = __getEffectiveDomainPermissionsMap(connection, accessorResource); for (String domainName : effectiveDomainPermissionsMap.keySet()) { final 
Set<DomainPermission> effectiveDomainPermissions = effectiveDomainPermissionsMap.get(domainName); if (effectiveDomainPermissions.contains(DomainPermission_SUPER_USER) || effectiveDomainPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { if (superResourceCreatePermissionsMap == null) { // lazy-construct super-user-privileged resource-permissions map by resource classes final List<String> resourceClassNames = resourceClassPersister.getResourceClassNames(connection); superResourceCreatePermissionsMap = new HashMap<>(resourceClassNames.size()); for (String resourceClassName : resourceClassNames) { final Set<ResourceCreatePermission> applicableResourceCreatePermissions = __getApplicableResourceCreatePermissions(connection, __getResourceClassInternalInfo(connection, resourceClassName)); superResourceCreatePermissionsMap.put(resourceClassName, applicableResourceCreatePermissions); } } allSuperResourceCreatePermissionsMap.put(domainName, superResourceCreatePermissionsMap); } } __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap(allSuperResourceCreatePermissionsMap, allResourceCreatePermissionsMap); return __collapseResourceCreatePermissions(allResourceCreatePermissionsMap); } private void __mergeSourceCreatePermissionsMapIntoTargetCreatePermissionsMap(Map<String, Map<String, Set<ResourceCreatePermission>>> sourceCreatePermissionsMap, Map<String, Map<String, Set<ResourceCreatePermission>>> targetCreatePermissionsMap) { for (String domainName : sourceCreatePermissionsMap.keySet()) { Map<String, Set<ResourceCreatePermission>> targetCreatePermsForDomainMap; // does the target map have domain? if ((targetCreatePermsForDomainMap = targetCreatePermissionsMap.get(domainName)) == null) { // no, add the domain targetCreatePermissionsMap.put(domainName, targetCreatePermsForDomainMap = new HashMap<>()); } for (String resourceClassName : sourceCreatePermissionsMap.get(domainName).keySet()) { Set<ResourceCreatePermission> targetCreatePermsForClassSet; // does the target map have the resource class? 
if ((targetCreatePermsForClassSet = targetCreatePermsForDomainMap.get(resourceClassName)) == null) { // no, add the resource class targetCreatePermsForDomainMap.put(resourceClassName, targetCreatePermsForClassSet = new HashSet<>()); } // get the source permissions for the domain + resource class final Set<ResourceCreatePermission> sourceCreatePermsForClassSet = sourceCreatePermissionsMap.get(domainName).get(resourceClassName); // add the source permissions above to the target for the respective domain + resource class targetCreatePermsForClassSet.addAll(sourceCreatePermsForClassSet); } } } private Map<String, Map<String, Set<ResourceCreatePermission>>> __collapseResourceCreatePermissions(Map<String, Map<String, Set<ResourceCreatePermission>>> resourceCreatePermissionsMap) { for (String domainName : resourceCreatePermissionsMap.keySet()) { final Map<String, Set<ResourceCreatePermission>> createPermissionsByDomainMap = resourceCreatePermissionsMap.get(domainName); for (String resourceClassName : createPermissionsByDomainMap.keySet()) { final Set<ResourceCreatePermission> createPermissionsByResourceClassMap = createPermissionsByDomainMap.get(resourceClassName); createPermissionsByDomainMap.put(resourceClassName, __collapseResourceCreatePermissions(createPermissionsByResourceClassMap)); } } return resourceCreatePermissionsMap; } @Override public void setResourcePermissions(Resource accessorResource, Resource accessedResource, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionsSpecified(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); __setDirectResourcePermissions(connection, accessorResource, accessedResource, resourcePermissions, sessionResource, false); } finally { __closeConnection(connection); } } private void __setDirectResourcePermissions(SQLConnection connection, Resource accessorResource, Resource accessedResource, Set<ResourcePermission> requestedResourcePermissions, Resource grantorResource, boolean newResourceMode) { final ResourceClassInternalInfo accessedResourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource); // next ensure that the requested permissions are all in the correct resource class __assertUniqueResourcePermissionsNamesForResourceClass(connection, requestedResourcePermissions, accessedResourceClassInternalInfo); // if this method is being called to set the post create permissions on a newly created resource // we do not perform the security checks below, since it would be incorrect if (!newResourceMode) { __assertResourceExists(connection, accessorResource); if (!__isSuperUserOfResource(connection, grantorResource, accessedResource)) { // next check if the grantor (i.e. 
session resource) has permissions to grant the requested permissions final Set<ResourcePermission> grantorResourcePermissions = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, grantorResource, accessedResource); final Set<ResourcePermission> directAccessorResourcePermissions = __getDirectResourcePermissions(connection, accessorResource, accessedResource); final Set<ResourcePermission> requestedAddPermissions = __subtract(requestedResourcePermissions, directAccessorResourcePermissions); if (requestedAddPermissions.size() > 0) { final Set<ResourcePermission> unauthorizedAddPermissions = __subtractResourcePermissionsIfGrantableFrom(requestedAddPermissions, grantorResourcePermissions); if (unauthorizedAddPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(grantorResource, "add the following permission(s): " + unauthorizedAddPermissions); } } final Set<ResourcePermission> requestedRemovePermissions = __subtract(directAccessorResourcePermissions, requestedResourcePermissions); if (requestedRemovePermissions.size() > 0) { final Set<ResourcePermission> unauthorizedRemovePermissions = __subtractResourcePermissionsIfGrantableFrom(requestedRemovePermissions, grantorResourcePermissions); if (unauthorizedRemovePermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(grantorResource, "remove the following permission(s): " + unauthorizedRemovePermissions); } } } // if inherit permissions are about to be granted, first check for cycles if (requestedResourcePermissions.contains(ResourcePermission_INHERIT) || requestedResourcePermissions.contains(ResourcePermission_INHERIT_GRANT)) { Set<ResourcePermission> reversePathResourcePermissions = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, accessedResource, accessorResource); if (reversePathResourcePermissions.contains(ResourcePermission_INHERIT) || reversePathResourcePermissions.contains(ResourcePermission_INHERIT_GRANT) || accessorResource.equals(accessedResource)) { throw new OaccException("Granting the requested permission(s): " + requestedResourcePermissions + " will cause a cycle between: " + accessorResource + " and: " + accessedResource); } } // revoke any existing direct system permissions between the accessor and the accessed resource grantResourcePermissionSysPersister.removeResourceSysPermissions(connection, accessorResource, accessedResource); // revoke any existing direct non-system permissions between the accessor and the accessed resource grantResourcePermissionPersister.removeResourcePermissions(connection, accessorResource, accessedResource); } // add the new direct system permissions grantResourcePermissionSysPersister.addResourceSysPermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from( accessedResourceClassInternalInfo .getResourceClassId()), requestedResourcePermissions, grantorResource); // add the new direct non-system permissions grantResourcePermissionPersister.addResourcePermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from(accessedResourceClassInternalInfo .getResourceClassId()), requestedResourcePermissions, grantorResource); } private void __assertUniqueResourcePermissionsNamesForResourceClass(SQLConnection connection, Set<ResourcePermission> resourcePermissions, ResourceClassInternalInfo resourceClassInternalInfo) { final List<String> validPermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo); final Set<String> uniquePermissionNames = new 
HashSet<>(resourcePermissions.size());

   for (final ResourcePermission resourcePermission : resourcePermissions) {
      if (!validPermissionNames.contains(resourcePermission.getPermissionName())) {
         if (resourcePermission.isSystemPermission()) {
            // currently the only invalid system permissions are for unauthenticatable resource classes
            throw new IllegalArgumentException("Permission: " + resourcePermission.getPermissionName()
                                                     + ", not valid for unauthenticatable resource");
         }
         else {
            throw new IllegalArgumentException("Permission: " + resourcePermission.getPermissionName()
                                                     + " is not defined for resource class: "
                                                     + resourceClassInternalInfo.getResourceClassName());
         }
      }
      if (uniquePermissionNames.contains(resourcePermission.getPermissionName())) {
         throw new IllegalArgumentException("Duplicate permission: " + resourcePermission.getPermissionName()
                                                  + " that only differs in 'withGrant' option");
      }
      else {
         uniquePermissionNames.add(resourcePermission.getPermissionName());
      }
   }
}

private Set<ResourcePermission> __subtractResourcePermissionsIfGrantableFrom(Set<ResourcePermission> candidatePermissionSet,
                                                                             Set<ResourcePermission> grantorPermissionSet) {
   Set<ResourcePermission> differenceSet = new HashSet<>(candidatePermissionSet);

   for (ResourcePermission candidatePermission : candidatePermissionSet) {
      for (ResourcePermission grantorPermission : grantorPermissionSet) {
         if (candidatePermission.isGrantableFrom(grantorPermission)) {
            differenceSet.remove(candidatePermission);
            break;
         }
      }
   }

   return differenceSet;
}

@Override
public void grantResourcePermissions(Resource accessorResource,
                                     Resource accessedResource,
                                     Set<ResourcePermission> resourcePermissions) {
   SQLConnection connection = null;

   __assertAuthenticated();
   __assertResourceSpecified(accessorResource);
   __assertResourceSpecified(accessedResource);
   __assertPermissionsSpecified(resourcePermissions);
   __assertPermissionsSetNotEmpty(resourcePermissions);

   try {
      connection = __getConnection();
      accessorResource = __resolveResource(connection, accessorResource);
      accessedResource = __resolveResource(connection, accessedResource);

      __grantDirectResourcePermissions(connection, accessorResource, accessedResource, resourcePermissions);
   }
   finally {
      __closeConnection(connection);
   }
}

@Override
public void grantResourcePermissions(Resource accessorResource,
                                     Resource accessedResource,
                                     ResourcePermission resourcePermission,
                                     ResourcePermission...
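// ---------------------------------------------------------------------------------
// [editor's note: illustrative usage sketch - not part of the original source]
// How a caller inside this class might add direct permissions via the
// grantResourcePermissions() method defined above. The "EDIT" permission name and the
// sketch method itself are hypothetical; the two-argument
// ResourcePermissions.getInstance(name, withGrant) factory is the one used elsewhere
// in this file.
private void grantUsageSketch(Resource accessorResource, Resource accessedResource) {
   final java.util.Set<ResourcePermission> permissions = new java.util.HashSet<>();
   permissions.add(ResourcePermissions.getInstance("EDIT", false));   // hypothetical permission name, no grant option
   grantResourcePermissions(accessorResource, accessedResource, permissions);
}
// ---------------------------------------------------------------------------------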
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); __grantDirectResourcePermissions(connection, accessorResource, accessedResource, requestedResourcePermissions); } finally { __closeConnection(connection); } } private void __grantDirectResourcePermissions(SQLConnection connection, Resource accessorResource, Resource accessedResource, Set<ResourcePermission> requestedResourcePermissions) { __assertResourceExists(connection, accessorResource); final ResourceClassInternalInfo accessedResourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource); // next ensure that the requested permissions are all in the correct resource class __assertUniqueResourcePermissionsNamesForResourceClass(connection, requestedResourcePermissions, accessedResourceClassInternalInfo); // check for authorization if (!__isSuperUserOfResource(connection, sessionResource, accessedResource)) { final Set<ResourcePermission> grantorResourcePermissions = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, accessedResource); final Set<ResourcePermission> unauthorizedPermissions = __subtractResourcePermissionsIfGrantableFrom(requestedResourcePermissions, grantorResourcePermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "grant the following permission(s): " + unauthorizedPermissions); } } final Set<ResourcePermission> directAccessorResourcePermissions = __getDirectResourcePermissions(connection, accessorResource, accessedResource); final Set<ResourcePermission> addPermissions = new HashSet<>(requestedResourcePermissions.size()); final Set<ResourcePermission> updatePermissions = new HashSet<>(requestedResourcePermissions.size()); for (ResourcePermission requestedPermission : requestedResourcePermissions) { boolean existingPermission = false; for (ResourcePermission existingDirectPermission : directAccessorResourcePermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // found a match by name - now let's see if we need to update existing or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && !requestedPermission.isGrantableFrom(existingDirectPermission)) { // requested permission has higher granting rights than the already existing direct permission, // so we need to update it updatePermissions.add(requestedPermission); } existingPermission = true; break; } } if (!existingPermission) { // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it addPermissions.add(requestedPermission); } } // if inherit permissions are about to be granted, first check for cycles if (addPermissions.contains(ResourcePermission_INHERIT) || addPermissions.contains(ResourcePermission_INHERIT_GRANT)) { Set<ResourcePermission> reversePathResourcePermissions = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, accessedResource, 
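// ---------------------------------------------------------------------------------
// [editor's note: illustrative sketch - not part of the original source]
// The loop above partitions the requested permissions into "add" (no direct permission
// of that name exists yet) and "update" (a direct permission with the same name exists
// but with weaker granting rights). A stripped-down sketch of that partitioning over
// plain name/withGrant pairs, assuming a simple two-level grant model; the PermSketch
// class and all names are hypothetical and use only the JDK.
private static final class PermSketch {
   final String name;
   final boolean withGrant;
   PermSketch(String name, boolean withGrant) { this.name = name; this.withGrant = withGrant; }
}

private static void partitionSketch(java.util.Set<PermSketch> requested,
                                    java.util.Map<String, PermSketch> directByName,
                                    java.util.Set<PermSketch> add,
                                    java.util.Set<PermSketch> update) {
   for (PermSketch req : requested) {
      final PermSketch existing = directByName.get(req.name);
      if (existing == null) {
         add.add(req);                                   // nothing with this name held yet
      }
      else if (req.withGrant && !existing.withGrant) {
         update.add(req);                                // same name, but stronger granting rights requested
      }                                                  // otherwise: already covered, leave unchanged
   }
}
// ---------------------------------------------------------------------------------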
accessorResource); if (reversePathResourcePermissions.contains(ResourcePermission_INHERIT) || reversePathResourcePermissions.contains(ResourcePermission_INHERIT_GRANT) || accessorResource.equals(accessedResource)) { throw new OaccException("Granting the requested permission(s): " + requestedResourcePermissions + " will cause a cycle between: " + accessorResource + " and: " + accessedResource); } } // update any necessary direct system permissions between the accessor and the accessed resource grantResourcePermissionSysPersister.updateResourceSysPermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from( accessedResourceClassInternalInfo.getResourceClassId()), updatePermissions, sessionResource); // update any necessary direct non-system permissions between the accessor and the accessed resource grantResourcePermissionPersister.updateResourcePermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from( accessedResourceClassInternalInfo.getResourceClassId()), updatePermissions, sessionResource); // add the new direct system permissions grantResourcePermissionSysPersister.addResourceSysPermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from( accessedResourceClassInternalInfo.getResourceClassId()), addPermissions, sessionResource); // add the new direct non-system permissions grantResourcePermissionPersister.addResourcePermissions(connection, accessorResource, accessedResource, Id.<ResourceClassId>from(accessedResourceClassInternalInfo.getResourceClassId()), addPermissions, sessionResource); } @Override public void revokeResourcePermissions(Resource accessorResource, Resource accessedResource, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); __revokeDirectResourcePermissions(connection, accessorResource, accessedResource, resourcePermissions); } finally { __closeConnection(connection); } } @Override public void revokeResourcePermissions(Resource accessorResource, Resource accessedResource, ResourcePermission resourcePermission, ResourcePermission... 
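// ---------------------------------------------------------------------------------
// [editor's note: illustrative sketch - not part of the original source]
// The cycle check above refuses to add an INHERIT permission from accessor A to
// accessed resource B when B already inherits (directly or transitively) from A, or
// when A and B are the same resource, because that would create an inheritance loop.
// Reduced to its boolean skeleton (all parameter names are hypothetical):
private static boolean wouldCreateInheritCycleSketch(boolean requestIncludesInherit,
                                                     boolean reversePathAlreadyInherits,
                                                     boolean accessorEqualsAccessed) {
   return requestIncludesInherit && (reversePathAlreadyInherits || accessorEqualsAccessed);
}
// ---------------------------------------------------------------------------------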
resourcePermissions) {
   SQLConnection connection = null;

   __assertAuthenticated();
   __assertResourceSpecified(accessorResource);
   __assertResourceSpecified(accessedResource);
   __assertPermissionSpecified(resourcePermission);
   __assertVarargPermissionsSpecified(resourcePermissions);

   final Set<ResourcePermission> obsoleteResourcePermissions
         = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions);

   try {
      connection = __getConnection();
      accessorResource = __resolveResource(connection, accessorResource);
      accessedResource = __resolveResource(connection, accessedResource);

      __revokeDirectResourcePermissions(connection, accessorResource, accessedResource, obsoleteResourcePermissions);
   }
   finally {
      __closeConnection(connection);
   }
}

private void __revokeDirectResourcePermissions(SQLConnection connection,
                                               Resource accessorResource,
                                               Resource accessedResource,
                                               Set<ResourcePermission> obsoleteResourcePermissions) {
   __assertResourceExists(connection, accessorResource);

   final ResourceClassInternalInfo accessedResourceClassInternalInfo
         = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource);

   // next ensure that the requested permissions are unique in name
   __assertUniqueResourcePermissionsNamesForResourceClass(connection,
                                                          obsoleteResourcePermissions,
                                                          accessedResourceClassInternalInfo);

   // check for authorization
   if (!__isSuperUserOfResource(connection, sessionResource, accessedResource)) {
      final Set<ResourcePermission> grantorResourcePermissions
            = __getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, accessedResource);
      final Set<ResourcePermission> unauthorizedPermissions
            = __subtractResourcePermissionsIfGrantableFrom(obsoleteResourcePermissions, grantorResourcePermissions);

      if (unauthorizedPermissions.size() > 0) {
         throw NotAuthorizedException.newInstanceForAction(sessionResource,
                                                           "revoke the following permission(s): " + unauthorizedPermissions);
      }
   }

   final Set<ResourcePermission> directAccessorResourcePermissions
         = __getDirectResourcePermissions(connection, accessorResource, accessedResource);
   final Set<ResourcePermission> removePermissions = new HashSet<>(obsoleteResourcePermissions.size());

   for (ResourcePermission requestedPermission : obsoleteResourcePermissions) {
      for (ResourcePermission existingDirectPermission : directAccessorResourcePermissions) {
         if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) {
            // requested permission has same name and regardless of granting rights we need to remove it
            removePermissions.add(requestedPermission);
            break;
         }
      }
   }

   // remove the matching direct system permissions between the accessor and the accessed resource
   grantResourcePermissionSysPersister.removeResourceSysPermissions(connection,
                                                                    accessorResource,
                                                                    accessedResource,
                                                                    Id.<ResourceClassId>from(accessedResourceClassInternalInfo.getResourceClassId()),
                                                                    removePermissions);

   // remove the matching direct non-system permissions between the accessor and the accessed resource
   grantResourcePermissionPersister.removeResourcePermissions(connection,
                                                              accessorResource,
                                                              accessedResource,
                                                              Id.<ResourceClassId>from(accessedResourceClassInternalInfo.getResourceClassId()),
                                                              removePermissions);
}

@Override
public Set<ResourcePermission> getResourcePermissions(Resource accessorResource, Resource accessedResource) {
   SQLConnection connection = null;

   __assertAuthenticated();
   __assertResourceSpecified(accessorResource);
   __assertResourceSpecified(accessedResource);

   try {
      connection = __getConnection();
      accessorResource =
__resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); __assertQueryAuthorization(connection, accessorResource); return __getDirectResourcePermissions(connection, accessorResource, accessedResource); } finally { __closeConnection(connection); } } private Set<ResourcePermission> __getDirectResourcePermissions(SQLConnection connection, Resource accessorResource, Resource accessedResource) { Set<ResourcePermission> resourcePermissions = new HashSet<>(); // collect the system permissions that the accessor resource has to the accessed resource resourcePermissions.addAll(grantResourcePermissionSysPersister.getResourceSysPermissions(connection, accessorResource, accessedResource)); // collect the non-system permissions that the accessor has to the accessed resource resourcePermissions.addAll(grantResourcePermissionPersister.getResourcePermissions(connection, accessorResource, accessedResource)); return resourcePermissions; } @Override public Set<ResourcePermission> getEffectiveResourcePermissions(Resource accessorResource, Resource accessedResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveResourcePermissions(connection, accessorResource, accessedResource); } finally { __closeConnection(connection); } } private Set<ResourcePermission> __getEffectiveResourcePermissions(SQLConnection connection, Resource accessorResource, Resource accessedResource) { Set<ResourcePermission> resourcePermissions = new HashSet<>(); final Id<DomainId> accessedDomainId = resourcePersister.getDomainIdByResource(connection, accessedResource); final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource); if (__isSuperUserOfDomain(connection, accessorResource, accessedDomainId)) { return __getApplicableResourcePermissions(connection, resourceClassInternalInfo); } // collect the system permissions that the accessor resource has to the accessed resource resourcePermissions.addAll(grantResourcePermissionSysPersister .getResourceSysPermissionsIncludeInherited(connection, accessorResource, accessedResource)); // collect the non-system permissions that the accessor has to the accessed resource resourcePermissions.addAll(grantResourcePermissionPersister.getResourcePermissionsIncludeInherited(connection, accessorResource, accessedResource)); final Id<ResourceClassId> accessedResourceClassId = Id.from(resourceClassInternalInfo.getResourceClassId()); // collect the global system permissions that the accessor has to the accessed resource's domain resourcePermissions .addAll(grantGlobalResourcePermissionSysPersister.getGlobalSysPermissionsIncludeInherited(connection, accessorResource, accessedResourceClassId, accessedDomainId)); // first collect the global non-system permissions that the accessor this resource has to the accessed resource's domain resourcePermissions .addAll(grantGlobalResourcePermissionPersister.getGlobalResourcePermissionsIncludeInherited(connection, accessorResource, accessedResourceClassId, accessedDomainId)); return __collapseResourcePermissions(resourcePermissions); } private Set<ResourcePermission> 
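// ---------------------------------------------------------------------------------
// [editor's note: illustrative sketch - not part of the original source]
// __getEffectiveResourcePermissions() above answers "what can this accessor do to this
// resource": domain super-users short-circuit to every applicable permission, while
// everyone else gets the union of four sources - inherited system and non-system
// permissions on the resource itself, plus global system and non-system permissions on
// the resource's class within its domain - which is then collapsed. The same
// aggregation pattern over plain string sets, names hypothetical:
private static java.util.Set<String> unionSketch(java.util.Collection<java.util.Set<String>> sources) {
   final java.util.Set<String> effective = new java.util.HashSet<>();
   for (java.util.Set<String> source : sources) {
      effective.addAll(source);
   }
   return effective;   // callers still collapse weaker/stronger duplicates afterwards
}
// ---------------------------------------------------------------------------------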
__getEffectiveResourcePermissionsIgnoringSuperUserPrivileges(SQLConnection connection, Resource accessorResource, Resource accessedResource) { Set<ResourcePermission> resourcePermissions = new HashSet<>(); // collect the system permissions that the accessor resource has to the accessed resource resourcePermissions.addAll(grantResourcePermissionSysPersister .getResourceSysPermissionsIncludeInherited(connection, accessorResource, accessedResource)); // collect the non-system permissions that the accessor has to the accessed resource resourcePermissions.addAll(grantResourcePermissionPersister.getResourcePermissionsIncludeInherited(connection, accessorResource, accessedResource)); final Id<DomainId> accessedDomainId = resourcePersister.getDomainIdByResource(connection, accessedResource); final Id<ResourceClassId> accessedResourceClassId = Id.from(resourceClassPersister .getResourceClassInfoByResourceId(connection, accessedResource) .getResourceClassId()); // collect the global system permissions that the accessor has to the accessed resource's domain resourcePermissions .addAll(grantGlobalResourcePermissionSysPersister.getGlobalSysPermissionsIncludeInherited(connection, accessorResource, accessedResourceClassId, accessedDomainId)); // first collect the global non-system permissions that the accessor this resource has to the accessed resource's domain resourcePermissions .addAll(grantGlobalResourcePermissionPersister.getGlobalResourcePermissionsIncludeInherited(connection, accessorResource, accessedResourceClassId, accessedDomainId)); return __collapseResourcePermissions(resourcePermissions); } @Override public void setGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); __setDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } private void __setDirectGlobalPermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { __assertResourceExists(connection, accessorResource); // verify that resource class is defined final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfo(connection, resourceClassName); // verify the domain final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // next ensure that the requested permissions are all in the correct resource class __assertUniqueGlobalResourcePermissionNamesForResourceClass(connection, requestedResourcePermissions, resourceClassInternalInfo); if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) { // check if 
the grantor (=session resource) is authorized to grant the requested permissions final Set<ResourcePermission> grantorPermissions = __getEffectiveGlobalResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); final Set<ResourcePermission> directAccessorPermissions = __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId); final Set<ResourcePermission> requestedAddPermissions = __subtract(requestedResourcePermissions, directAccessorPermissions); if (!requestedAddPermissions.isEmpty()) { final Set<ResourcePermission> unauthorizedAddPermissions = __subtractResourcePermissionsIfGrantableFrom(requestedAddPermissions, grantorPermissions); if (unauthorizedAddPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "add the following global permission(s): " + unauthorizedAddPermissions); } } final Set<ResourcePermission> requestedRemovePermissions = __subtract(directAccessorPermissions, requestedResourcePermissions); if (!requestedRemovePermissions.isEmpty()) { final Set<ResourcePermission> unauthorizedRemovePermissions = __subtractResourcePermissionsIfGrantableFrom(requestedRemovePermissions, grantorPermissions); if (unauthorizedRemovePermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "remove the following global permission(s): " + unauthorizedRemovePermissions); } } } // revoke any existing system permissions this accessor has to this domain + resource class grantGlobalResourcePermissionSysPersister.removeGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId); // revoke any existing non-system permissions that this grantor gave this accessor to this domain to the resource class grantGlobalResourcePermissionPersister.removeGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId); // add the new system permissions grantGlobalResourcePermissionSysPersister.addGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId, requestedResourcePermissions, sessionResource); // add the new non-system permissions grantGlobalResourcePermissionPersister.addGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId, requestedResourcePermissions, sessionResource); } private Set<ResourcePermission> __getDirectGlobalResourcePermissions(SQLConnection connection, Resource accessorResource, Id<ResourceClassId> resourceClassId, Id<DomainId> domainId) { Set<ResourcePermission> resourcePermissions = new HashSet<>(); // collect the global system permissions that the accessor resource has to the accessed resource class & domain directly resourcePermissions.addAll(grantGlobalResourcePermissionSysPersister.getGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId)); // collect the global non-system permissions that the accessor has to the accessed resource class & domain directly resourcePermissions.addAll(grantGlobalResourcePermissionPersister.getGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId)); return resourcePermissions; } private void __assertUniqueGlobalResourcePermissionNamesForResourceClass(SQLConnection connection, Set<ResourcePermission> requestedResourcePermissions, ResourceClassInternalInfo resourceClassInternalInfo) { final List<String> validPermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo); final HashSet<String> uniquePermissionNames = new 
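// ---------------------------------------------------------------------------------
// [editor's note: illustrative usage sketch - not part of the original source]
// Global permissions apply to every resource of a class within a domain rather than to
// one specific resource. A caller inside this class replacing an accessor's global
// permissions might look like this; the "QUERY" permission, the "DOCUMENT" class and
// the "SALES_DOMAIN" domain are hypothetical names used purely for illustration.
private void setGlobalUsageSketch(Resource accessorResource) {
   final java.util.Set<ResourcePermission> permissions = new java.util.HashSet<>();
   permissions.add(ResourcePermissions.getInstance("QUERY", false));   // hypothetical permission name
   setGlobalResourcePermissions(accessorResource, "DOCUMENT", "SALES_DOMAIN", permissions);
}
// ---------------------------------------------------------------------------------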
HashSet<>(requestedResourcePermissions.size()); for (ResourcePermission resourcePermission : requestedResourcePermissions) { if (resourcePermission.isSystemPermission() && ResourcePermission_INHERIT.equals(resourcePermission)) { // we prohibit granting the system INHERIT permission, since cycle checking may be prohibitively compute intensive throw new IllegalArgumentException("Permission: " + String.valueOf(resourcePermission) + ", not valid in this context"); } if (!validPermissionNames.contains(resourcePermission.getPermissionName())) { if (resourcePermission.isSystemPermission()) { // currently the only invalid system permissions are for unauthenticatable resource classes throw new IllegalArgumentException("Permission " + resourcePermission.getPermissionName() + " not valid for unauthenticatable resource of class " + resourceClassInternalInfo.getResourceClassName()); } else { throw new IllegalArgumentException("Permission: " + resourcePermission.getPermissionName() + " is not defined for resource class: " + resourceClassInternalInfo.getResourceClassName()); } } if (uniquePermissionNames.contains(resourcePermission.getPermissionName())) { throw new IllegalArgumentException("Duplicate permission: " + resourcePermission.getPermissionName() + " that only differs in 'withGrant' option"); } else { uniquePermissionNames.add(resourcePermission.getPermissionName()); } } } @Override public void grantGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); __grantDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public void grantGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... 
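// ---------------------------------------------------------------------------------
// [editor's note: illustrative sketch - not part of the original source]
// As the validation above shows, the system INHERIT permission is rejected for global
// permissions because cycle checking across an entire class/domain would be too
// expensive. A defensive caller could pre-filter the request; the helper below is a
// hypothetical sketch that takes the INHERIT permission instance as a parameter (e.g.
// this class's ResourcePermission_INHERIT constant) and relies on the
// equalsIgnoreGrant() comparison used elsewhere in this file.
private static void removeInheritSketch(java.util.Set<ResourcePermission> requestedPermissions,
                                        ResourcePermission inheritPermission) {
   final java.util.Iterator<ResourcePermission> iterator = requestedPermissions.iterator();
   while (iterator.hasNext()) {
      if (iterator.next().equalsIgnoreGrant(inheritPermission)) {
         iterator.remove();   // would otherwise be rejected by the validation above
      }
   }
}
// ---------------------------------------------------------------------------------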
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); __grantDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private void __grantDirectGlobalPermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { __assertResourceExists(connection, accessorResource); // verify that resource class is defined final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfo(connection, resourceClassName); // verify the domain final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // next ensure that the requested permissions are all in the correct resource class __assertUniqueGlobalResourcePermissionNamesForResourceClass(connection, requestedResourcePermissions, resourceClassInternalInfo); // check for authorization if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) { final Set<ResourcePermission> grantorPermissions = __getEffectiveGlobalResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); final Set<ResourcePermission> unauthorizedPermissions = __subtractResourcePermissionsIfGrantableFrom(requestedResourcePermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "grant the following global permission(s): " + unauthorizedPermissions); } } final Set<ResourcePermission> directAccessorPermissions = __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId); final Set<ResourcePermission> addPermissions = new HashSet<>(requestedResourcePermissions.size()); final Set<ResourcePermission> updatePermissions = new HashSet<>(requestedResourcePermissions.size()); for (ResourcePermission requestedPermission : requestedResourcePermissions) { boolean existingPermission = false; for (ResourcePermission existingDirectPermission : directAccessorPermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // found a match by name - now let's check if we need to update existing or leave it unchanged if (!requestedPermission.equals(existingDirectPermission) && !requestedPermission.isGrantableFrom(existingDirectPermission)) { // requested permission has higher granting rights than the already existing direct permission, // so we need to update it updatePermissions.add(requestedPermission); } existingPermission = 
true; break; } } if (!existingPermission) { // couldn't find requested permission in set of already existing direct permissions, by name, so we need to add it addPermissions.add(requestedPermission); } } // update any necessary direct system permissions between the accessor and the accessed resource grantGlobalResourcePermissionSysPersister.updateGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId, updatePermissions, sessionResource); // update any necessary direct non-system permissions between the accessor and the accessed resource grantGlobalResourcePermissionPersister.updateGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId, updatePermissions, sessionResource); // add the new system permissions grantGlobalResourcePermissionSysPersister.addGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId, addPermissions, sessionResource); // add the new non-system permissions grantGlobalResourcePermissionPersister.addGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId, addPermissions, sessionResource); } @Override public void revokeGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); __revokeDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public void revokeGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); __revokeDirectGlobalPermissions(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private void __revokeDirectGlobalPermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { __assertResourceExists(connection, accessorResource); // verify that resource class is defined final ResourceClassInternalInfo resourceClassInfo = __getResourceClassInternalInfo(connection, resourceClassName); final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); // next ensure that the requested permissions are valid and unique in name __assertUniqueResourcePermissionsNamesForResourceClass(connection, requestedResourcePermissions, resourceClassInfo); // verify the domain final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // check for authorization if (!__isSuperUserOfDomain(connection, sessionResource, domainName)) { final Set<ResourcePermission> grantorPermissions = __getEffectiveGlobalResourcePermissionsIgnoringSuperUserPrivileges(connection, sessionResource, resourceClassName, domainName); final Set<ResourcePermission> unauthorizedPermissions = __subtractResourcePermissionsIfGrantableFrom(requestedResourcePermissions, grantorPermissions); if (unauthorizedPermissions.size() > 0) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "revoke the following global permission(s): " + unauthorizedPermissions); } } final Set<ResourcePermission> directAccessorPermissions = __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId); final Set<ResourcePermission> removePermissions = new HashSet<>(requestedResourcePermissions.size()); for (ResourcePermission requestedPermission : requestedResourcePermissions) { for (ResourcePermission existingDirectPermission : directAccessorPermissions) { if (requestedPermission.equalsIgnoreGrant(existingDirectPermission)) { // requested permission has same name and regardless of granting rights we need to remove it removePermissions.add(requestedPermission); break; } } } // remove any necessary direct system permissions grantGlobalResourcePermissionSysPersister.removeGlobalSysPermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); // remove any necessary direct non-system permissions grantGlobalResourcePermissionPersister.removeGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId, removePermissions); } @Override public Set<ResourcePermission> getGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName) { SQLConnection 
connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName); } finally { __closeConnection(connection); } } private Set<ResourcePermission> __getDirectGlobalResourcePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName) { // verify that resource class is defined final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // verify the domain final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } return __getDirectGlobalResourcePermissions(connection, accessorResource, resourceClassId, domainId); } @Override public Set<ResourcePermission> getEffectiveGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __getEffectiveGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName); } finally { __closeConnection(connection); } } private Set<ResourcePermission> __getEffectiveGlobalResourcePermissionsIgnoringSuperUserPrivileges(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName) { // verify that resource class is defined final Id<ResourceClassId> resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // verify the domain final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } Set<ResourcePermission> resourcePermissions = new HashSet<>(); // first collect the system permissions that the accessor has to the accessed resource resourcePermissions.addAll(grantGlobalResourcePermissionSysPersister .getGlobalSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // first collect the non-system permissions that the accessor this resource has to the accessor resource resourcePermissions.addAll(grantGlobalResourcePermissionPersister .getGlobalResourcePermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); return __collapseResourcePermissions(resourcePermissions); } private Set<ResourcePermission> __getEffectiveGlobalResourcePermissions(SQLConnection connection, Resource 
accessorResource, String resourceClassName, String domainName) { // verify that resource class is defined final ResourceClassInternalInfo resourceClassInternalInfo = __getResourceClassInternalInfo(connection, resourceClassName); // verify the domain final Id<DomainId> domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } if (__isSuperUserOfDomain(connection, accessorResource, domainName)) { return __getApplicableResourcePermissions(connection, resourceClassInternalInfo); } final Id<ResourceClassId> resourceClassId = Id.from(resourceClassInternalInfo.getResourceClassId()); Set<ResourcePermission> resourcePermissions = new HashSet<>(); // first collect the system permissions that the accessor has to the accessed resource resourcePermissions.addAll(grantGlobalResourcePermissionSysPersister .getGlobalSysPermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); // first collect the non-system permissions that the accessor this resource has to the accessor resource resourcePermissions.addAll(grantGlobalResourcePermissionPersister .getGlobalResourcePermissionsIncludeInherited(connection, accessorResource, resourceClassId, domainId)); return __collapseResourcePermissions(resourcePermissions); } private Set<ResourcePermission> __getApplicableResourcePermissions(SQLConnection connection, ResourceClassInternalInfo resourceClassInternalInfo) { final List<String> resourcePermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo); Set<ResourcePermission> superResourcePermissions = new HashSet<>(resourcePermissionNames.size()); for (String permissionName : resourcePermissionNames) { superResourcePermissions.add(ResourcePermissions.getInstance(permissionName, true)); } return superResourcePermissions; } private Set<ResourceCreatePermission> __getApplicableResourceCreatePermissions(SQLConnection connection, ResourceClassInternalInfo resourceClassInternalInfo) { final List<String> resourcePermissionNames = __getApplicableResourcePermissionNames(connection, resourceClassInternalInfo); Set<ResourceCreatePermission> superResourceCreatePermissions = new HashSet<>(resourcePermissionNames.size()+1); superResourceCreatePermissions.add(ResourceCreatePermissions.getInstance(ResourceCreatePermissions.CREATE, true)); for (String permissionName : resourcePermissionNames) { superResourceCreatePermissions.add(ResourceCreatePermissions .getInstance(ResourcePermissions .getInstance(permissionName, true), true)); } return superResourceCreatePermissions; } private Set<ResourcePermission> __collapseResourcePermissions(Set<ResourcePermission> resourcePermissions) { final Set<ResourcePermission> collapsedPermissions = new HashSet<>(resourcePermissions); for (ResourcePermission permission : resourcePermissions) { for (ResourcePermission grantEquivalentPermission : resourcePermissions) { if (permission.isGrantableFrom(grantEquivalentPermission) && !permission.equals(grantEquivalentPermission)) { collapsedPermissions.remove(permission); break; } } } return collapsedPermissions; } @Override public Map<String, Map<String, Set<ResourcePermission>>> getGlobalResourcePermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, 
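// ---------------------------------------------------------------------------------
// [editor's note: illustrative sketch - not part of the original source]
// __collapseResourcePermissions() above drops any permission that is "grantable from"
// another member of the same set, so only the strongest variant of each permission
// name survives (e.g. a plain permission is dropped when its with-grant counterpart is
// also present). The net effect, sketched over hypothetical name/withGrant entries:
private static java.util.Map<String, Boolean> collapseSketch(java.util.Collection<java.util.Map.Entry<String, Boolean>> permissions) {
   final java.util.Map<String, Boolean> strongestByName = new java.util.HashMap<>();
   for (java.util.Map.Entry<String, Boolean> permission : permissions) {
      final Boolean existingWithGrant = strongestByName.get(permission.getKey());
      if (existingWithGrant == null || (permission.getValue() && !existingWithGrant)) {
         strongestByName.put(permission.getKey(), permission.getValue());   // keep the grant-bearing variant
      }
   }
   return strongestByName;   // one entry per permission name, strongest granting right wins
}
// ---------------------------------------------------------------------------------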
accessorResource); return __getDirectGlobalResourcePermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Map<String, Set<ResourcePermission>>> __getDirectGlobalResourcePermissionsMap(SQLConnection connection, Resource accessorResource) { final Map<String, Map<String, Set<ResourcePermission>>> globalALLPermissionsMap = new HashMap<>(); // collect the system permissions that the accessor has and add it into the globalALLPermissionsMap globalALLPermissionsMap .putAll(grantGlobalResourcePermissionSysPersister.getGlobalSysPermissions(connection, accessorResource)); // next collect the non-system permissions that the accessor has and add it into the globalALLPermissionsMap __mergeSourcePermissionsMapIntoTargetPermissionsMap(grantGlobalResourcePermissionPersister .getGlobalResourcePermissions(connection, accessorResource), globalALLPermissionsMap); return __collapseResourcePermissions(globalALLPermissionsMap); } @Override public Map<String, Map<String, Set<ResourcePermission>>> getEffectiveGlobalResourcePermissionsMap(Resource accessorResource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); return __getEffectiveGlobalResourcePermissionsMap(connection, accessorResource); } finally { __closeConnection(connection); } } private Map<String, Map<String, Set<ResourcePermission>>> __getEffectiveGlobalResourcePermissionsMap(SQLConnection connection, Resource accessorResource) { final Map<String, Map<String, Set<ResourcePermission>>> globalALLPermissionsMap = new HashMap<>(); // collect the system permissions that the accessor has and add it into the globalALLPermissionsMap globalALLPermissionsMap .putAll(grantGlobalResourcePermissionSysPersister .getGlobalSysPermissionsIncludeInherited(connection, accessorResource)); // next collect the non-system permissions that the accessor has and add it into the globalALLPermissionsMap __mergeSourcePermissionsMapIntoTargetPermissionsMap( grantGlobalResourcePermissionPersister.getGlobalResourcePermissionsIncludeInherited(connection, accessorResource), globalALLPermissionsMap); // finally, collect all applicable permissions when accessor has super-user privileges to any domain // and add them into the globalALLPermissionsMap final Map<String, Map<String, Set<ResourcePermission>>> superGlobalResourcePermissionsMap = new HashMap<>(); Map<String, Set<ResourcePermission>> superResourcePermissionsMap = null; final Map<String, Set<DomainPermission>> effectiveDomainPermissionsMap = __getEffectiveDomainPermissionsMap(connection, accessorResource); for (String domainName : effectiveDomainPermissionsMap.keySet()) { final Set<DomainPermission> effectiveDomainPermissions = effectiveDomainPermissionsMap.get(domainName); if (effectiveDomainPermissions.contains(DomainPermission_SUPER_USER) || effectiveDomainPermissions.contains(DomainPermission_SUPER_USER_GRANT)) { if (superResourcePermissionsMap == null) { // lazy-construct super-user-privileged resource-permissions map by resource classes final List<String> resourceClassNames = resourceClassPersister.getResourceClassNames(connection); superResourcePermissionsMap = new HashMap<>(resourceClassNames.size()); for (String resourceClassName : resourceClassNames) { final Set<ResourcePermission> applicableResourcePermissions = 
__getApplicableResourcePermissions(connection, __getResourceClassInternalInfo(connection, resourceClassName)); superResourcePermissionsMap.put(resourceClassName, applicableResourcePermissions); } } superGlobalResourcePermissionsMap.put(domainName, superResourcePermissionsMap); } } __mergeSourcePermissionsMapIntoTargetPermissionsMap(superGlobalResourcePermissionsMap, globalALLPermissionsMap); return __collapseResourcePermissions(globalALLPermissionsMap); } private void __mergeSourcePermissionsMapIntoTargetPermissionsMap(Map<String, Map<String, Set<ResourcePermission>>> sourcePermissionsMap, Map<String, Map<String, Set<ResourcePermission>>> targetPermissionsMap) { for (String domainName : sourcePermissionsMap.keySet()) { Map<String, Set<ResourcePermission>> targetPermsForDomainMap; // does the target map have domain? if ((targetPermsForDomainMap = targetPermissionsMap.get(domainName)) == null) { // no, add the domain targetPermissionsMap.put(domainName, targetPermsForDomainMap = new HashMap<>()); } for (String resourceClassName : sourcePermissionsMap.get(domainName).keySet()) { Set<ResourcePermission> targetPermsForClassSet; // does the target map have the resource class? if ((targetPermsForClassSet = targetPermsForDomainMap.get(resourceClassName)) == null) { // no, add the resource class targetPermsForDomainMap.put(resourceClassName, targetPermsForClassSet = new HashSet<>()); } // get the source permissions for the domain + resource class final Set<ResourcePermission> sourcePermissionsForClassSet = sourcePermissionsMap.get(domainName).get(resourceClassName); // add the source permissions above to the target for the respective domain + resource class targetPermsForClassSet.addAll(sourcePermissionsForClassSet); } } } private Map<String, Map<String, Set<ResourcePermission>>> __collapseResourcePermissions(Map<String, Map<String, Set<ResourcePermission>>> resourcePermissionsMap) { for (String domainName : resourcePermissionsMap.keySet()) { final Map<String, Set<ResourcePermission>> createPermissionsByDomainMap = resourcePermissionsMap.get(domainName); for (String resourceClassName : createPermissionsByDomainMap.keySet()) { final Set<ResourcePermission> createPermissionsByResourceClassMap = createPermissionsByDomainMap.get(resourceClassName); createPermissionsByDomainMap.put(resourceClassName, __collapseResourcePermissions(createPermissionsByResourceClassMap)); } } return resourcePermissionsMap; } @Override public String getDomainNameByResource(Resource resource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(resource); if (sessionResource.equals(resource)) { return sessionResourceDomainName; } else { try { connection = __getConnection(); resource = __resolveResource(connection, resource); return domainPersister.getResourceDomainNameByResourceId(connection, resource); } finally { __closeConnection(connection); } } } @Override public Set<String> getDomainDescendants(String domainName) { SQLConnection connection = null; __assertAuthenticated(); __assertDomainSpecified(domainName); try { connection = __getConnection(); domainName = domainName.trim(); return domainPersister.getResourceDomainNameDescendants(connection, domainName); } finally { __closeConnection(connection); } } @Override public ResourceClassInfo getResourceClassInfo(String resourceClassName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceClassSpecified(resourceClassName); try { connection = __getConnection(); final ResourceClassInternalInfo resourceClassInternalInfo = 
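// ---------------------------------------------------------------------------------
// [editor's note: illustrative sketch - not part of the original source]
// The merge helper above folds one domain -> resource-class -> permission-set map into
// another, creating missing inner maps and sets along the way. Assuming Java 8+ is
// available, the same merge can be written with computeIfAbsent(); shown here over
// plain strings, all names hypothetical.
private static void mergeSketch(java.util.Map<String, java.util.Map<String, java.util.Set<String>>> source,
                                java.util.Map<String, java.util.Map<String, java.util.Set<String>>> target) {
   for (java.util.Map.Entry<String, java.util.Map<String, java.util.Set<String>>> domainEntry : source.entrySet()) {
      final java.util.Map<String, java.util.Set<String>> targetClasses
            = target.computeIfAbsent(domainEntry.getKey(), k -> new java.util.HashMap<>());
      for (java.util.Map.Entry<String, java.util.Set<String>> classEntry : domainEntry.getValue().entrySet()) {
         targetClasses.computeIfAbsent(classEntry.getKey(), k -> new java.util.HashSet<>())
                      .addAll(classEntry.getValue());
      }
   }
}
// ---------------------------------------------------------------------------------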
__getResourceClassInternalInfo(connection, resourceClassName); return new ResourceClassInfo(resourceClassInternalInfo.getResourceClassName(), resourceClassInternalInfo.isAuthenticatable(), resourceClassInternalInfo.isUnauthenticatedCreateAllowed()); } finally { __closeConnection(connection); } } @Override public ResourceClassInfo getResourceClassInfoByResource(Resource resource) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(resource); try { connection = __getConnection(); resource = __resolveResource(connection, resource); final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, resource); return new ResourceClassInfo(resourceClassInternalInfo.getResourceClassName(), resourceClassInternalInfo.isAuthenticatable(), resourceClassInternalInfo.isUnauthenticatedCreateAllowed()); } finally { __closeConnection(connection); } } @Override public Resource getAuthenticatedResource() { __assertAuthenticated(); return authenticatedResource; } @Override public Resource getSessionResource() { __assertAuthenticated(); return sessionResource; } @Override public void assertPostCreateDomainPermissions(Resource accessorResource, Set<DomainPermission> domainPermissions) { if (!hasPostCreateDomainPermissions(accessorResource, domainPermissions)) { throw NotAuthorizedException.newInstanceForPostCreateDomainPermissions(accessorResource, domainPermissions); } } @Override public void assertPostCreateDomainPermissions(Resource accessorResource, DomainPermission domainPermission, DomainPermission... domainPermissions) { if (!hasPostCreateDomainPermissions(accessorResource, domainPermission, domainPermissions)) { throw NotAuthorizedException.newInstanceForPostCreateDomainPermissions(accessorResource, domainPermission, domainPermissions); } } @Override public boolean hasPostCreateDomainPermissions(Resource accessorResource, Set<DomainPermission> domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainPermissions); __assertPermissionsSetNotEmpty(domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasPostCreateDomainPermissions(connection, accessorResource, domainPermissions); } finally { __closeConnection(connection); } } @Override public boolean hasPostCreateDomainPermissions(Resource accessorResource, DomainPermission domainPermission, DomainPermission... 
domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionSpecified(domainPermission); __assertVarargPermissionsSpecified(domainPermissions); final Set<DomainPermission> requestedDomainPermissions = __getSetWithoutNullsOrDuplicates(domainPermission, domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasPostCreateDomainPermissions(connection, accessorResource, requestedDomainPermissions); } finally { __closeConnection(connection); } } private boolean __hasPostCreateDomainPermissions(SQLConnection connection, Resource accessorResource, Set<DomainPermission> requestedDomainPermissions) { __assertResourceExists(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); boolean hasPermission = false; // first check if the accessor even has *CREATE permission for domains final Set<DomainCreatePermission> effectiveDomainCreatePermissions = __getEffectiveDomainCreatePermissions(connection, accessorResource); for (DomainCreatePermission domainCreatePermission : effectiveDomainCreatePermissions) { if (domainCreatePermission.isSystemPermission() && DomainCreatePermissions.CREATE.equals(domainCreatePermission.getPermissionName())) { hasPermission = true; break; } } if (hasPermission) { // check if the requested permissions are permissible from the set of effective post-create permissions final Set<DomainPermission> postCreateDomainPermissions = __getPostCreateDomainPermissions(effectiveDomainCreatePermissions); for (DomainPermission requestedDomainPermission : requestedDomainPermissions) { if (!__isPermissible(requestedDomainPermission, postCreateDomainPermissions)) { hasPermission = false; break; } } if (!hasPermission) { hasPermission = postCreateDomainPermissions.contains(DomainPermission_SUPER_USER) || postCreateDomainPermissions.contains(DomainPermission_SUPER_USER_GRANT); } } return hasPermission; } private boolean __isPermissible(DomainPermission queriedDomainPermission, Set<DomainPermission> domainPermissions) { for (DomainPermission domainPermission : domainPermissions) { if (queriedDomainPermission.equals(domainPermission) || queriedDomainPermission.isGrantableFrom(domainPermission)) { return true; } } return false; } @Override public void assertDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> domainPermissions) { if (!hasDomainPermissions(accessorResource, domainName, domainPermissions)) { throw NotAuthorizedException.newInstanceForDomainPermissions(accessorResource, domainName, domainPermissions); } } @Override public void assertDomainPermissions(Resource accessorResource, String domainName, DomainPermission domainPermission, DomainPermission... 
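// ---------------------------------------------------------------------------------
// [editor's note: illustrative sketch - not part of the original source]
// The post-create domain check above reads: the accessor must hold the *CREATE
// domain-create permission at all, and then either every requested permission must be
// covered by the effective post-create permissions or those post-create permissions
// must include SUPER_USER. Reduced to its boolean skeleton (parameter names are
// hypothetical):
private static boolean hasPostCreateDomainPermissionsSketch(boolean hasCreatePermission,
                                                            boolean allRequestedCoveredByPostCreate,
                                                            boolean postCreateIncludesSuperUser) {
   return hasCreatePermission && (allRequestedCoveredByPostCreate || postCreateIncludesSuperUser);
}
// ---------------------------------------------------------------------------------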
domainPermissions) { if (!hasDomainPermissions(accessorResource, domainName, domainPermission, domainPermissions)) { throw NotAuthorizedException.newInstanceForDomainPermissions(accessorResource, domainName, domainPermission, domainPermissions); } } @Override public boolean hasDomainPermissions(Resource accessorResource, String domainName, Set<DomainPermission> domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionsSpecified(domainPermissions); __assertPermissionsSetNotEmpty(domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasDomainPermissions(connection, accessorResource, domainName, domainPermissions); } finally { __closeConnection(connection); } } @Override public boolean hasDomainPermissions(Resource accessorResource, String domainName, DomainPermission domainPermission, DomainPermission... domainPermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertDomainSpecified(domainName); __assertPermissionSpecified(domainPermission); __assertVarargPermissionsSpecified(domainPermissions); final Set<DomainPermission> requestedDomainPermissions = __getSetWithoutNullsOrDuplicates(domainPermission, domainPermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasDomainPermissions(connection, accessorResource, domainName, requestedDomainPermissions); } finally { __closeConnection(connection); } } private boolean __hasDomainPermissions(SQLConnection connection, Resource accessorResource, String domainName, Set<DomainPermission> requestedDomainPermissions) { __assertResourceExists(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); // first check for effective permissions final Set<DomainPermission> effectiveDomainPermissions = __getEffectiveDomainPermissions(connection, accessorResource, domainName); boolean hasPermission = true; for (DomainPermission domainPermission : requestedDomainPermissions) { if (!__isPermissible(domainPermission, effectiveDomainPermissions)) { hasPermission = false; break; } } // next check super-user permissions to the domain of the accessed resource if (!hasPermission) { hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } @Override public void assertDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { if (!hasDomainCreatePermissions(accessorResource, domainCreatePermissions)) { throw NotAuthorizedException.newInstanceForDomainCreatePermissions(accessorResource, domainCreatePermissions); } } @Override public void assertDomainCreatePermissions(Resource accessorResource, DomainCreatePermission domainCreatePermission, DomainCreatePermission... 
domainCreatePermissions) { if (!hasDomainCreatePermissions(accessorResource, domainCreatePermission, domainCreatePermissions)) { throw NotAuthorizedException.newInstanceForDomainCreatePermissions(accessorResource, domainCreatePermission, domainCreatePermissions); } } @Override public boolean hasDomainCreatePermissions(Resource accessorResource, Set<DomainCreatePermission> domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionsSpecified(domainCreatePermissions); __assertPermissionsSetNotEmpty(domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasDomainCreatePermissions(connection, accessorResource, domainCreatePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasDomainCreatePermissions(Resource accessorResource, DomainCreatePermission domainCreatePermission, DomainCreatePermission... domainCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertPermissionSpecified(domainCreatePermission); __assertVarargPermissionsSpecified(domainCreatePermissions); final Set<DomainCreatePermission> requestedDomainCreatePermissions = __getSetWithoutNullsOrDuplicates(domainCreatePermission, domainCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); return __hasDomainCreatePermissions(connection, accessorResource, requestedDomainCreatePermissions); } finally { __closeConnection(connection); } } private boolean __hasDomainCreatePermissions(SQLConnection connection, Resource accessorResource, Set<DomainCreatePermission> queriedDomainCreatePermissions) { __assertResourceExists(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); final Set<DomainCreatePermission> effectiveDomainCreatePermissions = __getEffectiveDomainCreatePermissions(connection, accessorResource); for (DomainCreatePermission domainCreatePermission : queriedDomainCreatePermissions) { if (!__isPermissible(domainCreatePermission, effectiveDomainCreatePermissions)) { return false; } } return true; } private boolean __isPermissible(DomainCreatePermission queriedDomainCreatePermission, Set<DomainCreatePermission> domainCreatePermissions) { for (DomainCreatePermission domainCreatePermission : domainCreatePermissions) { if (queriedDomainCreatePermission.equals(domainCreatePermission) || queriedDomainCreatePermission.isGrantableFrom(domainCreatePermission)) { return true; } } return false; } @Override public void assertPostCreateResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { if (!hasPostCreateResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermissions)) { throw NotAuthorizedException.newInstanceForPostCreateResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermissions); } } @Override public void assertPostCreateResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... 
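// ---------------------------------------------------------------------------------
// [editor's note: illustrative sketch - not part of the original source]
// All of the __isPermissible() overloads in this class follow the same rule: a queried
// permission is covered when some held permission either equals it or can grant it.
// A generic restatement with a hypothetical coverage predicate (uses Java 8's
// java.util.function.BiPredicate):
private static <P> boolean isPermissibleSketch(P queried,
                                               java.util.Set<P> held,
                                               java.util.function.BiPredicate<P, P> equalOrGrantableFrom) {
   for (P heldPermission : held) {
      if (equalOrGrantableFrom.test(queried, heldPermission)) {
         return true;   // covered by at least one held permission
      }
   }
   return false;
}
// ---------------------------------------------------------------------------------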
resourcePermissions) { if (!hasPostCreateResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermission, resourcePermissions)) { throw NotAuthorizedException.newInstanceForPostCreateResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermission, resourcePermissions); } } @Override public boolean hasPostCreateResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasPostCreateResourcePermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasPostCreateResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasPostCreateResourcePermissions(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private boolean __hasPostCreateResourcePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { __assertResourceExists(connection, accessorResource); __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions); __assertQueryAuthorization(connection, accessorResource); boolean hasPermission = false; // first check if the accessor even has *CREATE permission for the resource class and domain final Set<ResourceCreatePermission> effectiveResourceCreatePermissions = __getEffectiveResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName); for (ResourceCreatePermission resourceCreatePermission : effectiveResourceCreatePermissions) { if (resourceCreatePermission.isSystemPermission() && ResourceCreatePermissions.CREATE.equals(resourceCreatePermission.getPermissionName())) { hasPermission = true; break; } } if (hasPermission) { // check if the requested permission is permissible from the set of effective post-create permissions final Set<ResourcePermission> postCreateResourcePermissions = __getPostCreateResourcePermissions(effectiveResourceCreatePermissions); final Set<ResourcePermission> nonPostCreateResourcePermissions = new HashSet<>(requestedResourcePermissions.size()); for (ResourcePermission 
requestedResourcePermission : requestedResourcePermissions) { if (!__isPermissible(requestedResourcePermission, postCreateResourcePermissions)) { nonPostCreateResourcePermissions.add(requestedResourcePermission); } } if (!nonPostCreateResourcePermissions.isEmpty()) { // check if the requested permission is permissible from the set of effective global permissions final Set<ResourcePermission> globalResourcePermissions = __getEffectiveGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName); for (ResourcePermission requestedResourcePermission : nonPostCreateResourcePermissions) { if (!__isPermissible(requestedResourcePermission, globalResourcePermissions)) { hasPermission = false; break; } } } } if (!hasPermission) { hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } private boolean __isPermissible(ResourcePermission queriedResourcePermission, Set<ResourcePermission> resourcePermissions) { for (ResourcePermission resourcePermission : resourcePermissions) { if (queriedResourcePermission.equals(resourcePermission) || queriedResourcePermission.isGrantableFrom(resourcePermission)) { return true; } } return false; } @Override public void assertGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { if (!hasGlobalResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermissions)) { throw NotAuthorizedException.newInstanceForGlobalResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermissions); } } @Override public void assertGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... resourcePermissions) { if (!hasGlobalResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermission, resourcePermissions)) { throw NotAuthorizedException.newInstanceForGlobalResourcePermissions(accessorResource, resourceClassName, domainName, resourcePermission, resourcePermissions); } } @Override public boolean hasGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasGlobalResourcePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private boolean __hasGlobalResourcePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { __assertResourceExists(connection, accessorResource); __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions); __assertQueryAuthorization(connection, accessorResource); final Set<ResourcePermission> globalResourcePermissions = __getEffectiveGlobalResourcePermissions(connection, accessorResource, resourceClassName, domainName); boolean hasPermission = true; for (ResourcePermission requestedResourcePermission : requestedResourcePermissions) { if (!__isPermissible(requestedResourcePermission, globalResourcePermissions)) { hasPermission = false; break; } } if (!hasPermission) { hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } @Override public void assertResourcePermissions(Resource accessorResource, Resource accessedResource, Set<ResourcePermission> resourcePermissions) { if (!hasResourcePermissions(accessorResource, accessedResource, resourcePermissions)) { throw NotAuthorizedException.newInstanceForResourcePermissions(accessorResource, accessedResource, resourcePermissions); } } @Override public void assertResourcePermissions(Resource accessorResource, Resource accessedResource, ResourcePermission resourcePermission, ResourcePermission... resourcePermissions) { if (!hasResourcePermissions(accessorResource, accessedResource, resourcePermission, resourcePermissions)) { throw NotAuthorizedException.newInstanceForResourcePermissions(accessorResource, accessedResource, resourcePermission, resourcePermissions); } } @Override public boolean hasResourcePermissions(Resource accessorResource, Resource accessedResource, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); return __hasResourcePermissions(connection, accessorResource, accessedResource, resourcePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasResourcePermissions(Resource accessorResource, Resource accessedResource, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceSpecified(accessedResource); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); accessedResource = __resolveResource(connection, accessedResource); return __hasResourcePermissions(connection, accessorResource, accessedResource, requestedResourcePermissions); } finally { __closeConnection(connection); } } private boolean __hasResourcePermissions(SQLConnection connection, Resource accessorResource, Resource accessedResource, Set<ResourcePermission> requestedResourcePermissions) { __assertResourceExists(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfoByResourceId(connection, accessedResource); __assertPermissionsValid(connection, resourceClassInternalInfo.getResourceClassName(), requestedResourcePermissions); // first check for effective permissions final Set<ResourcePermission> effectiveResourcePermissions = __getEffectiveResourcePermissions(connection, accessorResource, accessedResource); boolean hasPermission = true; for (ResourcePermission requestedResourcePermission : requestedResourcePermissions) { if (!__isPermissible(requestedResourcePermission, effectiveResourcePermissions)) { hasPermission = false; break; } } // next check super-user permissions to the domain of the accessed resource if (!hasPermission) { final String domainName = domainPersister.getResourceDomainNameByResourceId(connection, accessedResource); hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } @Override public void assertResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { if (!hasResourceCreatePermissions(accessorResource, resourceClassName, domainName, resourceCreatePermissions)) { throw NotAuthorizedException.newInstanceForResourceCreatePermissions(accessorResource, resourceCreatePermissions); } } @Override public void assertResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourceCreatePermission resourceCreatePermission, ResourceCreatePermission... 
resourceCreatePermissions) { if (!hasResourceCreatePermissions(accessorResource, resourceClassName, domainName, resourceCreatePermission, resourceCreatePermissions)) { throw NotAuthorizedException.newInstanceForResourceCreatePermissions(accessorResource, resourceCreatePermission, resourceCreatePermissions); } } @Override public boolean hasResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourceCreatePermissions); __assertPermissionsSetNotEmpty(resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, resourceCreatePermissions); } finally { __closeConnection(connection); } } @Override public boolean hasResourceCreatePermissions(Resource accessorResource, String resourceClassName, String domainName, ResourceCreatePermission resourceCreatePermission, ResourceCreatePermission... resourceCreatePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourceCreatePermission); __assertVarargPermissionsSpecified(resourceCreatePermissions); final Set<ResourceCreatePermission> requestedResourceCreatePermissions = __getSetWithoutNullsOrDuplicates(resourceCreatePermission, resourceCreatePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); resourceClassName = resourceClassName.trim(); domainName = domainName.trim(); return __hasResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName, requestedResourceCreatePermissions); } finally { __closeConnection(connection); } } private boolean __hasResourceCreatePermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourceCreatePermission> requestedResourceCreatePermissions) { __assertResourceExists(connection, accessorResource); __assertPermissionsValid(connection, resourceClassName, __getPostCreateResourcePermissions(requestedResourceCreatePermissions)); __assertQueryAuthorization(connection, accessorResource); final Set<ResourceCreatePermission> effectiveResourceCreatePermissions = __getEffectiveResourceCreatePermissions(connection, accessorResource, resourceClassName, domainName); boolean hasPermission = true; // first check for effective create permissions for (ResourceCreatePermission resourceCreatePermission : requestedResourceCreatePermissions) { if (!__isPermissible(resourceCreatePermission, effectiveResourceCreatePermissions)) { hasPermission = false; break; } } // next check super-user permissions to the domain if (!hasPermission) { hasPermission = __isSuperUserOfDomain(connection, accessorResource, domainName); } return hasPermission; } private boolean __isPermissible(ResourceCreatePermission queriedResourceCreatePermission, Set<ResourceCreatePermission> resourceCreatePermissions) { for (ResourceCreatePermission resourceCreatePermission : 
resourceCreatePermissions) { if (queriedResourceCreatePermission.equals(resourceCreatePermission) || queriedResourceCreatePermission.isGrantableFrom(resourceCreatePermission)) { return true; } } return false; } @Override public Set<Resource> getResourcesByResourcePermissions(Resource accessorResource, String resourceClassName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); return __getResourcesByPermissions(connection, accessorResource, resourceClassName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public Set<Resource> getResourcesByResourcePermissions(Resource accessorResource, String resourceClassName, ResourcePermission resourcePermission, ResourcePermission... resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); return __getResourcesByPermissions(connection, accessorResource, resourceClassName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private Set<Resource> __getResourcesByPermissions(SQLConnection connection, Resource accessorResource, String resourceClassName, Set<ResourcePermission> requestedResourcePermissions) { // first verify that resource class is defined Id<ResourceClassId> resourceClassId; Id<ResourcePermissionId> permissionId; resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // verify permissions are valid for resource class __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions); Set<Resource> resources = new HashSet<>(); for (ResourcePermission resourcePermission : requestedResourcePermissions) { Set<Resource> currentResources = new HashSet<>(); if (resourcePermission.isSystemPermission()) { // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionSysPersister .getResourcesByResourceSysPermission(connection, accessorResource, resourceClassId, resourcePermission)); // get the list of objects of the specified type that the session has access to via global permissions currentResources.addAll(grantGlobalResourcePermissionSysPersister .getResourcesByGlobalSysPermission(connection, accessorResource, resourceClassId, resourcePermission)); } else { // check if the non-system permission name is valid permissionId = resourceClassPermissionPersister.getResourceClassPermissionId(connection, resourceClassId, 
resourcePermission .getPermissionName()); if (permissionId == null) { throw new IllegalArgumentException("Permission: " + resourcePermission + " is not defined for resource class: " + resourceClassName); } // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionPersister .getResourcesByResourcePermission(connection, accessorResource, resourceClassId, resourcePermission, permissionId)); // get the list of objects of the specified type that the session has access to via global permissions currentResources.addAll(grantGlobalResourcePermissionPersister .getResourcesByGlobalResourcePermission(connection, accessorResource, resourceClassId, resourcePermission, permissionId)); } if (currentResources.isEmpty()) { // we got an empty set for a permission, we are done since this and all future intersects will be empty resources = currentResources; break; } else { // the only way resources will be empty below is if we never entered this else clause before if (resources.isEmpty()) { resources = currentResources; } else { // compute the intersection of previous iterations and the current resources resources.retainAll(currentResources); if (resources.isEmpty()) { // if intersection with previous results is empty, then all future intersections will be empty, as well break; } } } } // finally get the list of objects of the specified type that the session has access to via super user permissions resources.addAll(grantDomainPermissionSysPersister.getResourcesByDomainSuperUserPermission(connection, accessorResource, resourceClassId)); return resources; } @Override public Set<Resource> getResourcesByResourcePermissionsAndDomain(Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); return __getResourcesByPermissionsAndDomain(connection, accessorResource, resourceClassName, domainName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public Set<Resource> getResourcesByResourcePermissionsAndDomain(Resource accessorResource, String resourceClassName, String domainName, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessorResource); __assertResourceClassSpecified(resourceClassName); __assertDomainSpecified(domainName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessorResource = __resolveResource(connection, accessorResource); __assertQueryAuthorization(connection, accessorResource); resourceClassName = resourceClassName.trim(); return __getResourcesByPermissionsAndDomain(connection, accessorResource, resourceClassName, domainName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private Set<Resource> __getResourcesByPermissionsAndDomain(SQLConnection connection, Resource accessorResource, String resourceClassName, String domainName, Set<ResourcePermission> requestedResourcePermissions) { // first verify that resource class and domain is defined Id<ResourceClassId> resourceClassId; Id<DomainId> domainId; Id<ResourcePermissionId> permissionId; resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } domainId = domainPersister.getResourceDomainId(connection, domainName); if (domainId == null) { throw new IllegalArgumentException("Could not find domain: " + domainName); } // verify permissions are valid for resource class __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions); Set<Resource> resources = new HashSet<>(); for (ResourcePermission resourcePermission : requestedResourcePermissions) { Set<Resource> currentResources = new HashSet<>(); if (resourcePermission.isSystemPermission()) { // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionSysPersister .getResourcesByResourceSysPermission(connection, accessorResource, resourceClassId, domainId, resourcePermission)); // get the list of objects of the specified type that the session has access to via global permissions currentResources.addAll(grantGlobalResourcePermissionSysPersister .getResourcesByGlobalSysPermission(connection, accessorResource, resourceClassId, domainId, resourcePermission)); } else { // check if the non-system permission name is valid permissionId = resourceClassPermissionPersister.getResourceClassPermissionId(connection, resourceClassId, resourcePermission .getPermissionName()); if (permissionId == null) { throw new IllegalArgumentException("Permission: " + resourcePermission + " is not defined for resource class: " + resourceClassName); } // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionPersister .getResourcesByResourcePermission(connection, accessorResource, resourceClassId, domainId, resourcePermission, permissionId)); // get the list of objects of the specified type that the session has access to via global permissions currentResources.addAll(grantGlobalResourcePermissionPersister .getResourcesByGlobalResourcePermission(connection, accessorResource, resourceClassId, domainId, resourcePermission, permissionId)); } if (currentResources.isEmpty()) { // we got an empty set for a 
permission, we are done since this and all future intersects will be empty resources = currentResources; break; } else { // the only way resources will be empty below is if we never entered this else clause before if (resources.isEmpty()) { resources = currentResources; } else { // compute the intersection of previous iterations and the current resources resources.retainAll(currentResources); if (resources.isEmpty()) { // if intersection with previous results is empty, then all future intersections will be empty, as well break; } } } } // finally get the list of objects of the specified type that the session has access to via super user permissions resources.addAll(grantDomainPermissionSysPersister.getResourcesByDomainSuperUserPermission(connection, accessorResource, resourceClassId, domainId)); return resources; } @Override public Set<Resource> getAccessorResourcesByResourcePermissions(Resource accessedResource, String resourceClassName, Set<ResourcePermission> resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessedResource); __assertResourceClassSpecified(resourceClassName); __assertPermissionsSpecified(resourcePermissions); __assertPermissionsSetNotEmpty(resourcePermissions); try { connection = __getConnection(); accessedResource = __resolveResource(connection, accessedResource); __assertQueryAuthorization(connection, accessedResource); resourceClassName = resourceClassName.trim(); return __getAccessorResourcesByResourcePermissions(connection, accessedResource, resourceClassName, resourcePermissions); } finally { __closeConnection(connection); } } @Override public Set<Resource> getAccessorResourcesByResourcePermissions(Resource accessedResource, String resourceClassName, ResourcePermission resourcePermission, ResourcePermission... 
resourcePermissions) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceSpecified(accessedResource); __assertResourceClassSpecified(resourceClassName); __assertPermissionSpecified(resourcePermission); __assertVarargPermissionsSpecified(resourcePermissions); final Set<ResourcePermission> requestedResourcePermissions = __getSetWithoutNullsOrDuplicates(resourcePermission, resourcePermissions); try { connection = __getConnection(); accessedResource = __resolveResource(connection, accessedResource); __assertQueryAuthorization(connection, accessedResource); resourceClassName = resourceClassName.trim(); return __getAccessorResourcesByResourcePermissions(connection, accessedResource, resourceClassName, requestedResourcePermissions); } finally { __closeConnection(connection); } } private Set<Resource> __getAccessorResourcesByResourcePermissions(SQLConnection connection, Resource accessedResource, String resourceClassName, Set<ResourcePermission> requestedResourcePermissions) { // first verify that resource class is defined Id<ResourceClassId> resourceClassId; Id<ResourcePermissionId> permissionId; resourceClassId = resourceClassPersister.getResourceClassId(connection, resourceClassName); if (resourceClassId == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } // verify permissions are valid for the resource class __assertPermissionsValid(connection, resourceClassName, requestedResourcePermissions); Set<Resource> resources = new HashSet<>(); for (ResourcePermission resourcePermission : requestedResourcePermissions) { Set<Resource> currentResources = new HashSet<>(); if (resourcePermission.isSystemPermission()) { // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionSysPersister .getAccessorResourcesByResourceSysPermission(connection, accessedResource, resourceClassId, resourcePermission)); } else { // check if the non-system permission name is valid permissionId = resourceClassPermissionPersister.getResourceClassPermissionId(connection, resourceClassId, resourcePermission .getPermissionName()); if (permissionId == null) { throw new IllegalArgumentException("Permission: " + resourcePermission + " is not defined for resource class: " + resourceClassName); } // get the list of objects of the specified type that the session has access to via direct permissions currentResources.addAll(grantResourcePermissionPersister .getAccessorResourcesByResourcePermission(connection, accessedResource, resourceClassId, resourcePermission, permissionId)); } if (currentResources.isEmpty()) { // we got an empty set for a permission, we are done since this and all future intersects will be empty resources = currentResources; break; } else { // the only way resources will be empty below is if we never entered this else clause before if (resources.isEmpty()) { resources = currentResources; } else { // compute the intersection of previous iterations and the current resources resources.retainAll(currentResources); if (resources.isEmpty()) { // if intersection with previous results is empty, then all future intersections will be empty, as well break; } } } } return resources; } @Override public List<String> getResourceClassNames() { SQLConnection connection = null; __assertAuthenticated(); try { connection = __getConnection(); return resourceClassPersister.getResourceClassNames(connection); } finally { __closeConnection(connection); } } @Override public 
List<String> getResourcePermissionNames(String resourceClassName) { SQLConnection connection = null; __assertAuthenticated(); __assertResourceClassSpecified(resourceClassName); try { connection = __getConnection(); resourceClassName = resourceClassName.trim(); return __getApplicableResourcePermissionNames(connection, resourceClassName); } finally { __closeConnection(connection); } } // private shared helper methods private Resource __resolveResource(SQLConnection connection, Resource resource) { final Resource resolvedResource; if (resource.getId() != null) { if (resource.getExternalId() != null) { // the resource has both internal and external Ids, so let's see if they match resolvedResource = resourcePersister.resolveResourceByExternalId(connection, resource.getExternalId()); if (resolvedResource == null || !resource.equals(resolvedResource)) { throw new IllegalArgumentException("Resource " + resource + "'s id does not resolve to the specified externalId!"); } } else { // ensure that we have a valid internal resource id, so we might as well also fully resolve it resolvedResource = resourcePersister.resolveResourceByResourceId(connection, resource); if (resolvedResource == null) { throw new IllegalArgumentException("Resource " + resource + " not found!"); } } } else if (resource.getExternalId() != null) { // there is no internal resource Id, so we need to look it up resolvedResource = resourcePersister.resolveResourceByExternalId(connection, resource.getExternalId()); if (resolvedResource == null) { throw new IllegalArgumentException("Resource " + resource + " not found!"); } } else { throw new IllegalArgumentException("A resource id and/or external id is required, but neither was specified"); } return resolvedResource; } private List<String> __getApplicableResourcePermissionNames(SQLConnection connection, String resourceClassName) { return __getApplicableResourcePermissionNames(connection, __getResourceClassInternalInfo(connection, resourceClassName)); } private List<String> __getApplicableResourcePermissionNames(SQLConnection connection, ResourceClassInternalInfo resourceClassInternalInfo) { final List<String> permissionNames = resourceClassPermissionPersister.getPermissionNames(connection, resourceClassInternalInfo.getResourceClassName()); permissionNames.add(ResourcePermissions.INHERIT); permissionNames.add(ResourcePermissions.DELETE); permissionNames.add(ResourcePermissions.QUERY); if (resourceClassInternalInfo.isAuthenticatable()) { permissionNames.add(ResourcePermissions.IMPERSONATE); permissionNames.add(ResourcePermissions.RESET_CREDENTIALS); } return permissionNames; } private ResourceClassInternalInfo __getResourceClassInternalInfo(SQLConnection connection, String resourceClassName) { final ResourceClassInternalInfo resourceClassInternalInfo = resourceClassPersister.getResourceClassInfo(connection, resourceClassName); // check if the resource class is valid if (resourceClassInternalInfo == null) { throw new IllegalArgumentException("Could not find resource class: " + resourceClassName); } return resourceClassInternalInfo; } private boolean __isSuperUserOfResource(SQLConnection connection, Resource accessorResource, Resource accessedResource) { return __isSuperUserOfDomain(connection, accessorResource, domainPersister.getResourceDomainNameByResourceId(connection, accessedResource)); } private boolean __isSuperUserOfDomain(SQLConnection connection, Resource accessorResource, String queriedDomain) { Set<DomainPermission> domainPermissions = 
__getEffectiveDomainPermissions(connection, accessorResource, queriedDomain); return domainPermissions.contains(DomainPermission_SUPER_USER) || domainPermissions.contains(DomainPermission_SUPER_USER_GRANT); } private boolean __isSuperUserOfDomain(SQLConnection connection, Resource accessorResource, Id<DomainId> queriedDomainId) { Set<DomainPermission> domainPermissions = __getEffectiveDomainPermissions(connection, accessorResource, queriedDomainId); return domainPermissions.contains(DomainPermission_SUPER_USER) || domainPermissions.contains(DomainPermission_SUPER_USER_GRANT); } private Set<DomainPermission> __getPostCreateDomainPermissions(Set<DomainCreatePermission> domainCreatePermissions) { Set<DomainPermission> domainPermissions = new HashSet<>(); for (DomainCreatePermission domainCreatePermission : domainCreatePermissions) { if (!domainCreatePermission.isSystemPermission()) { domainPermissions.add(domainCreatePermission.getPostCreateDomainPermission()); } } return domainPermissions; } private Set<ResourcePermission> __getPostCreateResourcePermissions(Set<ResourceCreatePermission> resourceCreatePermissions) { Set<ResourcePermission> resourcePermissions = new HashSet<>(); for (ResourceCreatePermission resourceCreatePermission : resourceCreatePermissions) { if (!resourceCreatePermission.isSystemPermission()) { resourcePermissions.add(resourceCreatePermission.getPostCreateResourcePermission()); } } return resourcePermissions; } // helper methods private void __assertResourceSpecified(Resource resource) { if (resource == null) { throw new NullPointerException("Resource required, none specified"); } } private void __assertCredentialsSpecified(Credentials credentials) { if (credentials == null) { throw new NullPointerException("Credentials required, none specified"); } } private void __assertCredentialsNotSpecified(Credentials credentials) { if (credentials != null) { throw new IllegalArgumentException("Credentials not supported, but specified for unauthenticatable resource class"); } } private void __assertExternalIdSpecified(String externalId) { if (externalId == null) { throw new NullPointerException("External id required, none specified"); } else if (externalId.trim().isEmpty()) { throw new IllegalArgumentException("External id required, none specified"); } } private void __assertDomainSpecified(String domainName) { if (domainName == null) { throw new NullPointerException("Domain required, none specified"); } else if (domainName.trim().isEmpty()) { throw new IllegalArgumentException("Domain required, none specified"); } } private void __assertParentDomainSpecified(String domainName) { if (domainName == null) { throw new NullPointerException("Parent domain required, none specified"); } else if (domainName.trim().isEmpty()) { throw new IllegalArgumentException("Parent domain required, none specified"); } } private void __assertAuthenticatedAsSystemResource() { if (sessionResource == null || !SYSTEM_RESOURCE_ID.equals(sessionResource.getId())) { throw NotAuthorizedException.newInstanceForAction(sessionResource, "perform operation reserved for the system resource"); } } private void __assertAuthenticated() { if (sessionResource == null) { throw new NotAuthenticatedException("Session not authenticated"); } } private void __assertResourceClassSpecified(String resourceClassName) { if (resourceClassName == null) { throw new NullPointerException("Resource class required, none specified"); } else if (resourceClassName.trim().isEmpty()) { throw new IllegalArgumentException("Resource class required, 
none specified"); } } private void __assertPermissionSpecified(ResourcePermission resourcePermission) { if (resourcePermission == null) { throw new NullPointerException("Resource permission required, none specified"); } } private void __assertVarargPermissionsSpecified(ResourcePermission... resourcePermissions) { if (resourcePermissions == null) { throw new NullPointerException("An array or a sequence of resource permissions are required, but the null value was specified"); } } private void __assertPermissionSpecified(ResourceCreatePermission resourceCreatePermission) { if (resourceCreatePermission == null) { throw new NullPointerException("Resource create permission required, none specified"); } } private void __assertVarargPermissionsSpecified(ResourceCreatePermission... resourceCreatePermissions) { if (resourceCreatePermissions == null) { throw new NullPointerException("An array or a sequence of resource create permissions are required, but the null value was specified"); } } private void __assertPermissionSpecified(DomainCreatePermission domainCreatePermission) { if (domainCreatePermission == null) { throw new NullPointerException("Domain create permission required, none specified"); } } private void __assertVarargPermissionsSpecified(DomainCreatePermission... domainCreatePermissions) { if (domainCreatePermissions == null) { throw new NullPointerException("An array or a sequence of domain create permissions are required, but the null value was specified"); } } private void __assertPermissionSpecified(DomainPermission domainPermission) { if (domainPermission == null) { throw new NullPointerException("Domain permission required, none specified"); } } private void __assertVarargPermissionsSpecified(DomainPermission... domainPermissions) { if (domainPermissions == null) { throw new NullPointerException("An array or a sequence of domain permissions are required, but the null value was specified"); } } private void __assertPermissionsSpecified(Set permissionSet) { if (permissionSet == null) { throw new NullPointerException("Set of permissions required, none specified"); } if (permissionSet.contains(null)) { throw new NullPointerException("Set of permissions contains null element"); } } private void __assertPermissionsSetNotEmpty(Set permissionSet) { if (permissionSet.isEmpty()) { throw new IllegalArgumentException("Set of permissions required, empty set specified"); } } private void __assertPermissionNameValid(String permissionName) { if (permissionName == null) { throw new NullPointerException("Permission name may not be null"); } else if (permissionName.trim().isEmpty()) { throw new IllegalArgumentException("Permission name may not be blank"); } if (permissionName.trim().startsWith("*")) { throw new IllegalArgumentException("Permission name may not start with asterisk '*'"); } } private void __assertResourceClassNameValid(String resourceClassName) { if (resourceClassName == null) { throw new NullPointerException("Resource class name may not be null"); } else if (resourceClassName.trim().isEmpty()) { throw new IllegalArgumentException("Resource class name may not be blank"); } } private void __assertPermissionsValid(SQLConnection connection, String resourceClassName, Set<ResourcePermission> resourcePermissions) { final List<String> permissionNames = __getApplicableResourcePermissionNames(connection, resourceClassName); for (ResourcePermission resourcePermission : resourcePermissions) { if (!permissionNames.contains(resourcePermission.getPermissionName())) { if 
(resourcePermission.isSystemPermission()) { // currently the only invalid system permissions are for unauthenticatable resource classes throw new IllegalArgumentException("Permission " + resourcePermission.getPermissionName() + " not valid for unauthenticatable resource class " + resourceClassName); } else { throw new IllegalArgumentException("Permission: " + resourcePermission.getPermissionName() + " is not defined for resource class: " + resourceClassName); } } } } private void __assertResourceExists(SQLConnection connection, Resource resource) { // look up resource, but only if it's not the session or authenticated resource (which are already known to exist) if ((authenticatedResource == null) || !(sessionResource.equals(resource) || authenticatedResource.equals(resource))) { // the persister method will throw an IllegalArgumentException if the lookup fails resourcePersister.verifyResourceExists(connection, resource); } } private void __assertQueryAuthorization(SQLConnection connection, Resource accessorResource) { if (!sessionResource.equals(accessorResource)) { final Set<ResourcePermission> effectiveResourcePermissions = __getEffectiveResourcePermissions(connection, sessionResource, accessorResource); if (!effectiveResourcePermissions.contains(ResourcePermission_QUERY) && !effectiveResourcePermissions.contains(ResourcePermission_QUERY_GRANT) && !effectiveResourcePermissions.contains(ResourcePermission_IMPERSONATE) && !effectiveResourcePermissions.contains(ResourcePermission_IMPERSONATE_GRANT)) { throw NotAuthorizedException.newInstanceForActionOnResource(sessionResource, "query", accessorResource); } } } @SafeVarargs private static <T> Set<T> __getSetWithoutNullsOrDuplicates(T firstElement, T... elements) { // not null constraint if (elements == null) { throw new NullPointerException("An array or a sequence of arguments are required, but none were specified"); } final HashSet<T> resultSet = new HashSet<>(elements.length + 1); resultSet.add(firstElement); for (T element : elements) { // non-null elements constraint if (element == null) { throw new NullPointerException("A " + elements.getClass().getSimpleName() + " argument (or sequence of varargs) without null elements is required, but received: " + Arrays.asList(elements)); } // duplicate elements get ignored silently if (!resultSet.add(element)) { throw new IllegalArgumentException("Duplicate element: " + element); } } return resultSet; } // private connection management helper methods private SQLConnection __getConnection() { if (dataSource != null) { try { return new SQLConnection(dataSource.getConnection()); } catch (SQLException e) { throw new RuntimeException(e); } } else if (connection != null) { return new SQLConnection(connection); } else { throw new IllegalStateException("Not initialized! No data source or connection, perhaps missing call to postDeserialize()?"); } } private void __closeConnection(SQLConnection connection) { // only close the connection if we got it from a pool, otherwise just leave the connection open if (dataSource != null) { if (connection != null) { try { connection.close(); } catch (SQLException e) { throw new RuntimeException(e); } } } } }
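For orientation, here is a minimal caller-side sketch of how the query methods implemented above are typically invoked. It assumes these methods are exposed through OACC's AccessControlContext interface (as the @Override annotations and the class name suggest) and that the ResourcePermissions.getInstance(..) factory follows the usual OACC convention; the resource class name "document" and the permission name "READ" are purely illustrative and not taken from the code above.

import com.acciente.oacc.AccessControlContext;
import com.acciente.oacc.Resource;
import com.acciente.oacc.ResourcePermission;
import com.acciente.oacc.ResourcePermissions;

import java.util.Set;

public class PermissionQueryExample {
   // Boolean check: true if the accessor holds QUERY on the accessed resource, whether
   // directly, via inherited/global permissions, or as super-user of its domain --
   // the same fall-through order implemented in __hasResourcePermissions above.
   public static boolean canQuery(AccessControlContext oacc, Resource accessor, Resource accessed) {
      final ResourcePermission query = ResourcePermissions.getInstance(ResourcePermissions.QUERY);
      return oacc.hasResourcePermissions(accessor, accessed, query);
   }

   // Assertion style: same check, but it throws NotAuthorizedException on failure,
   // which is often more convenient at a service boundary than an if/else.
   public static void requireQuery(AccessControlContext oacc, Resource accessor, Resource accessed) {
      oacc.assertResourcePermissions(accessor, accessed,
                                     ResourcePermissions.getInstance(ResourcePermissions.QUERY));
   }

   // Reverse lookup: all resources of a class on which the accessor holds the given
   // permission; __getResourcesByPermissions above intersects the per-permission result
   // sets and then adds anything reachable through super-user domain permissions.
   public static Set<Resource> readableDocuments(AccessControlContext oacc, Resource accessor) {
      return oacc.getResourcesByResourcePermissions(accessor,
                                                    "document",                                // illustrative resource class
                                                    ResourcePermissions.getInstance("READ"));  // illustrative permission
   }
}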
removes redundant check if resource exists after resolving it, and optimizes resolution to use cached resources, if possible
src/main/java/com/acciente/oacc/sql/internal/SQLAccessControlContext.java
removes redundant check if resource exists after resolving it, and optimizes resolution to use cached resources, if possible
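The commit message above describes short-circuiting resource resolution against resources the context already knows. The actual diff is not shown here; the fragment below is only an illustrative sketch of that idea, assuming it lives inside SQLAccessControlContext so it can see the sessionResource and authenticatedResource fields and the __resolveResource helper from the code above, and glossing over the exact equality semantics of partially specified resources.

// Illustrative fragment only -- not the actual change from this commit.
private Resource __resolveResourceUsingCache(SQLConnection connection, Resource resource) {
   // the session and authenticated resources were fully resolved during authentication,
   // so they can be returned without another persister round trip
   if (resource.equals(sessionResource)) {
      return sessionResource;
   }
   if (resource.equals(authenticatedResource)) {
      return authenticatedResource;
   }
   // anything else still goes through the regular lookup shown above
   return __resolveResource(connection, resource);
}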
Java
mit
9007f85f1d7db52c3e4806b3103aa295cef8b7c3
0
bptlab/JEngine,bptlab/JEngine,bptlab/JEngine,BP2014W1/JEngine,BP2014W1/JEngine,BP2014W1/JEngine,bptlab/JEngine,BP2014W1/JEngine
package de.uni_potsdam.hpi.bpt.bp2014.jcore.rest; import com.ibatis.common.jdbc.ScriptRunner; import de.uni_potsdam.hpi.bpt.bp2014.AbstractTest; import de.uni_potsdam.hpi.bpt.bp2014.database.Connection; import net.javacrumbs.jsonunit.core.Option; import org.glassfish.jersey.server.ResourceConfig; import org.json.JSONObject; import org.junit.AfterClass; import org.junit.Before; import org.junit.Test; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Application; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import java.io.FileReader; import java.io.IOException; import java.sql.SQLException; import static net.javacrumbs.jsonunit.JsonMatchers.jsonEquals; import static org.junit.Assert.*; /** * This Class extends the {@link de.uni_potsdam.hpi.bpt.bp2014.AbstractTest} * to test the RestInterface of the JEngine core. * In order to do so it uses the functionality of the * {@link org.glassfish.jersey.test.JerseyTest} * There are test methods for every possible REST Call. * In order to stay independent from existing tests, the * database will be set up before and after the execution. * Define the database Properties inside the database_connection file. */ public class RestInterfaceTest extends AbstractTest { private static final String DEVELOPMENT_SQL_SEED_FILE = "src/main/resources/JEngineV2.sql"; /** * Sets up the seed file for the test database. */ static { TEST_SQL_SEED_FILE = "src/test/resources/JEngineV2RESTTest_new.sql"; } /** * The base url of the jcore rest interface. * Allows us to send requests to the {@link de.uni_potsdam.hpi.bpt.bp2014.jcore.rest.RestInterface}. */ private WebTarget base; @AfterClass public static void resetDatabase() throws IOException, SQLException { clearDatabase(); ScriptRunner runner = new ScriptRunner(Connection.getInstance().connect(), false, false); runner.runScript(new FileReader(DEVELOPMENT_SQL_SEED_FILE)); } @Override protected Application configure() { return new ResourceConfig(de.uni_potsdam.hpi.bpt.bp2014.jcore.rest.RestInterface.class); } @Before public void setUpBase() { base = target("interface/v2"); } /** * When you sent a GET to {@link RestInterface#getScenarios(UriInfo, String)} * the media type of the response will be JSON. */ @Test public void testGetScenarioProducesJson() { Response response = base.path("scenario").request().get(); assertEquals("The Response code of get Scenario was not 200", 200, response.getStatus()); assertEquals("Get Scenarios returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); } /** * When you sent a get to {@link RestInterface#getScenarios(UriInfo, String)} * the entity of the response will be a valid JSON array. */ @Test public void testGetScenarioProducesValidJsonArray() { Response response = base.path("scenario").request().get(); assertNotEquals("Get scenarios did not respond with a valid JSON Array", null, new JSONObject(response.readEntity(String.class))); } /** * When you sent a GET to {@link RestInterface#getScenarios(UriInfo, String)} * the returned JSON will contain the latest version of all Scenarios. 
*/ @Test public void testGetScenarioContent() { Response response = base.path("scenario").request().get(); assertThat("Get Scenarios did not contain the expected information", "{\"ids\":[1,2,3,100,101,103,105,111,113,114,115,116,117,118,134,135,136,138,139,140,141,142,143,144],\"links\":{\"140\":\"http://localhost:9998/interface/v2/scenario/140\",\"141\":\"http://localhost:9998/interface/v2/scenario/141\",\"142\":\"http://localhost:9998/interface/v2/scenario/142\",\"143\":\"http://localhost:9998/interface/v2/scenario/143\",\"1\":\"http://localhost:9998/interface/v2/scenario/1\",\"100\":\"http://localhost:9998/interface/v2/scenario/100\",\"111\":\"http://localhost:9998/interface/v2/scenario/111\",\"144\":\"http://localhost:9998/interface/v2/scenario/144\",\"2\":\"http://localhost:9998/interface/v2/scenario/2\",\"101\":\"http://localhost:9998/interface/v2/scenario/101\",\"134\":\"http://localhost:9998/interface/v2/scenario/134\",\"3\":\"http://localhost:9998/interface/v2/scenario/3\",\"113\":\"http://localhost:9998/interface/v2/scenario/113\",\"135\":\"http://localhost:9998/interface/v2/scenario/135\",\"103\":\"http://localhost:9998/interface/v2/scenario/103\",\"114\":\"http://localhost:9998/interface/v2/scenario/114\",\"136\":\"http://localhost:9998/interface/v2/scenario/136\",\"115\":\"http://localhost:9998/interface/v2/scenario/115\",\"105\":\"http://localhost:9998/interface/v2/scenario/105\",\"116\":\"http://localhost:9998/interface/v2/scenario/116\",\"138\":\"http://localhost:9998/interface/v2/scenario/138\",\"117\":\"http://localhost:9998/interface/v2/scenario/117\",\"139\":\"http://localhost:9998/interface/v2/scenario/139\",\"118\":\"http://localhost:9998/interface/v2/scenario/118\"},\"labels\":{\"1\":\"HELLOWORLD\",\"2\":\"helloWorld2\",\"3\":\"EmailTest\",\"100\":\"TestScenario\",\"101\":\"Test Insert Scenario\",\"134\":\"ReiseTestScenario\",\"103\":\"ScenarioTest1\",\"135\":\"ReiseTestScenario\",\"136\":\"TXOR1Scenario\",\"105\":\"TestScenarioTerminationCondition\",\"138\":\"TestEmail1Scenario\",\"139\":\"TestEmail1Scenario\",\"140\":\"TestEmail1Scenario\",\"141\":\"TestEmail2Scenario\",\"142\":\"TestEmail3Scenario\",\"111\":\"Test2_2ReferenceTest\",\"143\":\"TestEmail3Scenario\",\"144\":\"XORTest2Scenario\",\"113\":\"referencetest3_2\",\"114\":\"RT4Scenario\",\"115\":\"TT2Scenario\",\"116\":\"TT2Scenario\",\"117\":\"AT2Scenario\",\"118\":\"AT3Scenario\"}}", jsonEquals(response.readEntity(String.class)).when(Option.IGNORING_ARRAY_ORDER)); } /** * When you sent a GET to {@link RestInterface#getScenarios(UriInfo, String)} and * you use a Filter * then the returned JSON will contain the latest version of all Scenarios with * a name containing the filterString. */ @Test public void testGetScenarioContentWithFilter() { Response response = base.path("scenario").queryParam("filter", "HELLO").request().get(); assertThat("Get Scenarios did not contain the expected information", "{\"ids\":[1,2],\"labels\":{\"1\":\"HELLOWORLD\",\"2\":\"helloWorld2\"},\"links\":{\"1\":\"http://localhost:9998/interface/v2/scenario/1\",\"2\":\"http://localhost:9998/interface/v2/scenario/2\"}}", jsonEquals(response.readEntity(String.class)).when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a GET to {@link RestInterface#getScenario(UriInfo, int)} with an invalid id * a empty JSON with a 404 will be returned. 
*/ @Test public void testGetScenarioInvalidId() { Response response = base.path("scenario/99999").request().get(); assertEquals("Get Scenario returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertEquals("Get scenario returns a not empty JSON, but the id was invalid", 404, response.getStatus()); assertEquals("The content of the invalid request is not an empty JSONObject", "{}", response.readEntity(String.class)); } /** * If you send a GET to {@link RestInterface#getScenario(UriInfo, int)} with an valid id * a JSON containing the id, name and modelversion will be returned. */ @Test public void testGetScenario() { Response response = base.path("scenario/1").request().get(); String responseEntity = response.readEntity(String.class); assertEquals("The Response code of get Scenario was not 200", 200, response.getStatus()); assertEquals("Get Scenarios returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertNotEquals("Get scenarios did not respond with a valid JSON Array", null, new JSONObject(responseEntity)); assertThat("The content of the valid request is not as expected", "{\"modelid\":0,\"instances\":\"http://localhost:9998/interface/v2/scenario/1/instance\",\"name\":\"HELLOWORLD\",\"id\":1,\"modelversion\":0}", jsonEquals(responseEntity).when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getScenarioInstances(UriInfo, int, String)} * with valid params and no filter * then you get 200 a JSON Object. */ @Test public void testGetScenarioInstancesReturnsOkAndJSON() { Response response = base.path("scenario/1/instance").request().get(); assertEquals("The Response code of get get instances was not 200", 200, response.getStatus()); assertEquals("Get instances returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); } /** * When you send a Get to {@link RestInterface#getScenarioInstances(UriInfo, int, String)} * with an invalid scenario * then you get 404 with an error message inside the returned JSON object */ @Test public void testGetScenarioInstancesInvalidScenario() { Response response = base.path("scenario/9999/instance").request().get(); assertEquals("The Response code of get get instances was not 404", 404, response.getStatus()); assertEquals("Get instances returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON is invalid or does not contain the expected message", "{\"error\":\"Scenario not found!\"}", jsonEquals(response.readEntity(String.class))); } /** * When you send a Get to {@link RestInterface#getScenarioInstances(UriInfo, int, String)} * with a valid scenario id * then a json object with all instances, id and label should be returned. 
* The schema should be: * {"ids": [1,2..], "names":{1: "abc" ...}} */ @Test public void testGetScenarioInstancesReturnsCorrectJson() { Response response = base.path("scenario/1/instance").request().get(); assertThat("The returned JSON does not contain the expected content", "{\"ids\":[47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,92,94,95,97,99,101,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,212,214,215,216,217,218,219,220,221,222,223,224,226,228,244,246,248,250,252,255,257,259,261,262,263,265,266,270,279,281,282,284,285,286,294,296,309,310,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,333,334,346,348,349,351,353,356,358,359,361,363,365,366,367,369,370,371,372,373,374,375,376,377,378,379,380,381,383,385,386,387,388,390,392,394,396,398,400,402,404,406,408,409,410,411,412,413,415,416,417,418,419,420,421,422,430,433,438,441,444,445,446,448,449,452,456,459,460,461,464,467,470,473,476,479,482,485,488,491,492,493,494,497,505,515,519,527,531,552,561,563,572,582,590,598,605,612,614,621,628,635,642,649,656,663,670,677,684,691,698,705,712,719,726,733,740,747,754,756,763,770,777,784,791,809,816,823,836,846,853,860,867,895,915,951,952],\"links\":{\"590\":\"http://localhost:9998/interface/v2/scenario/1/instance/590\",\"470\":\"http://localhost:9998/interface/v2/scenario/1/instance/470\",\"351\":\"http://localhost:9998/interface/v2/scenario/1/instance/351\",\"110\":\"http://localhost:9998/interface/v2/scenario/1/instance/110\",\"473\":\"http://localhost:9998/interface/v2/scenario/1/instance/473\",\"111\":\"http://localhost:9998/interface/v2/scenario/1/instance/111\",\"353\":\"http://localhost:9998/interface/v2/scenario/1/instance/353\",\"112\":\"http://localhost:9998/interface/v2/scenario/1/instance/112\",\"113\":\"http://localhost:9998/interface/v2/scenario/1/instance/113\",\"476\":\"http://localhost:9998/interface/v2/scenario/1/instance/476\",\"114\":\"http://localhost:9998/interface/v2/scenario/1/instance/114\",\"356\":\"http://localhost:9998/interface/v2/scenario/1/instance/356\",\"598\":\"http://localhost:9998/interface/v2/scenario/1/instance/598\",\"115\":\"http://localhost:9998/interface/v2/scenario/1/instance/115\",\"116\":\"http://localhost:9998/interface/v2/scenario/1/instance/116\",\"358\":\"http://localhost:9998/interface/v2/scenario/1/instance/358\",\"479\":\"http://localhost:9998/interface/v2/scenario/1/instance/479\",\"117\":\"http://localhost:9998/interface/v2/scenario/1/instance/117\",\"359\":\"http://localhost:9998/interface/v2/scenario/1/instance/359\",\"118\":\"http://localhost:9998/interface/v2/scenario/1/instance/118\",\"119\":\"http://localhost:9998/interface/v2/scenario/1/instance/119\",\"915\":\"http://localhost:9998/interface/v2/scenario/1/instance/915\",\"361\":\"http://localhost:9998/interface/v2/scenario/1/instance/361\",\"482\":\"http://localhost:9998/interface/v2/scenario/1/instance/482\",\"120\":\"http://localhost:9998/interface/v2/scenario/1/instance/120\",\"121\":\"http://localhost:9998/interface/v2/scenario/1/instance/121\",\"363\":\"http://localhost:9998/interface/v2/scenario/1/instance/363\",\"122\"
:\"http://localhost:9998/interface/v2/scenario/1/instance/122\",\"485\":\"http://localhost:9998/interface/v2/scenario/1/instance/485\",\"123\":\"http://localhost:9998/interface/v2/scenario/1/instance/123\",\"244\":\"http://localhost:9998/interface/v2/scenario/1/instance/244\",\"365\":\"http://localhost:9998/interface/v2/scenario/1/instance/365\",\"124\":\"http://localhost:9998/interface/v2/scenario/1/instance/124\",\"366\":\"http://localhost:9998/interface/v2/scenario/1/instance/366\",\"125\":\"http://localhost:9998/interface/v2/scenario/1/instance/125\",\"246\":\"http://localhost:9998/interface/v2/scenario/1/instance/246\",\"367\":\"http://localhost:9998/interface/v2/scenario/1/instance/367\",\"488\":\"http://localhost:9998/interface/v2/scenario/1/instance/488\",\"126\":\"http://localhost:9998/interface/v2/scenario/1/instance/126\",\"127\":\"http://localhost:9998/interface/v2/scenario/1/instance/127\",\"248\":\"http://localhost:9998/interface/v2/scenario/1/instance/248\",\"369\":\"http://localhost:9998/interface/v2/scenario/1/instance/369\",\"128\":\"http://localhost:9998/interface/v2/scenario/1/instance/128\",\"129\":\"http://localhost:9998/interface/v2/scenario/1/instance/129\",\"809\":\"http://localhost:9998/interface/v2/scenario/1/instance/809\",\"370\":\"http://localhost:9998/interface/v2/scenario/1/instance/370\",\"491\":\"http://localhost:9998/interface/v2/scenario/1/instance/491\",\"250\":\"http://localhost:9998/interface/v2/scenario/1/instance/250\",\"371\":\"http://localhost:9998/interface/v2/scenario/1/instance/371\",\"492\":\"http://localhost:9998/interface/v2/scenario/1/instance/492\",\"130\":\"http://localhost:9998/interface/v2/scenario/1/instance/130\",\"372\":\"http://localhost:9998/interface/v2/scenario/1/instance/372\",\"493\":\"http://localhost:9998/interface/v2/scenario/1/instance/493\",\"131\":\"http://localhost:9998/interface/v2/scenario/1/instance/131\",\"252\":\"http://localhost:9998/interface/v2/scenario/1/instance/252\",\"373\":\"http://localhost:9998/interface/v2/scenario/1/instance/373\",\"494\":\"http://localhost:9998/interface/v2/scenario/1/instance/494\",\"132\":\"http://localhost:9998/interface/v2/scenario/1/instance/132\",\"374\":\"http://localhost:9998/interface/v2/scenario/1/instance/374\",\"133\":\"http://localhost:9998/interface/v2/scenario/1/instance/133\",\"375\":\"http://localhost:9998/interface/v2/scenario/1/instance/375\",\"134\":\"http://localhost:9998/interface/v2/scenario/1/instance/134\",\"255\":\"http://localhost:9998/interface/v2/scenario/1/instance/255\",\"376\":\"http://localhost:9998/interface/v2/scenario/1/instance/376\",\"497\":\"http://localhost:9998/interface/v2/scenario/1/instance/497\",\"135\":\"http://localhost:9998/interface/v2/scenario/1/instance/135\",\"377\":\"http://localhost:9998/interface/v2/scenario/1/instance/377\",\"136\":\"http://localhost:9998/interface/v2/scenario/1/instance/136\",\"257\":\"http://localhost:9998/interface/v2/scenario/1/instance/257\",\"378\":\"http://localhost:9998/interface/v2/scenario/1/instance/378\",\"137\":\"http://localhost:9998/interface/v2/scenario/1/instance/137\",\"379\":\"http://localhost:9998/interface/v2/scenario/1/instance/379\",\"138\":\"http://localhost:9998/interface/v2/scenario/1/instance/138\",\"259\":\"http://localhost:9998/interface/v2/scenario/1/instance/259\",\"139\":\"http://localhost:9998/interface/v2/scenario/1/instance/139\",\"816\":\"http://localhost:9998/interface/v2/scenario/1/instance/816\",\"380\":\"http://localhost:9998/interface/v2/scenario/1/instance/380\",\"381\":\"ht
tp://localhost:9998/interface/v2/scenario/1/instance/381\",\"140\":\"http://localhost:9998/interface/v2/scenario/1/instance/140\",\"261\":\"http://localhost:9998/interface/v2/scenario/1/instance/261\",\"141\":\"http://localhost:9998/interface/v2/scenario/1/instance/141\",\"262\":\"http://localhost:9998/interface/v2/scenario/1/instance/262\",\"383\":\"http://localhost:9998/interface/v2/scenario/1/instance/383\",\"142\":\"http://localhost:9998/interface/v2/scenario/1/instance/142\",\"263\":\"http://localhost:9998/interface/v2/scenario/1/instance/263\",\"143\":\"http://localhost:9998/interface/v2/scenario/1/instance/143\",\"385\":\"http://localhost:9998/interface/v2/scenario/1/instance/385\",\"144\":\"http://localhost:9998/interface/v2/scenario/1/instance/144\",\"265\":\"http://localhost:9998/interface/v2/scenario/1/instance/265\",\"386\":\"http://localhost:9998/interface/v2/scenario/1/instance/386\",\"145\":\"http://localhost:9998/interface/v2/scenario/1/instance/145\",\"266\":\"http://localhost:9998/interface/v2/scenario/1/instance/266\",\"387\":\"http://localhost:9998/interface/v2/scenario/1/instance/387\",\"146\":\"http://localhost:9998/interface/v2/scenario/1/instance/146\",\"388\":\"http://localhost:9998/interface/v2/scenario/1/instance/388\",\"147\":\"http://localhost:9998/interface/v2/scenario/1/instance/147\",\"148\":\"http://localhost:9998/interface/v2/scenario/1/instance/148\",\"149\":\"http://localhost:9998/interface/v2/scenario/1/instance/149\",\"823\":\"http://localhost:9998/interface/v2/scenario/1/instance/823\",\"705\":\"http://localhost:9998/interface/v2/scenario/1/instance/705\",\"47\":\"http://localhost:9998/interface/v2/scenario/1/instance/47\",\"48\":\"http://localhost:9998/interface/v2/scenario/1/instance/48\",\"49\":\"http://localhost:9998/interface/v2/scenario/1/instance/49\",\"390\":\"http://localhost:9998/interface/v2/scenario/1/instance/390\",\"270\":\"http://localhost:9998/interface/v2/scenario/1/instance/270\",\"150\":\"http://localhost:9998/interface/v2/scenario/1/instance/150\",\"392\":\"http://localhost:9998/interface/v2/scenario/1/instance/392\",\"151\":\"http://localhost:9998/interface/v2/scenario/1/instance/151\",\"152\":\"http://localhost:9998/interface/v2/scenario/1/instance/152\",\"394\":\"http://localhost:9998/interface/v2/scenario/1/instance/394\",\"153\":\"http://localhost:9998/interface/v2/scenario/1/instance/153\",\"154\":\"http://localhost:9998/interface/v2/scenario/1/instance/154\",\"396\":\"http://localhost:9998/interface/v2/scenario/1/instance/396\",\"155\":\"http://localhost:9998/interface/v2/scenario/1/instance/155\",\"156\":\"http://localhost:9998/interface/v2/scenario/1/instance/156\",\"398\":\"http://localhost:9998/interface/v2/scenario/1/instance/398\",\"157\":\"http://localhost:9998/interface/v2/scenario/1/instance/157\",\"158\":\"http://localhost:9998/interface/v2/scenario/1/instance/158\",\"279\":\"http://localhost:9998/interface/v2/scenario/1/instance/279\",\"159\":\"http://localhost:9998/interface/v2/scenario/1/instance/159\",\"951\":\"http://localhost:9998/interface/v2/scenario/1/instance/951\",\"952\":\"http://localhost:9998/interface/v2/scenario/1/instance/952\",\"712\":\"http://localhost:9998/interface/v2/scenario/1/instance/712\",\"50\":\"http://localhost:9998/interface/v2/scenario/1/instance/50\",\"836\":\"http://localhost:9998/interface/v2/scenario/1/instance/836\",\"51\":\"http://localhost:9998/interface/v2/scenario/1/instance/51\",\"52\":\"http://localhost:9998/interface/v2/scenario/1/instance/52\",\"53\":\"http://localhost:999
8/interface/v2/scenario/1/instance/53\",\"54\":\"http://localhost:9998/interface/v2/scenario/1/instance/54\",\"719\":\"http://localhost:9998/interface/v2/scenario/1/instance/719\",\"55\":\"http://localhost:9998/interface/v2/scenario/1/instance/55\",\"56\":\"http://localhost:9998/interface/v2/scenario/1/instance/56\",\"57\":\"http://localhost:9998/interface/v2/scenario/1/instance/57\",\"58\":\"http://localhost:9998/interface/v2/scenario/1/instance/58\",\"59\":\"http://localhost:9998/interface/v2/scenario/1/instance/59\",\"160\":\"http://localhost:9998/interface/v2/scenario/1/instance/160\",\"281\":\"http://localhost:9998/interface/v2/scenario/1/instance/281\",\"161\":\"http://localhost:9998/interface/v2/scenario/1/instance/161\",\"282\":\"http://localhost:9998/interface/v2/scenario/1/instance/282\",\"162\":\"http://localhost:9998/interface/v2/scenario/1/instance/162\",\"163\":\"http://localhost:9998/interface/v2/scenario/1/instance/163\",\"284\":\"http://localhost:9998/interface/v2/scenario/1/instance/284\",\"164\":\"http://localhost:9998/interface/v2/scenario/1/instance/164\",\"285\":\"http://localhost:9998/interface/v2/scenario/1/instance/285\",\"165\":\"http://localhost:9998/interface/v2/scenario/1/instance/165\",\"286\":\"http://localhost:9998/interface/v2/scenario/1/instance/286\",\"166\":\"http://localhost:9998/interface/v2/scenario/1/instance/166\",\"167\":\"http://localhost:9998/interface/v2/scenario/1/instance/167\",\"168\":\"http://localhost:9998/interface/v2/scenario/1/instance/168\",\"169\":\"http://localhost:9998/interface/v2/scenario/1/instance/169\",\"60\":\"http://localhost:9998/interface/v2/scenario/1/instance/60\",\"846\":\"http://localhost:9998/interface/v2/scenario/1/instance/846\",\"61\":\"http://localhost:9998/interface/v2/scenario/1/instance/61\",\"605\":\"http://localhost:9998/interface/v2/scenario/1/instance/605\",\"726\":\"http://localhost:9998/interface/v2/scenario/1/instance/726\",\"62\":\"http://localhost:9998/interface/v2/scenario/1/instance/62\",\"63\":\"http://localhost:9998/interface/v2/scenario/1/instance/63\",\"64\":\"http://localhost:9998/interface/v2/scenario/1/instance/64\",\"65\":\"http://localhost:9998/interface/v2/scenario/1/instance/65\",\"66\":\"http://localhost:9998/interface/v2/scenario/1/instance/66\",\"67\":\"http://localhost:9998/interface/v2/scenario/1/instance/67\",\"68\":\"http://localhost:9998/interface/v2/scenario/1/instance/68\",\"69\":\"http://localhost:9998/interface/v2/scenario/1/instance/69\",\"170\":\"http://localhost:9998/interface/v2/scenario/1/instance/170\",\"171\":\"http://localhost:9998/interface/v2/scenario/1/instance/171\",\"172\":\"http://localhost:9998/interface/v2/scenario/1/instance/172\",\"173\":\"http://localhost:9998/interface/v2/scenario/1/instance/173\",\"294\":\"http://localhost:9998/interface/v2/scenario/1/instance/294\",\"174\":\"http://localhost:9998/interface/v2/scenario/1/instance/174\",\"175\":\"http://localhost:9998/interface/v2/scenario/1/instance/175\",\"296\":\"http://localhost:9998/interface/v2/scenario/1/instance/296\",\"176\":\"http://localhost:9998/interface/v2/scenario/1/instance/176\",\"177\":\"http://localhost:9998/interface/v2/scenario/1/instance/177\",\"178\":\"http://localhost:9998/interface/v2/scenario/1/instance/178\",\"179\":\"http://localhost:9998/interface/v2/scenario/1/instance/179\",\"853\":\"http://localhost:9998/interface/v2/scenario/1/instance/853\",\"612\":\"http://localhost:9998/interface/v2/scenario/1/instance/612\",\"733\":\"http://localhost:9998/interface/v2/scenario/1/instance/733
\",\"70\":\"http://localhost:9998/interface/v2/scenario/1/instance/70\",\"614\":\"http://localhost:9998/interface/v2/scenario/1/instance/614\",\"71\":\"http://localhost:9998/interface/v2/scenario/1/instance/71\",\"72\":\"http://localhost:9998/interface/v2/scenario/1/instance/72\",\"73\":\"http://localhost:9998/interface/v2/scenario/1/instance/73\",\"74\":\"http://localhost:9998/interface/v2/scenario/1/instance/74\",\"75\":\"http://localhost:9998/interface/v2/scenario/1/instance/75\",\"76\":\"http://localhost:9998/interface/v2/scenario/1/instance/76\",\"77\":\"http://localhost:9998/interface/v2/scenario/1/instance/77\",\"78\":\"http://localhost:9998/interface/v2/scenario/1/instance/78\",\"79\":\"http://localhost:9998/interface/v2/scenario/1/instance/79\",\"180\":\"http://localhost:9998/interface/v2/scenario/1/instance/180\",\"181\":\"http://localhost:9998/interface/v2/scenario/1/instance/181\",\"182\":\"http://localhost:9998/interface/v2/scenario/1/instance/182\",\"183\":\"http://localhost:9998/interface/v2/scenario/1/instance/183\",\"184\":\"http://localhost:9998/interface/v2/scenario/1/instance/184\",\"185\":\"http://localhost:9998/interface/v2/scenario/1/instance/185\",\"186\":\"http://localhost:9998/interface/v2/scenario/1/instance/186\",\"187\":\"http://localhost:9998/interface/v2/scenario/1/instance/187\",\"188\":\"http://localhost:9998/interface/v2/scenario/1/instance/188\",\"189\":\"http://localhost:9998/interface/v2/scenario/1/instance/189\",\"860\":\"http://localhost:9998/interface/v2/scenario/1/instance/860\",\"740\":\"http://localhost:9998/interface/v2/scenario/1/instance/740\",\"621\":\"http://localhost:9998/interface/v2/scenario/1/instance/621\",\"80\":\"http://localhost:9998/interface/v2/scenario/1/instance/80\",\"81\":\"http://localhost:9998/interface/v2/scenario/1/instance/81\",\"867\":\"http://localhost:9998/interface/v2/scenario/1/instance/867\",\"82\":\"http://localhost:9998/interface/v2/scenario/1/instance/82\",\"505\":\"http://localhost:9998/interface/v2/scenario/1/instance/505\",\"747\":\"http://localhost:9998/interface/v2/scenario/1/instance/747\",\"83\":\"http://localhost:9998/interface/v2/scenario/1/instance/83\",\"84\":\"http://localhost:9998/interface/v2/scenario/1/instance/84\",\"628\":\"http://localhost:9998/interface/v2/scenario/1/instance/628\",\"190\":\"http://localhost:9998/interface/v2/scenario/1/instance/190\",\"191\":\"http://localhost:9998/interface/v2/scenario/1/instance/191\",\"192\":\"http://localhost:9998/interface/v2/scenario/1/instance/192\",\"193\":\"http://localhost:9998/interface/v2/scenario/1/instance/193\",\"194\":\"http://localhost:9998/interface/v2/scenario/1/instance/194\",\"195\":\"http://localhost:9998/interface/v2/scenario/1/instance/195\",\"196\":\"http://localhost:9998/interface/v2/scenario/1/instance/196\",\"197\":\"http://localhost:9998/interface/v2/scenario/1/instance/197\",\"198\":\"http://localhost:9998/interface/v2/scenario/1/instance/198\",\"199\":\"http://localhost:9998/interface/v2/scenario/1/instance/199\",\"754\":\"http://localhost:9998/interface/v2/scenario/1/instance/754\",\"635\":\"http://localhost:9998/interface/v2/scenario/1/instance/635\",\"756\":\"http://localhost:9998/interface/v2/scenario/1/instance/756\",\"92\":\"http://localhost:9998/interface/v2/scenario/1/instance/92\",\"515\":\"http://localhost:9998/interface/v2/scenario/1/instance/515\",\"94\":\"http://localhost:9998/interface/v2/scenario/1/instance/94\",\"95\":\"http://localhost:9998/interface/v2/scenario/1/instance/95\",\"519\":\"http://localhost:9998/interf
ace/v2/scenario/1/instance/519\",\"97\":\"http://localhost:9998/interface/v2/scenario/1/instance/97\",\"99\":\"http://localhost:9998/interface/v2/scenario/1/instance/99\",\"400\":\"http://localhost:9998/interface/v2/scenario/1/instance/400\",\"642\":\"http://localhost:9998/interface/v2/scenario/1/instance/642\",\"763\":\"http://localhost:9998/interface/v2/scenario/1/instance/763\",\"402\":\"http://localhost:9998/interface/v2/scenario/1/instance/402\",\"404\":\"http://localhost:9998/interface/v2/scenario/1/instance/404\",\"406\":\"http://localhost:9998/interface/v2/scenario/1/instance/406\",\"527\":\"http://localhost:9998/interface/v2/scenario/1/instance/527\",\"649\":\"http://localhost:9998/interface/v2/scenario/1/instance/649\",\"408\":\"http://localhost:9998/interface/v2/scenario/1/instance/408\",\"409\":\"http://localhost:9998/interface/v2/scenario/1/instance/409\",\"770\":\"http://localhost:9998/interface/v2/scenario/1/instance/770\",\"410\":\"http://localhost:9998/interface/v2/scenario/1/instance/410\",\"531\":\"http://localhost:9998/interface/v2/scenario/1/instance/531\",\"411\":\"http://localhost:9998/interface/v2/scenario/1/instance/411\",\"895\":\"http://localhost:9998/interface/v2/scenario/1/instance/895\",\"412\":\"http://localhost:9998/interface/v2/scenario/1/instance/412\",\"413\":\"http://localhost:9998/interface/v2/scenario/1/instance/413\",\"656\":\"http://localhost:9998/interface/v2/scenario/1/instance/656\",\"777\":\"http://localhost:9998/interface/v2/scenario/1/instance/777\",\"415\":\"http://localhost:9998/interface/v2/scenario/1/instance/415\",\"416\":\"http://localhost:9998/interface/v2/scenario/1/instance/416\",\"417\":\"http://localhost:9998/interface/v2/scenario/1/instance/417\",\"418\":\"http://localhost:9998/interface/v2/scenario/1/instance/418\",\"419\":\"http://localhost:9998/interface/v2/scenario/1/instance/419\",\"420\":\"http://localhost:9998/interface/v2/scenario/1/instance/420\",\"421\":\"http://localhost:9998/interface/v2/scenario/1/instance/421\",\"663\":\"http://localhost:9998/interface/v2/scenario/1/instance/663\",\"784\":\"http://localhost:9998/interface/v2/scenario/1/instance/784\",\"422\":\"http://localhost:9998/interface/v2/scenario/1/instance/422\",\"309\":\"http://localhost:9998/interface/v2/scenario/1/instance/309\",\"670\":\"http://localhost:9998/interface/v2/scenario/1/instance/670\",\"791\":\"http://localhost:9998/interface/v2/scenario/1/instance/791\",\"430\":\"http://localhost:9998/interface/v2/scenario/1/instance/430\",\"310\":\"http://localhost:9998/interface/v2/scenario/1/instance/310\",\"552\":\"http://localhost:9998/interface/v2/scenario/1/instance/552\",\"312\":\"http://localhost:9998/interface/v2/scenario/1/instance/312\",\"433\":\"http://localhost:9998/interface/v2/scenario/1/instance/433\",\"313\":\"http://localhost:9998/interface/v2/scenario/1/instance/313\",\"314\":\"http://localhost:9998/interface/v2/scenario/1/instance/314\",\"677\":\"http://localhost:9998/interface/v2/scenario/1/instance/677\",\"315\":\"http://localhost:9998/interface/v2/scenario/1/instance/315\",\"316\":\"http://localhost:9998/interface/v2/scenario/1/instance/316\",\"317\":\"http://localhost:9998/interface/v2/scenario/1/instance/317\",\"438\":\"http://localhost:9998/interface/v2/scenario/1/instance/438\",\"318\":\"http://localhost:9998/interface/v2/scenario/1/instance/318\",\"319\":\"http://localhost:9998/interface/v2/scenario/1/instance/319\",\"561\":\"http://localhost:9998/interface/v2/scenario/1/instance/561\",\"320\":\"http://localhost:9998/interface/v2/sc
enario/1/instance/320\",\"441\":\"http://localhost:9998/interface/v2/scenario/1/instance/441\",\"200\":\"http://localhost:9998/interface/v2/scenario/1/instance/200\",\"321\":\"http://localhost:9998/interface/v2/scenario/1/instance/321\",\"563\":\"http://localhost:9998/interface/v2/scenario/1/instance/563\",\"684\":\"http://localhost:9998/interface/v2/scenario/1/instance/684\",\"201\":\"http://localhost:9998/interface/v2/scenario/1/instance/201\",\"322\":\"http://localhost:9998/interface/v2/scenario/1/instance/322\",\"202\":\"http://localhost:9998/interface/v2/scenario/1/instance/202\",\"323\":\"http://localhost:9998/interface/v2/scenario/1/instance/323\",\"444\":\"http://localhost:9998/interface/v2/scenario/1/instance/444\",\"203\":\"http://localhost:9998/interface/v2/scenario/1/instance/203\",\"324\":\"http://localhost:9998/interface/v2/scenario/1/instance/324\",\"445\":\"http://localhost:9998/interface/v2/scenario/1/instance/445\",\"204\":\"http://localhost:9998/interface/v2/scenario/1/instance/204\",\"325\":\"http://localhost:9998/interface/v2/scenario/1/instance/325\",\"446\":\"http://localhost:9998/interface/v2/scenario/1/instance/446\",\"205\":\"http://localhost:9998/interface/v2/scenario/1/instance/205\",\"326\":\"http://localhost:9998/interface/v2/scenario/1/instance/326\",\"206\":\"http://localhost:9998/interface/v2/scenario/1/instance/206\",\"327\":\"http://localhost:9998/interface/v2/scenario/1/instance/327\",\"448\":\"http://localhost:9998/interface/v2/scenario/1/instance/448\",\"207\":\"http://localhost:9998/interface/v2/scenario/1/instance/207\",\"328\":\"http://localhost:9998/interface/v2/scenario/1/instance/328\",\"449\":\"http://localhost:9998/interface/v2/scenario/1/instance/449\",\"208\":\"http://localhost:9998/interface/v2/scenario/1/instance/208\",\"209\":\"http://localhost:9998/interface/v2/scenario/1/instance/209\",\"691\":\"http://localhost:9998/interface/v2/scenario/1/instance/691\",\"572\":\"http://localhost:9998/interface/v2/scenario/1/instance/572\",\"210\":\"http://localhost:9998/interface/v2/scenario/1/instance/210\",\"452\":\"http://localhost:9998/interface/v2/scenario/1/instance/452\",\"212\":\"http://localhost:9998/interface/v2/scenario/1/instance/212\",\"333\":\"http://localhost:9998/interface/v2/scenario/1/instance/333\",\"334\":\"http://localhost:9998/interface/v2/scenario/1/instance/334\",\"214\":\"http://localhost:9998/interface/v2/scenario/1/instance/214\",\"456\":\"http://localhost:9998/interface/v2/scenario/1/instance/456\",\"698\":\"http://localhost:9998/interface/v2/scenario/1/instance/698\",\"215\":\"http://localhost:9998/interface/v2/scenario/1/instance/215\",\"216\":\"http://localhost:9998/interface/v2/scenario/1/instance/216\",\"217\":\"http://localhost:9998/interface/v2/scenario/1/instance/217\",\"459\":\"http://localhost:9998/interface/v2/scenario/1/instance/459\",\"218\":\"http://localhost:9998/interface/v2/scenario/1/instance/218\",\"219\":\"http://localhost:9998/interface/v2/scenario/1/instance/219\",\"460\":\"http://localhost:9998/interface/v2/scenario/1/instance/460\",\"461\":\"http://localhost:9998/interface/v2/scenario/1/instance/461\",\"582\":\"http://localhost:9998/interface/v2/scenario/1/instance/582\",\"220\":\"http://localhost:9998/interface/v2/scenario/1/instance/220\",\"221\":\"http://localhost:9998/interface/v2/scenario/1/instance/221\",\"101\":\"http://localhost:9998/interface/v2/scenario/1/instance/101\",\"222\":\"http://localhost:9998/interface/v2/scenario/1/instance/222\",\"464\":\"http://localhost:9998/interface/v2/scenari
o/1/instance/464\",\"223\":\"http://localhost:9998/interface/v2/scenario/1/instance/223\",\"103\":\"http://localhost:9998/interface/v2/scenario/1/instance/103\",\"224\":\"http://localhost:9998/interface/v2/scenario/1/instance/224\",\"104\":\"http://localhost:9998/interface/v2/scenario/1/instance/104\",\"346\":\"http://localhost:9998/interface/v2/scenario/1/instance/346\",\"467\":\"http://localhost:9998/interface/v2/scenario/1/instance/467\",\"105\":\"http://localhost:9998/interface/v2/scenario/1/instance/105\",\"226\":\"http://localhost:9998/interface/v2/scenario/1/instance/226\",\"106\":\"http://localhost:9998/interface/v2/scenario/1/instance/106\",\"348\":\"http://localhost:9998/interface/v2/scenario/1/instance/348\",\"107\":\"http://localhost:9998/interface/v2/scenario/1/instance/107\",\"228\":\"http://localhost:9998/interface/v2/scenario/1/instance/228\",\"349\":\"http://localhost:9998/interface/v2/scenario/1/instance/349\",\"108\":\"http://localhost:9998/interface/v2/scenario/1/instance/108\",\"109\":\"http://localhost:9998/interface/v2/scenario/1/instance/109\"},\"labels\":{\"515\":\"HELLOWORLD\",\"519\":\"HELLOWORLD\",\"527\":\"HELLOWORLD\",\"531\":\"HELLOWORLD\",\"552\":\"HELLOWORLD\",\"47\":\"HELLOWORLD\",\"48\":\"HELLOWORLD\",\"49\":\"HELLOWORLD\",\"561\":\"HELLOWORLD\",\"50\":\"HELLOWORLD\",\"51\":\"HELLOWORLD\",\"563\":\"HELLOWORLD\",\"52\":\"HELLOWORLD\",\"53\":\"HELLOWORLD\",\"54\":\"HELLOWORLD\",\"55\":\"HELLOWORLD\",\"56\":\"HELLOWORLD\",\"57\":\"HELLOWORLD\",\"58\":\"HELLOWORLD\",\"59\":\"HELLOWORLD\",\"60\":\"HELLOWORLD\",\"572\":\"HELLOWORLD\",\"61\":\"HELLOWORLD\",\"62\":\"HELLOWORLD\",\"63\":\"HELLOWORLD\",\"64\":\"HELLOWORLD\",\"65\":\"HELLOWORLD\",\"66\":\"HELLOWORLD\",\"67\":\"HELLOWORLD\",\"68\":\"HELLOWORLD\",\"69\":\"HELLOWORLD\",\"70\":\"HELLOWORLD\",\"582\":\"HELLOWORLD\",\"71\":\"HELLOWORLD\",\"72\":\"HELLOWORLD\",\"73\":\"HELLOWORLD\",\"74\":\"HELLOWORLD\",\"75\":\"HELLOWORLD\",\"76\":\"HELLOWORLD\",\"77\":\"HELLOWORLD\",\"78\":\"HELLOWORLD\",\"590\":\"HELLOWORLD\",\"79\":\"HELLOWORLD\",\"80\":\"HELLOWORLD\",\"81\":\"HELLOWORLD\",\"82\":\"HELLOWORLD\",\"83\":\"HELLOWORLD\",\"84\":\"HELLOWORLD\",\"598\":\"HELLOWORLD\",\"92\":\"HELLOWORLD\",\"605\":\"HELLOWORLD\",\"94\":\"HELLOWORLD\",\"95\":\"HELLOWORLD\",\"97\":\"HELLOWORLD\",\"99\":\"HELLOWORLD\",\"612\":\"HELLOWORLD\",\"101\":\"HELLOWORLD\",\"614\":\"HELLOWORLD\",\"103\":\"HELLOWORLD\",\"104\":\"HELLOWORLD\",\"105\":\"HELLOWORLD\",\"106\":\"HELLOWORLD\",\"107\":\"HELLOWORLD\",\"108\":\"HELLOWORLD\",\"109\":\"HELLOWORLD\",\"621\":\"HELLOWORLD\",\"110\":\"HELLOWORLD\",\"111\":\"HELLOWORLD\",\"112\":\"HELLOWORLD\",\"113\":\"HELLOWORLD\",\"114\":\"HELLOWORLD\",\"115\":\"HELLOWORLD\",\"116\":\"HELLOWORLD\",\"628\":\"HELLOWORLD\",\"117\":\"HELLOWORLD\",\"118\":\"HELLOWORLD\",\"119\":\"HELLOWORLD\",\"120\":\"HELLOWORLD\",\"121\":\"HELLOWORLD\",\"122\":\"HELLOWORLD\",\"123\":\"HELLOWORLD\",\"635\":\"HELLOWORLD\",\"124\":\"HELLOWORLD\",\"125\":\"HELLOWORLD\",\"126\":\"HELLOWORLD\",\"127\":\"HELLOWORLD\",\"128\":\"HELLOWORLD\",\"129\":\"HELLOWORLD\",\"130\":\"HELLOWORLD\",\"642\":\"HELLOWORLD\",\"131\":\"HELLOWORLD\",\"132\":\"HELLOWORLD\",\"133\":\"HELLOWORLD\",\"134\":\"HELLOWORLD\",\"135\":\"HELLOWORLD\",\"136\":\"HELLOWORLD\",\"137\":\"HELLOWORLD\",\"649\":\"HELLOWORLD\",\"138\":\"HELLOWORLD\",\"139\":\"HELLOWORLD\",\"140\":\"HELLOWORLD\",\"141\":\"HELLOWORLD\",\"142\":\"HELLOWORLD\",\"143\":\"HELLOWORLD\",\"144\":\"HELLOWORLD\",\"656\":\"HELLOWORLD\",\"145\":\"HELLOWORLD\",\"146\":\"HELLOWORLD\",\"147\":\"HELLOWO
RLD\",\"148\":\"HELLOWORLD\",\"149\":\"HELLOWORLD\",\"150\":\"HELLOWORLD\",\"151\":\"HELLOWORLD\",\"663\":\"HELLOWORLD\",\"152\":\"HELLOWORLD\",\"153\":\"HELLOWORLD\",\"154\":\"HELLOWORLD\",\"155\":\"HELLOWORLD\",\"156\":\"HELLOWORLD\",\"157\":\"HELLOWORLD\",\"158\":\"HELLOWORLD\",\"670\":\"HELLOWORLD\",\"159\":\"HELLOWORLD\",\"160\":\"HELLOWORLD\",\"161\":\"HELLOWORLD\",\"162\":\"HELLOWORLD\",\"163\":\"HELLOWORLD\",\"164\":\"HELLOWORLD\",\"165\":\"HELLOWORLD\",\"677\":\"HELLOWORLD\",\"166\":\"HELLOWORLD\",\"167\":\"HELLOWORLD\",\"168\":\"HELLOWORLD\",\"169\":\"HELLOWORLD\",\"170\":\"HELLOWORLD\",\"171\":\"HELLOWORLD\",\"172\":\"HELLOWORLD\",\"684\":\"HELLOWORLD\",\"173\":\"HELLOWORLD\",\"174\":\"HELLOWORLD\",\"175\":\"HELLOWORLD\",\"176\":\"HELLOWORLD\",\"177\":\"HELLOWORLD\",\"178\":\"HELLOWORLD\",\"179\":\"HELLOWORLD\",\"691\":\"HELLOWORLD\",\"180\":\"HELLOWORLD\",\"181\":\"HELLOWORLD\",\"182\":\"HELLOWORLD\",\"183\":\"HELLOWORLD\",\"184\":\"HELLOWORLD\",\"185\":\"HELLOWORLD\",\"186\":\"HELLOWORLD\",\"698\":\"HELLOWORLD\",\"187\":\"HELLOWORLD\",\"188\":\"HELLOWORLD\",\"189\":\"HELLOWORLD\",\"190\":\"HELLOWORLD\",\"191\":\"HELLOWORLD\",\"192\":\"HELLOWORLD\",\"193\":\"HELLOWORLD\",\"705\":\"HELLOWORLD\",\"194\":\"HELLOWORLD\",\"195\":\"HELLOWORLD\",\"196\":\"HELLOWORLD\",\"197\":\"HELLOWORLD\",\"198\":\"HELLOWORLD\",\"199\":\"HELLOWORLD\",\"200\":\"HELLOWORLD\",\"712\":\"HELLOWORLD\",\"201\":\"HELLOWORLD\",\"202\":\"HELLOWORLD\",\"203\":\"HELLOWORLD\",\"204\":\"HELLOWORLD\",\"205\":\"HELLOWORLD\",\"206\":\"HELLOWORLD\",\"207\":\"HELLOWORLD\",\"719\":\"HELLOWORLD\",\"208\":\"HELLOWORLD\",\"209\":\"HELLOWORLD\",\"210\":\"HELLOWORLD\",\"212\":\"HELLOWORLD\",\"214\":\"HELLOWORLD\",\"726\":\"HELLOWORLD\",\"215\":\"HELLOWORLD\",\"216\":\"HELLOWORLD\",\"217\":\"HELLOWORLD\",\"218\":\"HELLOWORLD\",\"219\":\"HELLOWORLD\",\"220\":\"HELLOWORLD\",\"221\":\"HELLOWORLD\",\"733\":\"HELLOWORLD\",\"222\":\"HELLOWORLD\",\"223\":\"HELLOWORLD\",\"224\":\"HELLOWORLD\",\"226\":\"HELLOWORLD\",\"228\":\"HELLOWORLD\",\"740\":\"HELLOWORLD\",\"747\":\"HELLOWORLD\",\"754\":\"HELLOWORLD\",\"244\":\"HELLOWORLD\",\"756\":\"HELLOWORLD\",\"246\":\"HELLOWORLD\",\"248\":\"HELLOWORLD\",\"250\":\"HELLOWORLD\",\"763\":\"HELLOWORLD\",\"252\":\"HELLOWORLD\",\"255\":\"HELLOWORLD\",\"257\":\"HELLOWORLD\",\"770\":\"HELLOWORLD\",\"259\":\"HELLOWORLD\",\"261\":\"HELLOWORLD\",\"262\":\"HELLOWORLD\",\"263\":\"HELLOWORLD\",\"265\":\"HELLOWORLD\",\"777\":\"HELLOWORLD\",\"266\":\"HELLOWORLD\",\"270\":\"HELLOWORLD\",\"784\":\"HELLOWORLD\",\"279\":\"HELLOWORLD\",\"791\":\"HELLOWORLD\",\"281\":\"HELLOWORLD\",\"282\":\"HELLOWORLD\",\"284\":\"HELLOWORLD\",\"285\":\"HELLOWORLD\",\"286\":\"HELLOWORLD\",\"294\":\"HELLOWORLD\",\"296\":\"HELLOWORLD\",\"809\":\"HELLOWORLD\",\"816\":\"HELLOWORLD\",\"309\":\"HELLOWORLD\",\"310\":\"HELLOWORLD\",\"823\":\"HELLOWORLD\",\"312\":\"HELLOWORLD\",\"313\":\"HELLOWORLD\",\"314\":\"HELLOWORLD\",\"315\":\"HELLOWORLD\",\"316\":\"HELLOWORLD\",\"317\":\"HELLOWORLD\",\"318\":\"HELLOWORLD\",\"319\":\"HELLOWORLD\",\"320\":\"HELLOWORLD\",\"321\":\"HELLOWORLD\",\"322\":\"HELLOWORLD\",\"323\":\"HELLOWORLD\",\"324\":\"HELLOWORLD\",\"836\":\"HELLOWORLD\",\"325\":\"HELLOWORLD\",\"326\":\"HELLOWORLD\",\"327\":\"HELLOWORLD\",\"328\":\"HELLOWORLD\",\"333\":\"HELLOWORLD\",\"334\":\"HELLOWORLD\",\"846\":\"HELLOWORLD\",\"853\":\"HELLOWORLD\",\"346\":\"HELLOWORLD\",\"348\":\"HELLOWORLD\",\"860\":\"HELLOWORLD\",\"349\":\"HELLOWORLD\",\"351\":\"HELLOWORLD\",\"353\":\"HELLOWORLD\",\"867\":\"HELLOWORLD\",\"356\":\"HELLOWORLD\",\"358\"
:\"HELLOWORLD\",\"359\":\"HELLOWORLD\",\"361\":\"HELLOWORLD\",\"363\":\"HELLOWORLD\",\"365\":\"HELLOWORLD\",\"366\":\"HELLOWORLD\",\"367\":\"HELLOWORLD\",\"369\":\"HELLOWORLD\",\"370\":\"HELLOWORLD\",\"371\":\"HELLOWORLD\",\"372\":\"HELLOWORLD\",\"373\":\"HELLOWORLD\",\"374\":\"HELLOWORLD\",\"375\":\"HELLOWORLD\",\"376\":\"HELLOWORLD\",\"377\":\"HELLOWORLD\",\"378\":\"HELLOWORLD\",\"379\":\"HELLOWORLD\",\"380\":\"HELLOWORLD\",\"381\":\"HELLOWORLD\",\"383\":\"HELLOWORLD\",\"895\":\"HELLOWORLD\",\"385\":\"HELLOWORLD\",\"386\":\"HELLOWORLD\",\"387\":\"HELLOWORLD\",\"388\":\"HELLOWORLD\",\"390\":\"HELLOWORLD\",\"392\":\"HELLOWORLD\",\"394\":\"HELLOWORLD\",\"396\":\"HELLOWORLD\",\"398\":\"HELLOWORLD\",\"400\":\"HELLOWORLD\",\"402\":\"HELLOWORLD\",\"915\":\"HELLOWORLD\",\"404\":\"HELLOWORLD\",\"406\":\"HELLOWORLD\",\"408\":\"HELLOWORLD\",\"409\":\"HELLOWORLD\",\"410\":\"HELLOWORLD\",\"411\":\"HELLOWORLD\",\"412\":\"HELLOWORLD\",\"413\":\"HELLOWORLD\",\"415\":\"HELLOWORLD\",\"416\":\"HELLOWORLD\",\"417\":\"HELLOWORLD\",\"418\":\"HELLOWORLD\",\"419\":\"HELLOWORLD\",\"420\":\"HELLOWORLD\",\"421\":\"HELLOWORLD\",\"422\":\"HELLOWORLD\",\"430\":\"HELLOWORLD\",\"433\":\"HELLOWORLD\",\"438\":\"HELLOWORLD\",\"951\":\"HELLOWORLD\",\"952\":\"HELLOWORLD\",\"441\":\"HELLOWORLD\",\"444\":\"HELLOWORLD\",\"445\":\"HELLOWORLD\",\"446\":\"HELLOWORLD\",\"448\":\"HELLOWORLD\",\"449\":\"HELLOWORLD\",\"452\":\"HELLOWORLD\",\"456\":\"HELLOWORLD\",\"459\":\"HELLOWORLD\",\"460\":\"HELLOWORLD\",\"461\":\"HELLOWORLD\",\"464\":\"HELLOWORLD\",\"467\":\"HELLOWORLD\",\"470\":\"HELLOWORLD\",\"473\":\"HELLOWORLD\",\"476\":\"HELLOWORLD\",\"479\":\"HELLOWORLD\",\"482\":\"HELLOWORLD\",\"485\":\"HELLOWORLD\",\"488\":\"HELLOWORLD\",\"491\":\"HELLOWORLD\",\"492\":\"HELLOWORLD\",\"493\":\"HELLOWORLD\",\"494\":\"HELLOWORLD\",\"497\":\"HELLOWORLD\",\"505\":\"HELLOWORLD\"}}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Post to {@link RestInterface#terminateScenarioInstance(int, int)} * with an valid scenario instance id * the scenario should be terminated and the response is a 201. */ @Test public void terminateScenarioInstance() { Response response = base.path("scenario/1/instance/47").request().put(Entity.json("")); assertEquals("The Response code of terminating an instances was not 200", 200, response.getStatus()); } /** * When you send a Post to {@link RestInterface#terminateScenarioInstance(int, int)} * with an invalid instance id * then the Response should be a 404 with an error message. */ @Test public void terminateScenarioInstanceInvalidId() { Response response = base.path("scenario/1/instance/9999").request().put(Entity.json("")); assertEquals("The Response code of terminating an instances was not 400", 400, response.getStatus()); assertEquals("The Media type of terminating an instance was not JSON", MediaType.APPLICATION_JSON_TYPE, response.getMediaType()); assertThat("The content of the response was not as expected", response.readEntity(String.class), jsonEquals("{\"error\":\"The Scenario instance could not be found!\"}") .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getScenarioInstances(UriInfo, int, String)} * with a valid scenario id and a filter * only instances with names containing this string will be returned. 
*/ @Test public void testGetScenarioInstancesWithFilter() { Response response = base.path("scenario/1/instance").queryParam("filter", "noInstanceLikeThis").request().get(); assertThat("The returned JSON does not contain the expected content", "{\"ids\":[],\"labels\":{},\"links\":{}}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Post to {@link RestInterface#startNewInstance(UriInfo, int)} * then the Response will be a 201 and a JSON object with the new id will be returned. */ @Test public void testStartNewInstanceWOName() { Response response = base.path("scenario/1/instance").request().post(null); assertEquals("The Response code of start new instances was not 201", 201, response.getStatus()); assertEquals("Start new instance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"id\":966,\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/966\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Post to {@link RestInterface#startNewInstance(UriInfo, int)} * with an invalid scenario id * then the Response will be a 400 with an error message. */ @Test public void testStartInvalidInstanceWOName() { Response response = base.path("scenario/9999/instance").request().post(null); assertEquals("The Response code of start new instances was not 400", 400, response.getStatus()); assertEquals("Start new instance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"The Scenario could not be found!\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Put to {@link RestInterface#startNewNamedInstance(UriInfo, int, RestInterface.NamedJaxBean)} * then the Response will be a 201 and a JSON object with the new id will be returned. */ @Test public void testStartNewInstanceWName() { RestInterface.NamedJaxBean newName = new RestInterface.NamedJaxBean(); newName.name = "Dies ist ein Test"; Response response = base.path("scenario/1/instance") .request().put(Entity.json(newName)); assertEquals("The Response code of start new instances was not 201", 201, response.getStatus()); assertEquals("Start new instance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"id\":966,\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/966\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Put to {@link RestInterface#startNewNamedInstance(UriInfo, int, RestInterface.NamedJaxBean)} * with an invalid scenario id * then the Response will be a 400 with an error message.
*/ @Test public void testStartInvalidInstanceWName() { RestInterface.NamedJaxBean newName = new RestInterface.NamedJaxBean(); newName.name = "Dies ist ein Test"; Response response = base.path("scenario/9999/instance").request() .put(Entity.json(newName)); assertEquals("The Response code of start new instances was not 400", 400, response.getStatus()); assertEquals("Start new instance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"The Scenario could not be found!\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getScenarioInstance(int, int)} * with a correct scenario id and a correct instance id * the response will be a 200 with a JSON object. */ @Test public void testGetScenarioInstanceReturnsJSON() { Response response = base.path("scenario/1/instance/72").request().get(); assertEquals("The Response code of getScenarioInstance was not 200", 200, response.getStatus()); assertEquals("getScenarioInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"name\":\"HELLOWORLD\",\"id\":72,\"terminated\":false,\"scenario_id\":1}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getScenarioInstance(int, int)} * with a wrong scenario id and a correct instance id * the response will be a 200 with a redirected URI. */ @Test public void testGetScenarioInstanceWithWrongScenarioRedirects() { Response response = base.path("scenario/9999/instance/72").request().get(); assertEquals("The Response code of getScenarioInstance was not 200", 200, response.getStatus()); assertEquals("getScenarioInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"name\":\"HELLOWORLD\",\"id\":72,\"terminated\":false,\"scenario_id\":1}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getScenarioInstance(int, int)} * with a wrong instance id * the response will be a 404 with an error message. */ @Test public void testGetScenarioInstanceWithWrongInstanceThrowsError() { Response response = base.path("scenario/9999/instance/9999").request().get(); assertEquals("The Response code of getScenarioInstance was not 404", 404, response.getStatus()); assertEquals("getScenarioInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"message\":\"There is no instance with the id 9999\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with a wrong scenario ID the request should be redirected to the correct one.
*/ @Test public void testGetActivitiesRedirects() { Response response = base.path("scenario/9999/instance/72/activity").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":{\"189\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"},\"6686\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/6686\",\"id\":6686,\"label\":\"ActivityFragment4\",\"state\":\"ready\"},\"186\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/186\",\"id\":186,\"label\":\"Activity1Fragment1\",\"state\":\"ready\"}},\"ids\":[186,189,6686]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with a wrong scenario instance ID * then a 404 with an error message (inside JSON) should be returned. */ @Test public void testGetActivitiesInvalidInstance() { Response response = base.path("scenario/1/instance/9999/activity").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 404", 404, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"message\":\"There is no instance with id 9999\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with correct parameters, a state but no filter * then the request should return all activities with this state.
*/ @Test public void testGetActivitiesWithState() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("state", "ready").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":[{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/186\",\"id\":186,\"label\":\"Activity1Fragment1\",\"state\":\"ready\"},{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"},{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/6686\",\"id\":6686,\"label\":\"ActivityFragment4\",\"state\":\"ready\"}],\"ids\":[186,189,6686]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with correct parameters, an invalid state but no filter * the request should return a 404 with an error message. */ @Test public void testGetActivitiesWithInvalidState() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("state", "enabled").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 404", 404, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"The state is not allowed enabled\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with correct parameters, a state but no filter * then the request should return all activities with this state. */ @Test public void testGetActivitiesWithStateTerminated() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("state", "terminated").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":[],\"ids\":[]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with correct parameters, a state and a filter * then the request should return all activities with this state that fulfill the filter condition.
*/ @Test public void testGetActivitiesWithStateAndFilter() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("state", "ready") .queryParam("filter", "2").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":[{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"}],\"ids\":[189]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with correct parameters, an invalid state but no filter * the request should return a 404 with an error message. */ @Test public void testGetActivitiesWithInvalidStateFilter() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("state", "enabled") .queryParam("filter", "1").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 404", 404, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"The state is not allowed enabled\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with a correct instance and scenario * a 200 with JSON content will be returned. */ @Test public void testGetActivitiesCorrect() { Response response = base.path("scenario/1/instance/72/activity").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":{\"189\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"},\"6686\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/6686\",\"id\":6686,\"label\":\"ActivityFragment4\",\"state\":\"ready\"},\"186\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/186\",\"id\":186,\"label\":\"Activity1Fragment1\",\"state\":\"ready\"}},\"ids\":[186,189,6686]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with a filter String * then only activities with a label like the filter String will be returned.
*/ @Test public void testGetActivitiesWithFilter() { Response response = base.path("scenario/1/instance/72/activity").queryParam("filter", "2").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":{\"189\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"}},\"ids\":[189]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObjects(UriInfo, int, int, String)} * with a correct instance id and a wrong scenario ID * you will be redirected automatically. */ @Test public void testGetDataObjectsRedirects() { Response response = base.path("scenario/9999/instance/72/dataobject").request().get(); assertEquals("The Response code of getDataObjects was not 200", 200, response.getStatus()); assertEquals("getDataObjects returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"ids\":[1,2],\"results\":{\"1\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/dataobject/1\",\"id\":1,\"label\":\"object1\",\"state\":\"init\"},\"2\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/dataobject/2\",\"id\":2,\"label\":\"object2\",\"state\":\"init\"}}}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObjects(UriInfo, int, int, String)} * with an invalid instance * a 404 with an error message will be returned. */ @Test public void testGetDataObjectsInvalid() { Response response = base.path("scenario/9999/instance/9999/dataobject").request().get(); assertEquals("The Response code of getDataObjects was not 404", 404, response.getStatus()); assertEquals("getDataObjects returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"There is no instance with the id 9999\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObjects(UriInfo, int, int, String)} * with a valid instance and scenario and no filter String * you will get a list of all DataObjects for this scenario.
*/ @Test public void testGetDataObjectsWOFilter() { Response response = base.path("scenario/1/instance/62/dataobject").request().get(); assertEquals("The Response code of getDataObjects was not 200", 200, response.getStatus()); assertEquals("getDataObjects returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"ids\":[1,2],\"results\":{\"1\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/62/dataobject/1\",\"id\":1,\"label\":\"object1\",\"state\":\"init\"},\"2\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/62/dataobject/2\",\"id\":2,\"label\":\"object2\",\"state\":\"init\"}}}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObjects(UriInfo, int, int, String)} * with a valid instance and scenario and a filter String * you will get a list of all DataObjects with labels like the filter String for this scenario. */ @Test public void testGetDataObjectsWithFilter() { Response response = base.path("scenario/1/instance/62/dataobject") .queryParam("filter", "1").request().get(); assertEquals("The Response code of getDataObjects was not 200", 200, response.getStatus()); assertEquals("getDataObjects returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"ids\":[1],\"results\":{\"1\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/62/dataobject/1\",\"id\":1,\"label\":\"object1\",\"state\":\"init\"}}}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObject(int, int, int)} * with a correct scenario instance id but a wrong scenario id * you will be redirected. */ @Test public void testGetDataObjectRedirects() { Response response = base.path("scenario/9999/instance/62/dataobject/1").request().get(); assertEquals("The Response code of getDataObject was not 200", 200, response.getStatus()); assertEquals("getDataObject returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"label\":\"object1\",\"id\":1,\"state\":\"init\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObject(int, int, int)} * with correct instance and scenario id but a wrong dataobject id * you will get a 404 with an error message.
*/ @Test public void testGetDataObjectInvalidDoId() { Response response = base.path("scenario/1/instance/62/dataobject/9999").request().get(); assertEquals("The Response code of getDataObject was not 404", 404, response.getStatus()); assertEquals("getDataObject returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"There is no dataobject with the id 9999 for the scenario instance 62\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObject(int, int, int)} * with a correct scenario id but an incorrect instance id * you will get a 404 with an error message. */ @Test public void testGetDataObjectInvalidInstanceId() { Response response = base.path("scenario/1/instance/9999/dataobject/1").request().get(); assertEquals("The Response code of getDataObject was not 404", 404, response.getStatus()); assertEquals("getDataObject returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"There is no instance with the id 9999\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObject(int, int, int)} * with correct instance, scenario and dataobject id * you will get a 200 with a JSON object. */ @Test public void testGetDataObject() { Response response = base.path("scenario/1/instance/62/dataobject/1").request().get(); assertEquals("The Response code of getDataObject was not 200", 200, response.getStatus()); assertEquals("getDataObject returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"label\":\"object1\",\"id\":1,\"state\":\"init\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getTerminationCondition(int)} * with a valid id * then a JSON with the termination condition will be returned. */ @Test public void testGetTerminationCondition() { Response response = base.path("scenario/105/terminationcondition").request().get(); assertEquals("The Response code of getTerminationCondition was not 200", 200, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"conditions\":{\"1\":[{\"data_object\":\"A\",\"set_id\":1,\"state\":\"c\"}]},\"setIDs\":[1]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getTerminationCondition(int)} * with an invalid id * then a 404 with an error message should be returned. */ @Test public void testInvalidGetTerminationCondition() { Response response = base.path("scenario/102/terminationcondition").request().get(); assertEquals("The Response code of getTerminationCondition was not 404", 404, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"There is no scenario with the id 
102\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#updateActivityState(String, int, int, String)} * with an invalid state * a bad request with an error message should be returned. */ @Test public void testInvalidStateUpdateActivity() { Response response = base.path("scenario/1/instance/72/activity/105") .queryParam("state", "complete").request().put(Entity.json("")); assertEquals("The Response code of getTerminationCondition was not 400", 400, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"The state transition complete is unknown\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * * When you send a Get to {@link RestInterface#updateActivityState(String, int, int, String)} * with an valid state for an invalid activity. * a bad request with an error message should be returned. */ @Test public void testInvalidActivityUpdateActivity() { Response response = base.path("scenario/1/instance/72/activity/105") .queryParam("state", "begin").request().put(Entity.json("")); assertEquals("The Response code of getTerminationCondition was not 400", 400, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"impossible to start activity with id 105\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * * When you send a Get to {@link RestInterface#updateActivityState(String, int, int, String)} * with an valid state and valid activity * then a 201 will be returned with a message inside a JSON-Object. 
*/ @Test public void testUpdateActivity() { Response response = base.path("scenario/1/instance/72/activity/2") .queryParam("state", "begin").request().put(Entity.json("")); assertEquals("The Response code of getTerminationCondition was not 202", 202, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"message\":\"activity state changed.\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); response = base.path("scenario/1/instance/72/activity/2") .queryParam("state", "terminate").request().put(Entity.json("")); assertEquals("The Response code of getTerminationCondition was not 202", 202, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"message\":\"activity state changed.\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * Given is the Rest API * When you send a PUT to {@link RestInterface#terminateScenarioInstance(int, int)} * with an invalid scenario id or instance id * then a 400 will be returned with an error message */ @Test public void testTerminateInvalidScenarioInstance() { Response response = base.path("scenario/9999/instance/72") .queryParam("state", "begin").request().put(Entity.json("{}")); assertEquals("The Response code of terminateScenarioInstance was not 400", 400, response.getStatus()); assertEquals("Get terminateScenarioInstance does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", response.readEntity(String.class), jsonEquals("{\"error\":\"The Scenario instance could not be found!\"}") .when(Option.IGNORING_ARRAY_ORDER)); response = base.path("scenario/1/instance/9999") .queryParam("status", "begin").request().put(Entity.json("{}")); assertEquals("The Response code of terminateScenarioInstance was not 400", 400, response.getStatus()); assertEquals("Get terminateScenarioInstance does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", response.readEntity(String.class), jsonEquals("{\"error\":\"The Scenario instance could not be found!\"}") .when(Option.IGNORING_ARRAY_ORDER)); } /** * Given is the Rest API * When you send a PUT to {@link RestInterface#terminateScenarioInstance(int, int)} * with an valid scenario and instance id * the instance will be terminated. */ @Test public void testTerminateScenarioInstance() { Response response = base.path("scenario/1/instance/72") .queryParam("state", "begin").request().put(Entity.json("{}")); assertEquals("The Response code of terminateScenarioInstance was not 200", 200, response.getStatus()); assertEquals("terminateScenarioInstance does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", response.readEntity(String.class), jsonEquals("{\"message\":\"The is instance has been terminated.\"}") .when(Option.IGNORING_ARRAY_ORDER)); } }
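A note on the tests above: almost every method repeats the same three checks (status code, JSON media type, JSON body ignoring array order). The following is a hypothetical helper, not part of the original file, sketching how that pattern could be factored out; it only uses the JUnit 4 and json-unit calls already imported by RestInterfaceTest, and the class and method names are illustrative.

// Hypothetical sketch, not part of the original test class.
import static net.javacrumbs.jsonunit.JsonMatchers.jsonEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import net.javacrumbs.jsonunit.core.Option;

final class JsonResponseAssert {

    private JsonResponseAssert() {
        // static utility, no instances
    }

    /**
     * Asserts the HTTP status code, the JSON media type and the JSON body
     * (ignoring array order) of a JAX-RS response in a single call.
     */
    static void assertJsonResponse(Response response, int expectedStatus, String expectedJson) {
        assertEquals("Unexpected response code", expectedStatus, response.getStatus());
        assertEquals("The response does not carry a JSON media type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                response.readEntity(String.class),
                jsonEquals(expectedJson).when(Option.IGNORING_ARRAY_ORDER));
    }
}

With such a helper, a test like testGetDataObject would shrink to the request call plus a single assertJsonResponse(response, 200, "...") line.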
src/test/java/de/uni_potsdam/hpi/bpt/bp2014/jcore/rest/RestInterfaceTest.java
package de.uni_potsdam.hpi.bpt.bp2014.jcore.rest;

import com.ibatis.common.jdbc.ScriptRunner;
import de.uni_potsdam.hpi.bpt.bp2014.AbstractTest;
import de.uni_potsdam.hpi.bpt.bp2014.database.Connection;
import net.javacrumbs.jsonunit.core.Option;
import org.glassfish.jersey.server.ResourceConfig;
import org.json.JSONObject;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;

import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.FileReader;
import java.io.IOException;
import java.sql.SQLException;

import static net.javacrumbs.jsonunit.JsonMatchers.jsonEquals;
import static org.junit.Assert.*;

/**
 * This Class extends the {@link de.uni_potsdam.hpi.bpt.bp2014.AbstractTest}
 * to test the RestInterface of the JEngine core.
 * In order to do so it uses the functionality of the
 * {@link org.glassfish.jersey.test.JerseyTest}.
 * There are test methods for every possible REST call.
 * In order to stay independent from existing tests, the
 * database will be set up before and after the execution.
 * Define the database properties inside the database_connection file.
 */
public class RestInterfaceTest extends AbstractTest {

    private static final String DEVELOPMENT_SQL_SEED_FILE = "src/main/resources/JEngineV2.sql";

    /**
     * Sets up the seed file for the test database.
     */
    static {
        TEST_SQL_SEED_FILE = "src/test/resources/JEngineV2RESTTest_new.sql";
    }

    /**
     * The base url of the jcore rest interface.
     * Allows us to send requests to the {@link de.uni_potsdam.hpi.bpt.bp2014.jcore.rest.RestInterface}.
     */
    private WebTarget base;

    @AfterClass
    public static void resetDatabase() throws IOException, SQLException {
        clearDatabase();
        ScriptRunner runner = new ScriptRunner(Connection.getInstance().connect(), false, false);
        runner.runScript(new FileReader(DEVELOPMENT_SQL_SEED_FILE));
    }

    @Override
    protected Application configure() {
        return new ResourceConfig(de.uni_potsdam.hpi.bpt.bp2014.jcore.rest.RestInterface.class);
    }

    @Before
    public void setUpBase() {
        base = target("interface/v2");
    }

    /**
     * When you send a GET to {@link RestInterface#getScenarios(UriInfo, String)}
     * the media type of the response will be JSON.
     */
    @Test
    public void testGetScenarioProducesJson() {
        Response response = base.path("scenario").request().get();
        assertEquals("The Response code of get Scenario was not 200",
                200, response.getStatus());
        assertEquals("Get Scenarios returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
    }

    /**
     * When you send a GET to {@link RestInterface#getScenarios(UriInfo, String)}
     * the entity of the response will be a valid JSON array.
     */
    @Test
    public void testGetScenarioProducesValidJsonArray() {
        Response response = base.path("scenario").request().get();
        assertNotEquals("Get scenarios did not respond with a valid JSON Array",
                null, new JSONObject(response.readEntity(String.class)));
    }

    /**
     * When you send a GET to {@link RestInterface#getScenarios(UriInfo, String)}
     * the returned JSON will contain the latest version of all Scenarios.
*/ @Test public void testGetScenarioContent() { Response response = base.path("scenario").request().get(); assertThat("Get Scenarios did not contain the expected information", "{\"ids\":[1,2,3,100,101,103,105,111,113,114,115,116,117,118,134,135,136,138,139,140,141,142,143,144],\"links\":{\"140\":\"http://localhost:9998/interface/v2/scenario/140\",\"141\":\"http://localhost:9998/interface/v2/scenario/141\",\"142\":\"http://localhost:9998/interface/v2/scenario/142\",\"143\":\"http://localhost:9998/interface/v2/scenario/143\",\"1\":\"http://localhost:9998/interface/v2/scenario/1\",\"100\":\"http://localhost:9998/interface/v2/scenario/100\",\"111\":\"http://localhost:9998/interface/v2/scenario/111\",\"144\":\"http://localhost:9998/interface/v2/scenario/144\",\"2\":\"http://localhost:9998/interface/v2/scenario/2\",\"101\":\"http://localhost:9998/interface/v2/scenario/101\",\"134\":\"http://localhost:9998/interface/v2/scenario/134\",\"3\":\"http://localhost:9998/interface/v2/scenario/3\",\"113\":\"http://localhost:9998/interface/v2/scenario/113\",\"135\":\"http://localhost:9998/interface/v2/scenario/135\",\"103\":\"http://localhost:9998/interface/v2/scenario/103\",\"114\":\"http://localhost:9998/interface/v2/scenario/114\",\"136\":\"http://localhost:9998/interface/v2/scenario/136\",\"115\":\"http://localhost:9998/interface/v2/scenario/115\",\"105\":\"http://localhost:9998/interface/v2/scenario/105\",\"116\":\"http://localhost:9998/interface/v2/scenario/116\",\"138\":\"http://localhost:9998/interface/v2/scenario/138\",\"117\":\"http://localhost:9998/interface/v2/scenario/117\",\"139\":\"http://localhost:9998/interface/v2/scenario/139\",\"118\":\"http://localhost:9998/interface/v2/scenario/118\"},\"labels\":{\"1\":\"HELLOWORLD\",\"2\":\"helloWorld2\",\"3\":\"EmailTest\",\"100\":\"TestScenario\",\"101\":\"Test Insert Scenario\",\"134\":\"ReiseTestScenario\",\"103\":\"ScenarioTest1\",\"135\":\"ReiseTestScenario\",\"136\":\"TXOR1Scenario\",\"105\":\"TestScenarioTerminationCondition\",\"138\":\"TestEmail1Scenario\",\"139\":\"TestEmail1Scenario\",\"140\":\"TestEmail1Scenario\",\"141\":\"TestEmail2Scenario\",\"142\":\"TestEmail3Scenario\",\"111\":\"Test2_2ReferenceTest\",\"143\":\"TestEmail3Scenario\",\"144\":\"XORTest2Scenario\",\"113\":\"referencetest3_2\",\"114\":\"RT4Scenario\",\"115\":\"TT2Scenario\",\"116\":\"TT2Scenario\",\"117\":\"AT2Scenario\",\"118\":\"AT3Scenario\"}}", jsonEquals(response.readEntity(String.class)).when(Option.IGNORING_ARRAY_ORDER)); } /** * When you sent a GET to {@link RestInterface#getScenarios(UriInfo, String)} and * you use a Filter * then the returned JSON will contain the latest version of all Scenarios with * a name containing the filterString. */ @Test public void testGetScenarioContentWithFilter() { Response response = base.path("scenario").queryParam("filter", "HELLO").request().get(); assertThat("Get Scenarios did not contain the expected information", "{\"ids\":[1,2],\"labels\":{\"1\":\"HELLOWORLD\",\"2\":\"helloWorld2\"},\"links\":{\"1\":\"http://localhost:9998/interface/v2/scenario/1\",\"2\":\"http://localhost:9998/interface/v2/scenario/2\"}}", jsonEquals(response.readEntity(String.class)).when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a GET to {@link RestInterface#getScenario(UriInfo, int)} with an invalid id * a empty JSON with a 404 will be returned. 
*/ @Test public void testGetScenarioInvalidId() { Response response = base.path("scenario/99999").request().get(); assertEquals("Get Scenario returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertEquals("Get scenario returns a not empty JSON, but the id was invalid", 404, response.getStatus()); assertEquals("The content of the invalid request is not an empty JSONObject", "{}", response.readEntity(String.class)); } /** * If you send a GET to {@link RestInterface#getScenario(UriInfo, int)} with an valid id * a JSON containing the id, name and modelversion will be returned. */ @Test public void testGetScenario() { Response response = base.path("scenario/1").request().get(); String responseEntity = response.readEntity(String.class); assertEquals("The Response code of get Scenario was not 200", 200, response.getStatus()); assertEquals("Get Scenarios returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertNotEquals("Get scenarios did not respond with a valid JSON Array", null, new JSONObject(responseEntity)); assertThat("The content of the valid request is not as expected", "{\"modelid\":0,\"instances\":\"http://localhost:9998/interface/v2/scenario/1/instance\",\"name\":\"HELLOWORLD\",\"id\":1,\"modelversion\":0}", jsonEquals(responseEntity).when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getScenarioInstances(UriInfo, int, String)} * with valid params and no filter * then you get 200 a JSON Object. */ @Test public void testGetScenarioInstancesReturnsOkAndJSON() { Response response = base.path("scenario/1/instance").request().get(); assertEquals("The Response code of get get instances was not 200", 200, response.getStatus()); assertEquals("Get instances returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); } /** * When you send a Get to {@link RestInterface#getScenarioInstances(UriInfo, int, String)} * with an invalid scenario * then you get 404 with an error message inside the returned JSON object */ @Test public void testGetScenarioInstancesInvalidScenario() { Response response = base.path("scenario/9999/instance").request().get(); assertEquals("The Response code of get get instances was not 404", 404, response.getStatus()); assertEquals("Get instances returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON is invalid or does not contain the expected message", "{\"error\":\"Scenario not found!\"}", jsonEquals(response.readEntity(String.class))); } /** * When you send a Get to {@link RestInterface#getScenarioInstances(UriInfo, int, String)} * with a valid scenario id * then a json object with all instances, id and label should be returned. 
* The schema should be: * {"ids": [1,2..], "names":{1: "abc" ...}} */ @Test public void testGetScenarioInstancesReturnsCorrectJson() { Response response = base.path("scenario/1/instance").request().get(); assertThat("The returned JSON does not contain the expected content", "{\"ids\":[47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,92,94,95,97,99,101,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,212,214,215,216,217,218,219,220,221,222,223,224,226,228,244,246,248,250,252,255,257,259,261,262,263,265,266,270,279,281,282,284,285,286,294,296,309,310,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,333,334,346,348,349,351,353,356,358,359,361,363,365,366,367,369,370,371,372,373,374,375,376,377,378,379,380,381,383,385,386,387,388,390,392,394,396,398,400,402,404,406,408,409,410,411,412,413,415,416,417,418,419,420,421,422,430,433,438,441,444,445,446,448,449,452,456,459,460,461,464,467,470,473,476,479,482,485,488,491,492,493,494,497,505,515,519,527,531,552,561,563,572,582,590,598,605,612,614,621,628,635,642,649,656,663,670,677,684,691,698,705,712,719,726,733,740,747,754,756,763,770,777,784,791,809,816,823,836,846,853,860,867,895,915,951,952],\"links\":{\"590\":\"http://localhost:9998/interface/v2/scenario/1/instance/590\",\"470\":\"http://localhost:9998/interface/v2/scenario/1/instance/470\",\"351\":\"http://localhost:9998/interface/v2/scenario/1/instance/351\",\"110\":\"http://localhost:9998/interface/v2/scenario/1/instance/110\",\"473\":\"http://localhost:9998/interface/v2/scenario/1/instance/473\",\"111\":\"http://localhost:9998/interface/v2/scenario/1/instance/111\",\"353\":\"http://localhost:9998/interface/v2/scenario/1/instance/353\",\"112\":\"http://localhost:9998/interface/v2/scenario/1/instance/112\",\"113\":\"http://localhost:9998/interface/v2/scenario/1/instance/113\",\"476\":\"http://localhost:9998/interface/v2/scenario/1/instance/476\",\"114\":\"http://localhost:9998/interface/v2/scenario/1/instance/114\",\"356\":\"http://localhost:9998/interface/v2/scenario/1/instance/356\",\"598\":\"http://localhost:9998/interface/v2/scenario/1/instance/598\",\"115\":\"http://localhost:9998/interface/v2/scenario/1/instance/115\",\"116\":\"http://localhost:9998/interface/v2/scenario/1/instance/116\",\"358\":\"http://localhost:9998/interface/v2/scenario/1/instance/358\",\"479\":\"http://localhost:9998/interface/v2/scenario/1/instance/479\",\"117\":\"http://localhost:9998/interface/v2/scenario/1/instance/117\",\"359\":\"http://localhost:9998/interface/v2/scenario/1/instance/359\",\"118\":\"http://localhost:9998/interface/v2/scenario/1/instance/118\",\"119\":\"http://localhost:9998/interface/v2/scenario/1/instance/119\",\"915\":\"http://localhost:9998/interface/v2/scenario/1/instance/915\",\"361\":\"http://localhost:9998/interface/v2/scenario/1/instance/361\",\"482\":\"http://localhost:9998/interface/v2/scenario/1/instance/482\",\"120\":\"http://localhost:9998/interface/v2/scenario/1/instance/120\",\"121\":\"http://localhost:9998/interface/v2/scenario/1/instance/121\",\"363\":\"http://localhost:9998/interface/v2/scenario/1/instance/363\",\"122\"
:\"http://localhost:9998/interface/v2/scenario/1/instance/122\",\"485\":\"http://localhost:9998/interface/v2/scenario/1/instance/485\",\"123\":\"http://localhost:9998/interface/v2/scenario/1/instance/123\",\"244\":\"http://localhost:9998/interface/v2/scenario/1/instance/244\",\"365\":\"http://localhost:9998/interface/v2/scenario/1/instance/365\",\"124\":\"http://localhost:9998/interface/v2/scenario/1/instance/124\",\"366\":\"http://localhost:9998/interface/v2/scenario/1/instance/366\",\"125\":\"http://localhost:9998/interface/v2/scenario/1/instance/125\",\"246\":\"http://localhost:9998/interface/v2/scenario/1/instance/246\",\"367\":\"http://localhost:9998/interface/v2/scenario/1/instance/367\",\"488\":\"http://localhost:9998/interface/v2/scenario/1/instance/488\",\"126\":\"http://localhost:9998/interface/v2/scenario/1/instance/126\",\"127\":\"http://localhost:9998/interface/v2/scenario/1/instance/127\",\"248\":\"http://localhost:9998/interface/v2/scenario/1/instance/248\",\"369\":\"http://localhost:9998/interface/v2/scenario/1/instance/369\",\"128\":\"http://localhost:9998/interface/v2/scenario/1/instance/128\",\"129\":\"http://localhost:9998/interface/v2/scenario/1/instance/129\",\"809\":\"http://localhost:9998/interface/v2/scenario/1/instance/809\",\"370\":\"http://localhost:9998/interface/v2/scenario/1/instance/370\",\"491\":\"http://localhost:9998/interface/v2/scenario/1/instance/491\",\"250\":\"http://localhost:9998/interface/v2/scenario/1/instance/250\",\"371\":\"http://localhost:9998/interface/v2/scenario/1/instance/371\",\"492\":\"http://localhost:9998/interface/v2/scenario/1/instance/492\",\"130\":\"http://localhost:9998/interface/v2/scenario/1/instance/130\",\"372\":\"http://localhost:9998/interface/v2/scenario/1/instance/372\",\"493\":\"http://localhost:9998/interface/v2/scenario/1/instance/493\",\"131\":\"http://localhost:9998/interface/v2/scenario/1/instance/131\",\"252\":\"http://localhost:9998/interface/v2/scenario/1/instance/252\",\"373\":\"http://localhost:9998/interface/v2/scenario/1/instance/373\",\"494\":\"http://localhost:9998/interface/v2/scenario/1/instance/494\",\"132\":\"http://localhost:9998/interface/v2/scenario/1/instance/132\",\"374\":\"http://localhost:9998/interface/v2/scenario/1/instance/374\",\"133\":\"http://localhost:9998/interface/v2/scenario/1/instance/133\",\"375\":\"http://localhost:9998/interface/v2/scenario/1/instance/375\",\"134\":\"http://localhost:9998/interface/v2/scenario/1/instance/134\",\"255\":\"http://localhost:9998/interface/v2/scenario/1/instance/255\",\"376\":\"http://localhost:9998/interface/v2/scenario/1/instance/376\",\"497\":\"http://localhost:9998/interface/v2/scenario/1/instance/497\",\"135\":\"http://localhost:9998/interface/v2/scenario/1/instance/135\",\"377\":\"http://localhost:9998/interface/v2/scenario/1/instance/377\",\"136\":\"http://localhost:9998/interface/v2/scenario/1/instance/136\",\"257\":\"http://localhost:9998/interface/v2/scenario/1/instance/257\",\"378\":\"http://localhost:9998/interface/v2/scenario/1/instance/378\",\"137\":\"http://localhost:9998/interface/v2/scenario/1/instance/137\",\"379\":\"http://localhost:9998/interface/v2/scenario/1/instance/379\",\"138\":\"http://localhost:9998/interface/v2/scenario/1/instance/138\",\"259\":\"http://localhost:9998/interface/v2/scenario/1/instance/259\",\"139\":\"http://localhost:9998/interface/v2/scenario/1/instance/139\",\"816\":\"http://localhost:9998/interface/v2/scenario/1/instance/816\",\"380\":\"http://localhost:9998/interface/v2/scenario/1/instance/380\",\"381\":\"ht
tp://localhost:9998/interface/v2/scenario/1/instance/381\",\"140\":\"http://localhost:9998/interface/v2/scenario/1/instance/140\",\"261\":\"http://localhost:9998/interface/v2/scenario/1/instance/261\",\"141\":\"http://localhost:9998/interface/v2/scenario/1/instance/141\",\"262\":\"http://localhost:9998/interface/v2/scenario/1/instance/262\",\"383\":\"http://localhost:9998/interface/v2/scenario/1/instance/383\",\"142\":\"http://localhost:9998/interface/v2/scenario/1/instance/142\",\"263\":\"http://localhost:9998/interface/v2/scenario/1/instance/263\",\"143\":\"http://localhost:9998/interface/v2/scenario/1/instance/143\",\"385\":\"http://localhost:9998/interface/v2/scenario/1/instance/385\",\"144\":\"http://localhost:9998/interface/v2/scenario/1/instance/144\",\"265\":\"http://localhost:9998/interface/v2/scenario/1/instance/265\",\"386\":\"http://localhost:9998/interface/v2/scenario/1/instance/386\",\"145\":\"http://localhost:9998/interface/v2/scenario/1/instance/145\",\"266\":\"http://localhost:9998/interface/v2/scenario/1/instance/266\",\"387\":\"http://localhost:9998/interface/v2/scenario/1/instance/387\",\"146\":\"http://localhost:9998/interface/v2/scenario/1/instance/146\",\"388\":\"http://localhost:9998/interface/v2/scenario/1/instance/388\",\"147\":\"http://localhost:9998/interface/v2/scenario/1/instance/147\",\"148\":\"http://localhost:9998/interface/v2/scenario/1/instance/148\",\"149\":\"http://localhost:9998/interface/v2/scenario/1/instance/149\",\"823\":\"http://localhost:9998/interface/v2/scenario/1/instance/823\",\"705\":\"http://localhost:9998/interface/v2/scenario/1/instance/705\",\"47\":\"http://localhost:9998/interface/v2/scenario/1/instance/47\",\"48\":\"http://localhost:9998/interface/v2/scenario/1/instance/48\",\"49\":\"http://localhost:9998/interface/v2/scenario/1/instance/49\",\"390\":\"http://localhost:9998/interface/v2/scenario/1/instance/390\",\"270\":\"http://localhost:9998/interface/v2/scenario/1/instance/270\",\"150\":\"http://localhost:9998/interface/v2/scenario/1/instance/150\",\"392\":\"http://localhost:9998/interface/v2/scenario/1/instance/392\",\"151\":\"http://localhost:9998/interface/v2/scenario/1/instance/151\",\"152\":\"http://localhost:9998/interface/v2/scenario/1/instance/152\",\"394\":\"http://localhost:9998/interface/v2/scenario/1/instance/394\",\"153\":\"http://localhost:9998/interface/v2/scenario/1/instance/153\",\"154\":\"http://localhost:9998/interface/v2/scenario/1/instance/154\",\"396\":\"http://localhost:9998/interface/v2/scenario/1/instance/396\",\"155\":\"http://localhost:9998/interface/v2/scenario/1/instance/155\",\"156\":\"http://localhost:9998/interface/v2/scenario/1/instance/156\",\"398\":\"http://localhost:9998/interface/v2/scenario/1/instance/398\",\"157\":\"http://localhost:9998/interface/v2/scenario/1/instance/157\",\"158\":\"http://localhost:9998/interface/v2/scenario/1/instance/158\",\"279\":\"http://localhost:9998/interface/v2/scenario/1/instance/279\",\"159\":\"http://localhost:9998/interface/v2/scenario/1/instance/159\",\"951\":\"http://localhost:9998/interface/v2/scenario/1/instance/951\",\"952\":\"http://localhost:9998/interface/v2/scenario/1/instance/952\",\"712\":\"http://localhost:9998/interface/v2/scenario/1/instance/712\",\"50\":\"http://localhost:9998/interface/v2/scenario/1/instance/50\",\"836\":\"http://localhost:9998/interface/v2/scenario/1/instance/836\",\"51\":\"http://localhost:9998/interface/v2/scenario/1/instance/51\",\"52\":\"http://localhost:9998/interface/v2/scenario/1/instance/52\",\"53\":\"http://localhost:999
8/interface/v2/scenario/1/instance/53\",\"54\":\"http://localhost:9998/interface/v2/scenario/1/instance/54\",\"719\":\"http://localhost:9998/interface/v2/scenario/1/instance/719\",\"55\":\"http://localhost:9998/interface/v2/scenario/1/instance/55\",\"56\":\"http://localhost:9998/interface/v2/scenario/1/instance/56\",\"57\":\"http://localhost:9998/interface/v2/scenario/1/instance/57\",\"58\":\"http://localhost:9998/interface/v2/scenario/1/instance/58\",\"59\":\"http://localhost:9998/interface/v2/scenario/1/instance/59\",\"160\":\"http://localhost:9998/interface/v2/scenario/1/instance/160\",\"281\":\"http://localhost:9998/interface/v2/scenario/1/instance/281\",\"161\":\"http://localhost:9998/interface/v2/scenario/1/instance/161\",\"282\":\"http://localhost:9998/interface/v2/scenario/1/instance/282\",\"162\":\"http://localhost:9998/interface/v2/scenario/1/instance/162\",\"163\":\"http://localhost:9998/interface/v2/scenario/1/instance/163\",\"284\":\"http://localhost:9998/interface/v2/scenario/1/instance/284\",\"164\":\"http://localhost:9998/interface/v2/scenario/1/instance/164\",\"285\":\"http://localhost:9998/interface/v2/scenario/1/instance/285\",\"165\":\"http://localhost:9998/interface/v2/scenario/1/instance/165\",\"286\":\"http://localhost:9998/interface/v2/scenario/1/instance/286\",\"166\":\"http://localhost:9998/interface/v2/scenario/1/instance/166\",\"167\":\"http://localhost:9998/interface/v2/scenario/1/instance/167\",\"168\":\"http://localhost:9998/interface/v2/scenario/1/instance/168\",\"169\":\"http://localhost:9998/interface/v2/scenario/1/instance/169\",\"60\":\"http://localhost:9998/interface/v2/scenario/1/instance/60\",\"846\":\"http://localhost:9998/interface/v2/scenario/1/instance/846\",\"61\":\"http://localhost:9998/interface/v2/scenario/1/instance/61\",\"605\":\"http://localhost:9998/interface/v2/scenario/1/instance/605\",\"726\":\"http://localhost:9998/interface/v2/scenario/1/instance/726\",\"62\":\"http://localhost:9998/interface/v2/scenario/1/instance/62\",\"63\":\"http://localhost:9998/interface/v2/scenario/1/instance/63\",\"64\":\"http://localhost:9998/interface/v2/scenario/1/instance/64\",\"65\":\"http://localhost:9998/interface/v2/scenario/1/instance/65\",\"66\":\"http://localhost:9998/interface/v2/scenario/1/instance/66\",\"67\":\"http://localhost:9998/interface/v2/scenario/1/instance/67\",\"68\":\"http://localhost:9998/interface/v2/scenario/1/instance/68\",\"69\":\"http://localhost:9998/interface/v2/scenario/1/instance/69\",\"170\":\"http://localhost:9998/interface/v2/scenario/1/instance/170\",\"171\":\"http://localhost:9998/interface/v2/scenario/1/instance/171\",\"172\":\"http://localhost:9998/interface/v2/scenario/1/instance/172\",\"173\":\"http://localhost:9998/interface/v2/scenario/1/instance/173\",\"294\":\"http://localhost:9998/interface/v2/scenario/1/instance/294\",\"174\":\"http://localhost:9998/interface/v2/scenario/1/instance/174\",\"175\":\"http://localhost:9998/interface/v2/scenario/1/instance/175\",\"296\":\"http://localhost:9998/interface/v2/scenario/1/instance/296\",\"176\":\"http://localhost:9998/interface/v2/scenario/1/instance/176\",\"177\":\"http://localhost:9998/interface/v2/scenario/1/instance/177\",\"178\":\"http://localhost:9998/interface/v2/scenario/1/instance/178\",\"179\":\"http://localhost:9998/interface/v2/scenario/1/instance/179\",\"853\":\"http://localhost:9998/interface/v2/scenario/1/instance/853\",\"612\":\"http://localhost:9998/interface/v2/scenario/1/instance/612\",\"733\":\"http://localhost:9998/interface/v2/scenario/1/instance/733
\",\"70\":\"http://localhost:9998/interface/v2/scenario/1/instance/70\",\"614\":\"http://localhost:9998/interface/v2/scenario/1/instance/614\",\"71\":\"http://localhost:9998/interface/v2/scenario/1/instance/71\",\"72\":\"http://localhost:9998/interface/v2/scenario/1/instance/72\",\"73\":\"http://localhost:9998/interface/v2/scenario/1/instance/73\",\"74\":\"http://localhost:9998/interface/v2/scenario/1/instance/74\",\"75\":\"http://localhost:9998/interface/v2/scenario/1/instance/75\",\"76\":\"http://localhost:9998/interface/v2/scenario/1/instance/76\",\"77\":\"http://localhost:9998/interface/v2/scenario/1/instance/77\",\"78\":\"http://localhost:9998/interface/v2/scenario/1/instance/78\",\"79\":\"http://localhost:9998/interface/v2/scenario/1/instance/79\",\"180\":\"http://localhost:9998/interface/v2/scenario/1/instance/180\",\"181\":\"http://localhost:9998/interface/v2/scenario/1/instance/181\",\"182\":\"http://localhost:9998/interface/v2/scenario/1/instance/182\",\"183\":\"http://localhost:9998/interface/v2/scenario/1/instance/183\",\"184\":\"http://localhost:9998/interface/v2/scenario/1/instance/184\",\"185\":\"http://localhost:9998/interface/v2/scenario/1/instance/185\",\"186\":\"http://localhost:9998/interface/v2/scenario/1/instance/186\",\"187\":\"http://localhost:9998/interface/v2/scenario/1/instance/187\",\"188\":\"http://localhost:9998/interface/v2/scenario/1/instance/188\",\"189\":\"http://localhost:9998/interface/v2/scenario/1/instance/189\",\"860\":\"http://localhost:9998/interface/v2/scenario/1/instance/860\",\"740\":\"http://localhost:9998/interface/v2/scenario/1/instance/740\",\"621\":\"http://localhost:9998/interface/v2/scenario/1/instance/621\",\"80\":\"http://localhost:9998/interface/v2/scenario/1/instance/80\",\"81\":\"http://localhost:9998/interface/v2/scenario/1/instance/81\",\"867\":\"http://localhost:9998/interface/v2/scenario/1/instance/867\",\"82\":\"http://localhost:9998/interface/v2/scenario/1/instance/82\",\"505\":\"http://localhost:9998/interface/v2/scenario/1/instance/505\",\"747\":\"http://localhost:9998/interface/v2/scenario/1/instance/747\",\"83\":\"http://localhost:9998/interface/v2/scenario/1/instance/83\",\"84\":\"http://localhost:9998/interface/v2/scenario/1/instance/84\",\"628\":\"http://localhost:9998/interface/v2/scenario/1/instance/628\",\"190\":\"http://localhost:9998/interface/v2/scenario/1/instance/190\",\"191\":\"http://localhost:9998/interface/v2/scenario/1/instance/191\",\"192\":\"http://localhost:9998/interface/v2/scenario/1/instance/192\",\"193\":\"http://localhost:9998/interface/v2/scenario/1/instance/193\",\"194\":\"http://localhost:9998/interface/v2/scenario/1/instance/194\",\"195\":\"http://localhost:9998/interface/v2/scenario/1/instance/195\",\"196\":\"http://localhost:9998/interface/v2/scenario/1/instance/196\",\"197\":\"http://localhost:9998/interface/v2/scenario/1/instance/197\",\"198\":\"http://localhost:9998/interface/v2/scenario/1/instance/198\",\"199\":\"http://localhost:9998/interface/v2/scenario/1/instance/199\",\"754\":\"http://localhost:9998/interface/v2/scenario/1/instance/754\",\"635\":\"http://localhost:9998/interface/v2/scenario/1/instance/635\",\"756\":\"http://localhost:9998/interface/v2/scenario/1/instance/756\",\"92\":\"http://localhost:9998/interface/v2/scenario/1/instance/92\",\"515\":\"http://localhost:9998/interface/v2/scenario/1/instance/515\",\"94\":\"http://localhost:9998/interface/v2/scenario/1/instance/94\",\"95\":\"http://localhost:9998/interface/v2/scenario/1/instance/95\",\"519\":\"http://localhost:9998/interf
ace/v2/scenario/1/instance/519\",\"97\":\"http://localhost:9998/interface/v2/scenario/1/instance/97\",\"99\":\"http://localhost:9998/interface/v2/scenario/1/instance/99\",\"400\":\"http://localhost:9998/interface/v2/scenario/1/instance/400\",\"642\":\"http://localhost:9998/interface/v2/scenario/1/instance/642\",\"763\":\"http://localhost:9998/interface/v2/scenario/1/instance/763\",\"402\":\"http://localhost:9998/interface/v2/scenario/1/instance/402\",\"404\":\"http://localhost:9998/interface/v2/scenario/1/instance/404\",\"406\":\"http://localhost:9998/interface/v2/scenario/1/instance/406\",\"527\":\"http://localhost:9998/interface/v2/scenario/1/instance/527\",\"649\":\"http://localhost:9998/interface/v2/scenario/1/instance/649\",\"408\":\"http://localhost:9998/interface/v2/scenario/1/instance/408\",\"409\":\"http://localhost:9998/interface/v2/scenario/1/instance/409\",\"770\":\"http://localhost:9998/interface/v2/scenario/1/instance/770\",\"410\":\"http://localhost:9998/interface/v2/scenario/1/instance/410\",\"531\":\"http://localhost:9998/interface/v2/scenario/1/instance/531\",\"411\":\"http://localhost:9998/interface/v2/scenario/1/instance/411\",\"895\":\"http://localhost:9998/interface/v2/scenario/1/instance/895\",\"412\":\"http://localhost:9998/interface/v2/scenario/1/instance/412\",\"413\":\"http://localhost:9998/interface/v2/scenario/1/instance/413\",\"656\":\"http://localhost:9998/interface/v2/scenario/1/instance/656\",\"777\":\"http://localhost:9998/interface/v2/scenario/1/instance/777\",\"415\":\"http://localhost:9998/interface/v2/scenario/1/instance/415\",\"416\":\"http://localhost:9998/interface/v2/scenario/1/instance/416\",\"417\":\"http://localhost:9998/interface/v2/scenario/1/instance/417\",\"418\":\"http://localhost:9998/interface/v2/scenario/1/instance/418\",\"419\":\"http://localhost:9998/interface/v2/scenario/1/instance/419\",\"420\":\"http://localhost:9998/interface/v2/scenario/1/instance/420\",\"421\":\"http://localhost:9998/interface/v2/scenario/1/instance/421\",\"663\":\"http://localhost:9998/interface/v2/scenario/1/instance/663\",\"784\":\"http://localhost:9998/interface/v2/scenario/1/instance/784\",\"422\":\"http://localhost:9998/interface/v2/scenario/1/instance/422\",\"309\":\"http://localhost:9998/interface/v2/scenario/1/instance/309\",\"670\":\"http://localhost:9998/interface/v2/scenario/1/instance/670\",\"791\":\"http://localhost:9998/interface/v2/scenario/1/instance/791\",\"430\":\"http://localhost:9998/interface/v2/scenario/1/instance/430\",\"310\":\"http://localhost:9998/interface/v2/scenario/1/instance/310\",\"552\":\"http://localhost:9998/interface/v2/scenario/1/instance/552\",\"312\":\"http://localhost:9998/interface/v2/scenario/1/instance/312\",\"433\":\"http://localhost:9998/interface/v2/scenario/1/instance/433\",\"313\":\"http://localhost:9998/interface/v2/scenario/1/instance/313\",\"314\":\"http://localhost:9998/interface/v2/scenario/1/instance/314\",\"677\":\"http://localhost:9998/interface/v2/scenario/1/instance/677\",\"315\":\"http://localhost:9998/interface/v2/scenario/1/instance/315\",\"316\":\"http://localhost:9998/interface/v2/scenario/1/instance/316\",\"317\":\"http://localhost:9998/interface/v2/scenario/1/instance/317\",\"438\":\"http://localhost:9998/interface/v2/scenario/1/instance/438\",\"318\":\"http://localhost:9998/interface/v2/scenario/1/instance/318\",\"319\":\"http://localhost:9998/interface/v2/scenario/1/instance/319\",\"561\":\"http://localhost:9998/interface/v2/scenario/1/instance/561\",\"320\":\"http://localhost:9998/interface/v2/sc
enario/1/instance/320\",\"441\":\"http://localhost:9998/interface/v2/scenario/1/instance/441\",\"200\":\"http://localhost:9998/interface/v2/scenario/1/instance/200\",\"321\":\"http://localhost:9998/interface/v2/scenario/1/instance/321\",\"563\":\"http://localhost:9998/interface/v2/scenario/1/instance/563\",\"684\":\"http://localhost:9998/interface/v2/scenario/1/instance/684\",\"201\":\"http://localhost:9998/interface/v2/scenario/1/instance/201\",\"322\":\"http://localhost:9998/interface/v2/scenario/1/instance/322\",\"202\":\"http://localhost:9998/interface/v2/scenario/1/instance/202\",\"323\":\"http://localhost:9998/interface/v2/scenario/1/instance/323\",\"444\":\"http://localhost:9998/interface/v2/scenario/1/instance/444\",\"203\":\"http://localhost:9998/interface/v2/scenario/1/instance/203\",\"324\":\"http://localhost:9998/interface/v2/scenario/1/instance/324\",\"445\":\"http://localhost:9998/interface/v2/scenario/1/instance/445\",\"204\":\"http://localhost:9998/interface/v2/scenario/1/instance/204\",\"325\":\"http://localhost:9998/interface/v2/scenario/1/instance/325\",\"446\":\"http://localhost:9998/interface/v2/scenario/1/instance/446\",\"205\":\"http://localhost:9998/interface/v2/scenario/1/instance/205\",\"326\":\"http://localhost:9998/interface/v2/scenario/1/instance/326\",\"206\":\"http://localhost:9998/interface/v2/scenario/1/instance/206\",\"327\":\"http://localhost:9998/interface/v2/scenario/1/instance/327\",\"448\":\"http://localhost:9998/interface/v2/scenario/1/instance/448\",\"207\":\"http://localhost:9998/interface/v2/scenario/1/instance/207\",\"328\":\"http://localhost:9998/interface/v2/scenario/1/instance/328\",\"449\":\"http://localhost:9998/interface/v2/scenario/1/instance/449\",\"208\":\"http://localhost:9998/interface/v2/scenario/1/instance/208\",\"209\":\"http://localhost:9998/interface/v2/scenario/1/instance/209\",\"691\":\"http://localhost:9998/interface/v2/scenario/1/instance/691\",\"572\":\"http://localhost:9998/interface/v2/scenario/1/instance/572\",\"210\":\"http://localhost:9998/interface/v2/scenario/1/instance/210\",\"452\":\"http://localhost:9998/interface/v2/scenario/1/instance/452\",\"212\":\"http://localhost:9998/interface/v2/scenario/1/instance/212\",\"333\":\"http://localhost:9998/interface/v2/scenario/1/instance/333\",\"334\":\"http://localhost:9998/interface/v2/scenario/1/instance/334\",\"214\":\"http://localhost:9998/interface/v2/scenario/1/instance/214\",\"456\":\"http://localhost:9998/interface/v2/scenario/1/instance/456\",\"698\":\"http://localhost:9998/interface/v2/scenario/1/instance/698\",\"215\":\"http://localhost:9998/interface/v2/scenario/1/instance/215\",\"216\":\"http://localhost:9998/interface/v2/scenario/1/instance/216\",\"217\":\"http://localhost:9998/interface/v2/scenario/1/instance/217\",\"459\":\"http://localhost:9998/interface/v2/scenario/1/instance/459\",\"218\":\"http://localhost:9998/interface/v2/scenario/1/instance/218\",\"219\":\"http://localhost:9998/interface/v2/scenario/1/instance/219\",\"460\":\"http://localhost:9998/interface/v2/scenario/1/instance/460\",\"461\":\"http://localhost:9998/interface/v2/scenario/1/instance/461\",\"582\":\"http://localhost:9998/interface/v2/scenario/1/instance/582\",\"220\":\"http://localhost:9998/interface/v2/scenario/1/instance/220\",\"221\":\"http://localhost:9998/interface/v2/scenario/1/instance/221\",\"101\":\"http://localhost:9998/interface/v2/scenario/1/instance/101\",\"222\":\"http://localhost:9998/interface/v2/scenario/1/instance/222\",\"464\":\"http://localhost:9998/interface/v2/scenari
o/1/instance/464\",\"223\":\"http://localhost:9998/interface/v2/scenario/1/instance/223\",\"103\":\"http://localhost:9998/interface/v2/scenario/1/instance/103\",\"224\":\"http://localhost:9998/interface/v2/scenario/1/instance/224\",\"104\":\"http://localhost:9998/interface/v2/scenario/1/instance/104\",\"346\":\"http://localhost:9998/interface/v2/scenario/1/instance/346\",\"467\":\"http://localhost:9998/interface/v2/scenario/1/instance/467\",\"105\":\"http://localhost:9998/interface/v2/scenario/1/instance/105\",\"226\":\"http://localhost:9998/interface/v2/scenario/1/instance/226\",\"106\":\"http://localhost:9998/interface/v2/scenario/1/instance/106\",\"348\":\"http://localhost:9998/interface/v2/scenario/1/instance/348\",\"107\":\"http://localhost:9998/interface/v2/scenario/1/instance/107\",\"228\":\"http://localhost:9998/interface/v2/scenario/1/instance/228\",\"349\":\"http://localhost:9998/interface/v2/scenario/1/instance/349\",\"108\":\"http://localhost:9998/interface/v2/scenario/1/instance/108\",\"109\":\"http://localhost:9998/interface/v2/scenario/1/instance/109\"},\"labels\":{\"515\":\"HELLOWORLD\",\"519\":\"HELLOWORLD\",\"527\":\"HELLOWORLD\",\"531\":\"HELLOWORLD\",\"552\":\"HELLOWORLD\",\"47\":\"HELLOWORLD\",\"48\":\"HELLOWORLD\",\"49\":\"HELLOWORLD\",\"561\":\"HELLOWORLD\",\"50\":\"HELLOWORLD\",\"51\":\"HELLOWORLD\",\"563\":\"HELLOWORLD\",\"52\":\"HELLOWORLD\",\"53\":\"HELLOWORLD\",\"54\":\"HELLOWORLD\",\"55\":\"HELLOWORLD\",\"56\":\"HELLOWORLD\",\"57\":\"HELLOWORLD\",\"58\":\"HELLOWORLD\",\"59\":\"HELLOWORLD\",\"60\":\"HELLOWORLD\",\"572\":\"HELLOWORLD\",\"61\":\"HELLOWORLD\",\"62\":\"HELLOWORLD\",\"63\":\"HELLOWORLD\",\"64\":\"HELLOWORLD\",\"65\":\"HELLOWORLD\",\"66\":\"HELLOWORLD\",\"67\":\"HELLOWORLD\",\"68\":\"HELLOWORLD\",\"69\":\"HELLOWORLD\",\"70\":\"HELLOWORLD\",\"582\":\"HELLOWORLD\",\"71\":\"HELLOWORLD\",\"72\":\"HELLOWORLD\",\"73\":\"HELLOWORLD\",\"74\":\"HELLOWORLD\",\"75\":\"HELLOWORLD\",\"76\":\"HELLOWORLD\",\"77\":\"HELLOWORLD\",\"78\":\"HELLOWORLD\",\"590\":\"HELLOWORLD\",\"79\":\"HELLOWORLD\",\"80\":\"HELLOWORLD\",\"81\":\"HELLOWORLD\",\"82\":\"HELLOWORLD\",\"83\":\"HELLOWORLD\",\"84\":\"HELLOWORLD\",\"598\":\"HELLOWORLD\",\"92\":\"HELLOWORLD\",\"605\":\"HELLOWORLD\",\"94\":\"HELLOWORLD\",\"95\":\"HELLOWORLD\",\"97\":\"HELLOWORLD\",\"99\":\"HELLOWORLD\",\"612\":\"HELLOWORLD\",\"101\":\"HELLOWORLD\",\"614\":\"HELLOWORLD\",\"103\":\"HELLOWORLD\",\"104\":\"HELLOWORLD\",\"105\":\"HELLOWORLD\",\"106\":\"HELLOWORLD\",\"107\":\"HELLOWORLD\",\"108\":\"HELLOWORLD\",\"109\":\"HELLOWORLD\",\"621\":\"HELLOWORLD\",\"110\":\"HELLOWORLD\",\"111\":\"HELLOWORLD\",\"112\":\"HELLOWORLD\",\"113\":\"HELLOWORLD\",\"114\":\"HELLOWORLD\",\"115\":\"HELLOWORLD\",\"116\":\"HELLOWORLD\",\"628\":\"HELLOWORLD\",\"117\":\"HELLOWORLD\",\"118\":\"HELLOWORLD\",\"119\":\"HELLOWORLD\",\"120\":\"HELLOWORLD\",\"121\":\"HELLOWORLD\",\"122\":\"HELLOWORLD\",\"123\":\"HELLOWORLD\",\"635\":\"HELLOWORLD\",\"124\":\"HELLOWORLD\",\"125\":\"HELLOWORLD\",\"126\":\"HELLOWORLD\",\"127\":\"HELLOWORLD\",\"128\":\"HELLOWORLD\",\"129\":\"HELLOWORLD\",\"130\":\"HELLOWORLD\",\"642\":\"HELLOWORLD\",\"131\":\"HELLOWORLD\",\"132\":\"HELLOWORLD\",\"133\":\"HELLOWORLD\",\"134\":\"HELLOWORLD\",\"135\":\"HELLOWORLD\",\"136\":\"HELLOWORLD\",\"137\":\"HELLOWORLD\",\"649\":\"HELLOWORLD\",\"138\":\"HELLOWORLD\",\"139\":\"HELLOWORLD\",\"140\":\"HELLOWORLD\",\"141\":\"HELLOWORLD\",\"142\":\"HELLOWORLD\",\"143\":\"HELLOWORLD\",\"144\":\"HELLOWORLD\",\"656\":\"HELLOWORLD\",\"145\":\"HELLOWORLD\",\"146\":\"HELLOWORLD\",\"147\":\"HELLOWO
RLD\",\"148\":\"HELLOWORLD\",\"149\":\"HELLOWORLD\",\"150\":\"HELLOWORLD\",\"151\":\"HELLOWORLD\",\"663\":\"HELLOWORLD\",\"152\":\"HELLOWORLD\",\"153\":\"HELLOWORLD\",\"154\":\"HELLOWORLD\",\"155\":\"HELLOWORLD\",\"156\":\"HELLOWORLD\",\"157\":\"HELLOWORLD\",\"158\":\"HELLOWORLD\",\"670\":\"HELLOWORLD\",\"159\":\"HELLOWORLD\",\"160\":\"HELLOWORLD\",\"161\":\"HELLOWORLD\",\"162\":\"HELLOWORLD\",\"163\":\"HELLOWORLD\",\"164\":\"HELLOWORLD\",\"165\":\"HELLOWORLD\",\"677\":\"HELLOWORLD\",\"166\":\"HELLOWORLD\",\"167\":\"HELLOWORLD\",\"168\":\"HELLOWORLD\",\"169\":\"HELLOWORLD\",\"170\":\"HELLOWORLD\",\"171\":\"HELLOWORLD\",\"172\":\"HELLOWORLD\",\"684\":\"HELLOWORLD\",\"173\":\"HELLOWORLD\",\"174\":\"HELLOWORLD\",\"175\":\"HELLOWORLD\",\"176\":\"HELLOWORLD\",\"177\":\"HELLOWORLD\",\"178\":\"HELLOWORLD\",\"179\":\"HELLOWORLD\",\"691\":\"HELLOWORLD\",\"180\":\"HELLOWORLD\",\"181\":\"HELLOWORLD\",\"182\":\"HELLOWORLD\",\"183\":\"HELLOWORLD\",\"184\":\"HELLOWORLD\",\"185\":\"HELLOWORLD\",\"186\":\"HELLOWORLD\",\"698\":\"HELLOWORLD\",\"187\":\"HELLOWORLD\",\"188\":\"HELLOWORLD\",\"189\":\"HELLOWORLD\",\"190\":\"HELLOWORLD\",\"191\":\"HELLOWORLD\",\"192\":\"HELLOWORLD\",\"193\":\"HELLOWORLD\",\"705\":\"HELLOWORLD\",\"194\":\"HELLOWORLD\",\"195\":\"HELLOWORLD\",\"196\":\"HELLOWORLD\",\"197\":\"HELLOWORLD\",\"198\":\"HELLOWORLD\",\"199\":\"HELLOWORLD\",\"200\":\"HELLOWORLD\",\"712\":\"HELLOWORLD\",\"201\":\"HELLOWORLD\",\"202\":\"HELLOWORLD\",\"203\":\"HELLOWORLD\",\"204\":\"HELLOWORLD\",\"205\":\"HELLOWORLD\",\"206\":\"HELLOWORLD\",\"207\":\"HELLOWORLD\",\"719\":\"HELLOWORLD\",\"208\":\"HELLOWORLD\",\"209\":\"HELLOWORLD\",\"210\":\"HELLOWORLD\",\"212\":\"HELLOWORLD\",\"214\":\"HELLOWORLD\",\"726\":\"HELLOWORLD\",\"215\":\"HELLOWORLD\",\"216\":\"HELLOWORLD\",\"217\":\"HELLOWORLD\",\"218\":\"HELLOWORLD\",\"219\":\"HELLOWORLD\",\"220\":\"HELLOWORLD\",\"221\":\"HELLOWORLD\",\"733\":\"HELLOWORLD\",\"222\":\"HELLOWORLD\",\"223\":\"HELLOWORLD\",\"224\":\"HELLOWORLD\",\"226\":\"HELLOWORLD\",\"228\":\"HELLOWORLD\",\"740\":\"HELLOWORLD\",\"747\":\"HELLOWORLD\",\"754\":\"HELLOWORLD\",\"244\":\"HELLOWORLD\",\"756\":\"HELLOWORLD\",\"246\":\"HELLOWORLD\",\"248\":\"HELLOWORLD\",\"250\":\"HELLOWORLD\",\"763\":\"HELLOWORLD\",\"252\":\"HELLOWORLD\",\"255\":\"HELLOWORLD\",\"257\":\"HELLOWORLD\",\"770\":\"HELLOWORLD\",\"259\":\"HELLOWORLD\",\"261\":\"HELLOWORLD\",\"262\":\"HELLOWORLD\",\"263\":\"HELLOWORLD\",\"265\":\"HELLOWORLD\",\"777\":\"HELLOWORLD\",\"266\":\"HELLOWORLD\",\"270\":\"HELLOWORLD\",\"784\":\"HELLOWORLD\",\"279\":\"HELLOWORLD\",\"791\":\"HELLOWORLD\",\"281\":\"HELLOWORLD\",\"282\":\"HELLOWORLD\",\"284\":\"HELLOWORLD\",\"285\":\"HELLOWORLD\",\"286\":\"HELLOWORLD\",\"294\":\"HELLOWORLD\",\"296\":\"HELLOWORLD\",\"809\":\"HELLOWORLD\",\"816\":\"HELLOWORLD\",\"309\":\"HELLOWORLD\",\"310\":\"HELLOWORLD\",\"823\":\"HELLOWORLD\",\"312\":\"HELLOWORLD\",\"313\":\"HELLOWORLD\",\"314\":\"HELLOWORLD\",\"315\":\"HELLOWORLD\",\"316\":\"HELLOWORLD\",\"317\":\"HELLOWORLD\",\"318\":\"HELLOWORLD\",\"319\":\"HELLOWORLD\",\"320\":\"HELLOWORLD\",\"321\":\"HELLOWORLD\",\"322\":\"HELLOWORLD\",\"323\":\"HELLOWORLD\",\"324\":\"HELLOWORLD\",\"836\":\"HELLOWORLD\",\"325\":\"HELLOWORLD\",\"326\":\"HELLOWORLD\",\"327\":\"HELLOWORLD\",\"328\":\"HELLOWORLD\",\"333\":\"HELLOWORLD\",\"334\":\"HELLOWORLD\",\"846\":\"HELLOWORLD\",\"853\":\"HELLOWORLD\",\"346\":\"HELLOWORLD\",\"348\":\"HELLOWORLD\",\"860\":\"HELLOWORLD\",\"349\":\"HELLOWORLD\",\"351\":\"HELLOWORLD\",\"353\":\"HELLOWORLD\",\"867\":\"HELLOWORLD\",\"356\":\"HELLOWORLD\",\"358\"
:\"HELLOWORLD\",\"359\":\"HELLOWORLD\",\"361\":\"HELLOWORLD\",\"363\":\"HELLOWORLD\",\"365\":\"HELLOWORLD\",\"366\":\"HELLOWORLD\",\"367\":\"HELLOWORLD\",\"369\":\"HELLOWORLD\",\"370\":\"HELLOWORLD\",\"371\":\"HELLOWORLD\",\"372\":\"HELLOWORLD\",\"373\":\"HELLOWORLD\",\"374\":\"HELLOWORLD\",\"375\":\"HELLOWORLD\",\"376\":\"HELLOWORLD\",\"377\":\"HELLOWORLD\",\"378\":\"HELLOWORLD\",\"379\":\"HELLOWORLD\",\"380\":\"HELLOWORLD\",\"381\":\"HELLOWORLD\",\"383\":\"HELLOWORLD\",\"895\":\"HELLOWORLD\",\"385\":\"HELLOWORLD\",\"386\":\"HELLOWORLD\",\"387\":\"HELLOWORLD\",\"388\":\"HELLOWORLD\",\"390\":\"HELLOWORLD\",\"392\":\"HELLOWORLD\",\"394\":\"HELLOWORLD\",\"396\":\"HELLOWORLD\",\"398\":\"HELLOWORLD\",\"400\":\"HELLOWORLD\",\"402\":\"HELLOWORLD\",\"915\":\"HELLOWORLD\",\"404\":\"HELLOWORLD\",\"406\":\"HELLOWORLD\",\"408\":\"HELLOWORLD\",\"409\":\"HELLOWORLD\",\"410\":\"HELLOWORLD\",\"411\":\"HELLOWORLD\",\"412\":\"HELLOWORLD\",\"413\":\"HELLOWORLD\",\"415\":\"HELLOWORLD\",\"416\":\"HELLOWORLD\",\"417\":\"HELLOWORLD\",\"418\":\"HELLOWORLD\",\"419\":\"HELLOWORLD\",\"420\":\"HELLOWORLD\",\"421\":\"HELLOWORLD\",\"422\":\"HELLOWORLD\",\"430\":\"HELLOWORLD\",\"433\":\"HELLOWORLD\",\"438\":\"HELLOWORLD\",\"951\":\"HELLOWORLD\",\"952\":\"HELLOWORLD\",\"441\":\"HELLOWORLD\",\"444\":\"HELLOWORLD\",\"445\":\"HELLOWORLD\",\"446\":\"HELLOWORLD\",\"448\":\"HELLOWORLD\",\"449\":\"HELLOWORLD\",\"452\":\"HELLOWORLD\",\"456\":\"HELLOWORLD\",\"459\":\"HELLOWORLD\",\"460\":\"HELLOWORLD\",\"461\":\"HELLOWORLD\",\"464\":\"HELLOWORLD\",\"467\":\"HELLOWORLD\",\"470\":\"HELLOWORLD\",\"473\":\"HELLOWORLD\",\"476\":\"HELLOWORLD\",\"479\":\"HELLOWORLD\",\"482\":\"HELLOWORLD\",\"485\":\"HELLOWORLD\",\"488\":\"HELLOWORLD\",\"491\":\"HELLOWORLD\",\"492\":\"HELLOWORLD\",\"493\":\"HELLOWORLD\",\"494\":\"HELLOWORLD\",\"497\":\"HELLOWORLD\",\"505\":\"HELLOWORLD\"}}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Post to {@link RestInterface#terminateScenarioInstance(int, int)} * with an valid scenario instance id * the scenario should be terminated and the response is a 201. */ @Test public void terminateScenarioInstance() { Response response = base.path("scenario/1/instance/47").request().put(Entity.json("")); assertEquals("The Response code of terminating an instances was not 200", 200, response.getStatus()); } /** * When you send a Post to {@link RestInterface#terminateScenarioInstance(int, int)} * with an invalid instance id * then the Response should be a 404 with an error message. */ @Test public void terminateScenarioInstanceInvalidId() { Response response = base.path("scenario/1/instance/9999").request().put(Entity.json("")); assertEquals("The Response code of terminating an instances was not 400", 400, response.getStatus()); assertEquals("The Media type of terminating an instance was not JSON", MediaType.APPLICATION_JSON_TYPE, response.getMediaType()); assertThat("The content of the response was not as expected", response.readEntity(String.class), jsonEquals("{\"error\":\"The Scenario instance could not be found!\"}") .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getScenarioInstances(UriInfo, int, String)} * with a valid scenario id and a filter * only instances with names containing this string will be returned. 
     */
    @Test
    public void testGetScenarioInstancesWithFilter() {
        Response response = base.path("scenario/1/instance").queryParam("filter", "noInstanceLikeThis").request().get();
        assertThat("The returned JSON does not contain the expected content",
                "{\"ids\":[],\"labels\":{},\"links\":{}}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Post to {@link RestInterface#startNewInstance(UriInfo, int)}
     * then the Response will be a 201 and a JSON object with the new id will be returned.
     */
    @Test
    public void testStartNewInstanceWOName() {
        Response response = base.path("scenario/1/instance").request().post(null);
        assertEquals("The Response code of start new instances was not 201",
                201, response.getStatus());
        assertEquals("Start new instance returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                "{\"id\":966,\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/966\"}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Post to {@link RestInterface#startNewInstance(UriInfo, int)}
     * with an invalid scenario id
     * then the Response will be a 400 and an error message will be returned.
     */
    @Test
    public void testStartInvalidInstanceWOName() {
        Response response = base.path("scenario/9999/instance").request().post(null);
        assertEquals("The Response code of start new instances was not 400",
                400, response.getStatus());
        assertEquals("Start new instance returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                "{\"error\":\"The Scenario could not be found!\"}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Put to {@link RestInterface#startNewNamedInstance(UriInfo, int, RestInterface.NamedJaxBean)}
     * then the Response will be a 201 and a JSON object with the new id will be returned.
     */
    @Test
    public void testStartNewInstanceWName() {
        RestInterface.NamedJaxBean newName = new RestInterface.NamedJaxBean();
        newName.name = "Dies ist ein Test";
        Response response = base.path("scenario/1/instance")
                .request().put(Entity.json(newName));
        assertEquals("The Response code of start new instances was not 201",
                201, response.getStatus());
        assertEquals("Start new instance returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                "{\"id\":966,\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/966\"}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Put to {@link RestInterface#startNewNamedInstance(UriInfo, int, RestInterface.NamedJaxBean)}
     * with an invalid scenario id
     * then the Response will be a 400 and an error message will be returned.
     */
    @Test
    public void testStartInvalidInstanceWName() {
        RestInterface.NamedJaxBean newName = new RestInterface.NamedJaxBean();
        newName.name = "Dies ist ein Test";
        Response response = base.path("scenario/9999/instance").request()
                .put(Entity.json(newName));
        assertEquals("The Response code of start new instances was not 400",
                400, response.getStatus());
        assertEquals("Start new instance returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                "{\"error\":\"The Scenario could not be found!\"}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Get to {@link RestInterface#getScenarioInstance(int, int)}
     * with a correct scenario id and a correct instance id
     * the response will be a 200 with a JSON object.
     */
    @Test
    public void testGetScenarioInstanceReturnsJSON() {
        Response response = base.path("scenario/1/instance/72").request().get();
        assertEquals("The Response code of getScenarioInstance was not 200",
                200, response.getStatus());
        assertEquals("getScenarioInstance returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                "{\"name\":\"HELLOWORLD\",\"id\":72,\"terminated\":false,\"scenario_id\":1}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Get to {@link RestInterface#getScenarioInstance(int, int)}
     * with a wrong scenario id and a correct instance id
     * the response will be a 200 because the request is redirected to the correct scenario.
     */
    @Test
    public void testGetScenarioInstanceWithWrongScenarioRedirects() {
        Response response = base.path("scenario/9999/instance/72").request().get();
        assertEquals("The Response code of getScenarioInstance was not 200",
                200, response.getStatus());
        assertEquals("getScenarioInstance returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                "{\"name\":\"HELLOWORLD\",\"id\":72,\"terminated\":false,\"scenario_id\":1}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Get to {@link RestInterface#getScenarioInstance(int, int)}
     * with a wrong scenario id and a wrong instance id
     * the response will be a 404 with an error message.
     */
    @Test
    public void testGetScenarioInstanceWithWrongInstanceThrowsError() {
        Response response = base.path("scenario/9999/instance/9999").request().get();
        assertEquals("The Response code of getScenarioInstance was not 404",
                404, response.getStatus());
        assertEquals("getScenarioInstance returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                "{\"message\":\"There is no instance with the id 9999\"}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)}
     * with a wrong scenario ID the request should be redirected to the correct one.
     */
    @Test
    public void testGetActivitiesRedirects() {
        Response response = base.path("scenario/9999/instance/72/activity").request().get();
        assertEquals("The Response code of getActivitiesOfInstance was not 200",
                200, response.getStatus());
        assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                "{\"activities\":{\"189\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"},\"6686\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/6686\",\"id\":6686,\"label\":\"ActivityFragment4\",\"state\":\"ready\"},\"186\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/186\",\"id\":186,\"label\":\"Activity1Fragment1\",\"state\":\"ready\"}},\"ids\":[186,189,6686]}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)}
     * with a wrong scenario instance ID
     * then a 404 with an error message (inside JSON) should be returned.
     */
    @Test
    public void testGetActivitiesInvalidInstance() {
        Response response = base.path("scenario/1/instance/9999/activity").request().get();
        assertEquals("The Response code of getActivitiesOfInstance was not 404",
                404, response.getStatus());
        assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type",
                MediaType.APPLICATION_JSON, response.getMediaType().toString());
        assertThat("The returned JSON does not contain the expected content",
                "{\"message\":\"There is no instance with id 9999\"}",
                jsonEquals(response.readEntity(String.class))
                        .when(Option.IGNORING_ARRAY_ORDER));
    }

    /**
     * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)}
     * with correct parameters, a state but no filter,
     * then the request should return all activities with this state.
*/ @Test public void testGetActivitiesWithState() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("status", "ready").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":[{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/186\",\"id\":186,\"label\":\"Activity1Fragment1\",\"state\":\"ready\"},{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"},{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/6686\",\"id\":6686,\"label\":\"ActivityFragment4\",\"state\":\"ready\"}],\"ids\":[186,189,6686]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with an correct parameters, an invalid state but no filter * the request should return a 404 with error message */ @Test public void testGetActivitiesWithInvalidState() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("status", "enabled").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 404", 404, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"The state is not allowed enabled\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with an correct parameters a state but no filter * then the request should return all activities with this state. */ @Test public void testGetActivitiesWithStateTerminated() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("status", "terminated").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":[],\"ids\":[]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with an correct parameters a state and a filter * then the request should return all activities with the state who fulfill the filter condition. 
*/ @Test public void testGetActivitiesWithStateAndFilter() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("status", "ready") .queryParam("filter", "2").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":[{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"}],\"ids\":[189]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with an correct parameters, an invalid state but no filter * the request should return a 404 with error message */ @Test public void testGetActivitiesWithInvalidStateFilter() { Response response = base.path("scenario/1/instance/72/activity") .queryParam("status", "enabled") .queryParam("filter", "1").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 404", 404, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"The state is not allowed enabled\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with correct instance and scenario * a 200 with json content will be returned. */ @Test public void testGetActivitiesCorrect() { Response response = base.path("scenario/1/instance/72/activity").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":{\"189\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"},\"6686\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/6686\",\"id\":6686,\"label\":\"ActivityFragment4\",\"state\":\"ready\"},\"186\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/186\",\"id\":186,\"label\":\"Activity1Fragment1\",\"state\":\"ready\"}},\"ids\":[186,189,6686]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getActivitiesOfInstance(UriInfo, int, int, String, String)} * with a filter String * then only activities with a label like the filter String will be returned. 
*/ @Test public void testGetActivitiesWithFilter() { Response response = base.path("scenario/1/instance/72/activity").queryParam("filter", "2").request().get(); assertEquals("The Response code of getActivitiesOfInstance was not 200", 200, response.getStatus()); assertEquals("GetActivitiesOfInstance returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"activities\":{\"189\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/activity/189\",\"id\":189,\"label\":\"Activity1Fragment2\",\"state\":\"ready\"}},\"ids\":[189]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * WHen you send a Get to {@link RestInterface#getDataObjects(UriInfo, int, int, String)} * with a correct instance id and a wrong scenario ID * you will be redirected automatically. */ @Test public void testGetDataObjectsRedirects() { Response response = base.path("scenario/9999/instance/72/dataobject").request().get(); assertEquals("The Response code of getDataObjects was not 200", 200, response.getStatus()); assertEquals("getDataObjects returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"ids\":[1,2],\"results\":{\"1\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/dataobject/1\",\"id\":1,\"label\":\"object1\",\"state\":\"init\"},\"2\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/72/dataobject/2\",\"id\":2,\"label\":\"object2\",\"state\":\"init\"}}}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * WHen you send a Get to {@link RestInterface#getDataObjects(UriInfo, int, int, String)} * with an invalid instance * an 404 with error message will be returned */ @Test public void testGetDataObjectsInvalid() { Response response = base.path("scenario/9999/instance/9999/dataobject").request().get(); assertEquals("The Response code of getDataObjects was not 404", 404, response.getStatus()); assertEquals("getDataObjects returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"There is no instance with the id 9999\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObjects(UriInfo, int, int, String)} * with an valid instance and scenario and no filter String * you will get a list of all DataObjects for this scenario. 
*/ @Test public void testGetDataObjectsWOFilter() { Response response = base.path("scenario/1/instance/62/dataobject").request().get(); assertEquals("The Response code of getDataObjects was not 200", 200, response.getStatus()); assertEquals("getDataObjects returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"ids\":[1,2],\"results\":{\"1\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/62/dataobject/1\",\"id\":1,\"label\":\"object1\",\"state\":\"init\"},\"2\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/62/dataobject/2\",\"id\":2,\"label\":\"object2\",\"state\":\"init\"}}}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObjects(UriInfo, int, int, String)} * with an valid instance and scenario and an filter String * you will get a list of all DataObjects with labels like the filter String for this scenario. */ @Test public void testGetDataObjectsWithFilter() { Response response = base.path("scenario/1/instance/62/dataobject") .queryParam("filter", "1").request().get(); assertEquals("The Response code of getDataObjects was not 200", 200, response.getStatus()); assertEquals("getDataObjects returns a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"ids\":[1],\"results\":{\"1\":{\"link\":\"http://localhost:9998/interface/v2/scenario/1/instance/62/dataobject/1\",\"id\":1,\"label\":\"object1\",\"state\":\"init\"}}}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObject(int, int, int)} * with a correct scenario instance id but a wrong scenario id * you will be redirected */ @Test public void testGetDataObjectRedirects() { Response response = base.path("scenario/9999/instance/62/dataobject/1").request().get(); assertEquals("The Response code of getDataObject was not 200", 200, response.getStatus()); assertEquals("getDataObject return a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"label\":\"object1\",\"id\":1,\"state\":\"init\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObject(int, int, int)} * with correct instance and scenario id but a wrong dataobject id * you will get a 404 with an error message. 
*/ @Test public void testGetDataObjectInvalidDoId() { Response response = base.path("scenario/1/instance/62/dataobject/9999").request().get(); assertEquals("The Response code of getDataObject was not 404", 404, response.getStatus()); assertEquals("getDataObject return a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"There is no dataobject with the id 9999 for the scenario instance 62\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObject(int, int, int)} * with correct scenario id but an incorrect instance id * you will get a 404 with an error message */ @Test public void testGetDataObjectInvalidInstanceId() { Response response = base.path("scenario/1/instance/9999/dataobject/1").request().get(); assertEquals("The Response code of getDataObject was not 404", 404, response.getStatus()); assertEquals("getDataObject return a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"There is no instance with the id 9999\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getDataObject(int, int, int)} * with correct instance, scenario and dataobject id * you will get a 200 with an json object. */ @Test public void testGetDataObject() { Response response = base.path("scenario/1/instance/62/dataobject/1").request().get(); assertEquals("The Response code of getDataObject was not 200", 200, response.getStatus()); assertEquals("getDataObject return a Response with the wrong media Type", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"label\":\"object1\",\"id\":1,\"state\":\"init\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getTerminationCondition(int)} * with an valid id * then a JSON with the termination condition will be returned */ @Test public void testGetTerminationCondition() { Response response = base.path("scenario/105/terminationcondition").request().get(); assertEquals("The Response code of getTermiantionCondition was not 200", 200, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"conditions\":{\"1\":[{\"data_object\":\"A\",\"set_id\":1,\"state\":\"c\"}]},\"setIDs\":[1]}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#getTerminationCondition(int)} * with an invalid id * then a 404 with an error message should be returned */ @Test public void testInvalidGetTerminationCondition() { Response response = base.path("scenario/102/terminationcondition").request().get(); assertEquals("The Response code of getTermiantionCondition was not 404", 404, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"There is no scenario with the id 
102\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * When you send a Get to {@link RestInterface#updateActivityState(String, int, int, String)} * with an invalid state * a bad request with an error message should be returned. */ @Test public void testInvalidStateUpdateActivity() { Response response = base.path("scenario/1/instance/72/activity/105") .queryParam("state", "complete").request().put(Entity.json("")); assertEquals("The Response code of getTerminationCondition was not 400", 400, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"The state transition complete is unknown\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * * When you send a Get to {@link RestInterface#updateActivityState(String, int, int, String)} * with an valid state for an invalid activity. * a bad request with an error message should be returned. */ @Test public void testInvalidActivityUpdateActivity() { Response response = base.path("scenario/1/instance/72/activity/105") .queryParam("state", "begin").request().put(Entity.json("")); assertEquals("The Response code of getTerminationCondition was not 400", 400, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"error\":\"impossible to start activity with id 105\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * * When you send a Get to {@link RestInterface#updateActivityState(String, int, int, String)} * with an valid state and valid activity * then a 201 will be returned with a message inside a JSON-Object. 
*/ @Test public void testUpdateActivity() { Response response = base.path("scenario/1/instance/72/activity/2") .queryParam("state", "begin").request().put(Entity.json("")); assertEquals("The Response code of getTerminationCondition was not 202", 202, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"message\":\"activity state changed.\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); response = base.path("scenario/1/instance/72/activity/2") .queryParam("state", "terminate").request().put(Entity.json("")); assertEquals("The Response code of getTerminationCondition was not 202", 202, response.getStatus()); assertEquals("Get TerminationCondition does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", "{\"message\":\"activity state changed.\"}", jsonEquals(response.readEntity(String.class)) .when(Option.IGNORING_ARRAY_ORDER)); } /** * Given is the Rest API * When you send a PUT to {@link RestInterface#terminateScenarioInstance(int, int)} * with an invalid scenario id or instance id * then a 400 will be returned with an error message */ @Test public void testTerminateInvalidScenarioInstance() { Response response = base.path("scenario/9999/instance/72") .queryParam("state", "begin").request().put(Entity.json("{}")); assertEquals("The Response code of terminateScenarioInstance was not 400", 400, response.getStatus()); assertEquals("Get terminateScenarioInstance does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", response.readEntity(String.class), jsonEquals("{\"error\":\"The Scenario instance could not be found!\"}") .when(Option.IGNORING_ARRAY_ORDER)); response = base.path("scenario/1/instance/9999") .queryParam("status", "begin").request().put(Entity.json("{}")); assertEquals("The Response code of terminateScenarioInstance was not 400", 400, response.getStatus()); assertEquals("Get terminateScenarioInstance does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", response.readEntity(String.class), jsonEquals("{\"error\":\"The Scenario instance could not be found!\"}") .when(Option.IGNORING_ARRAY_ORDER)); } /** * Given is the Rest API * When you send a PUT to {@link RestInterface#terminateScenarioInstance(int, int)} * with an valid scenario and instance id * the instance will be terminated. */ @Test public void testTerminateScenarioInstance() { Response response = base.path("scenario/1/instance/72") .queryParam("state", "begin").request().put(Entity.json("{}")); assertEquals("The Response code of terminateScenarioInstance was not 200", 200, response.getStatus()); assertEquals("terminateScenarioInstance does not return a JSON", MediaType.APPLICATION_JSON, response.getMediaType().toString()); assertThat("The returned JSON does not contain the expected content", response.readEntity(String.class), jsonEquals("{\"message\":\"The is instance has been terminated.\"}") .when(Option.IGNORING_ARRAY_ORDER)); } }
state fixes
src/test/java/de/uni_potsdam/hpi/bpt/bp2014/jcore/rest/RestInterfaceTest.java
state fixes
Java
mit
093f6eaf0633e443bf144589ae282cf86c15b705
0
fclairamb/m2mp,fclairamb/m2mp,fclairamb/m2mp,fclairamb/m2mp
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.m2mp.common.io; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * * @author florent */ public class Streams { private static final int IO_BUFFER_SIZE = 8 * 1024; public static void copy(InputStream in, OutputStream out) throws IOException { byte[] b = new byte[IO_BUFFER_SIZE]; int read; while ((read = in.read(b)) != -1) { out.write(b, 0, read); } } /** * Copy a stream to an other stream. * @param in Input stream * @param out Output stream * @param length Bytes to copy * @return Remaining bytes * @throws IOException */ public static long copy(InputStream in, OutputStream out, long length) throws IOException { byte[] b = new byte[IO_BUFFER_SIZE]; int read; while ((read = in.read(b)) != -1 && length > 0) { if (read > length) { read = (int) length; } length -= read; out.write(b, 0, read); } return length; } }
org.m2mp.common/src/main/java/org/m2mp/common/io/Streams.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.m2mp.common.io; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * * @author florent */ public class Streams { private static final int IO_BUFFER_SIZE = 8 * 1024; public static void copy(InputStream in, OutputStream out) throws IOException { byte[] b = new byte[IO_BUFFER_SIZE]; int read; while ((read = in.read(b)) != -1) { out.write(b, 0, read); } } public static void copy(InputStream in, OutputStream out, long length) throws IOException { byte[] b = new byte[IO_BUFFER_SIZE]; int read; while ((read = in.read(b)) != -1 && length > 0) { if (read > length) { read = (int) length; } length -= read; out.write(b, 0, read); } } }
Improving the Streams.copy with length method to report the number of bytes left.
org.m2mp.common/src/main/java/org/m2mp/common/io/Streams.java
Improving the Streams.copy with length method to report the number of bytes left.
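For illustration only, a minimal sketch of how the length-bounded Streams.copy and its new remaining-bytes return value might be used. The Streams API is exactly the one listed in the record above; the example class name and the byte-array streams are assumptions made for the demo.

// Minimal usage sketch; only Streams.copy(InputStream, OutputStream, long) comes from the record above.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.m2mp.common.io.Streams;

public class StreamsCopyExample {
    public static void main(String[] args) throws IOException {
        byte[] data = "hello".getBytes();                          // 5 bytes of input
        ByteArrayInputStream in = new ByteArrayInputStream(data);
        ByteArrayOutputStream out = new ByteArrayOutputStream();

        // Ask to copy up to 10 bytes; the stream only holds 5, so 5 bytes remain "owed".
        long remaining = Streams.copy(in, out, 10);

        System.out.println("copied=" + out.size() + " remaining=" + remaining); // copied=5 remaining=5
    }
}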
Java
mit
df178f18d4caa593eeaba896a72fb3d4c76633f3
0
tsdl2013/YouTubePlayerActivity,hilpitome/YouTubePlayerActivity,lstNull/YouTubePlayerActivity,TheFinestArtist/YouTubePlayerActivity,honeyflyfish/YouTubePlayerActivity,akhilesh0707/YouTubePlayerActivity,0359xiaodong/YouTubePlayerActivity
package com.thefinestartist.simpleyoutubeplayer; import android.annotation.TargetApi; import android.app.Activity; import android.content.Intent; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.view.MotionEvent; import android.view.View; import android.view.View.OnClickListener; import com.thefinestartist.simpleyoutubeplayer.util.SystemUiHider; /** * An example full-screen activity that shows and hides the system UI (i.e. * status bar and navigation/system bar) with user interaction. * * @see SystemUiHider */ public class MainActivity extends Activity { /** * Whether or not the system UI should be auto-hidden after * {@link #AUTO_HIDE_DELAY_MILLIS} milliseconds. */ private static final boolean AUTO_HIDE = false; /** * If {@link #AUTO_HIDE} is set, the number of milliseconds to wait after * user interaction before hiding the system UI. */ private static final int AUTO_HIDE_DELAY_MILLIS = 3000; /** * If set, will toggle the system UI visibility upon interaction. Otherwise, * will show the system UI visibility upon interaction. */ private static final boolean TOGGLE_ON_CLICK = true; /** * The flags to pass to {@link SystemUiHider#getInstance}. */ private static final int HIDER_FLAGS = SystemUiHider.FLAG_HIDE_NAVIGATION; /** * The instance of the {@link SystemUiHider} for this activity. */ private SystemUiHider mSystemUiHider; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); final View controlsView = findViewById(R.id.fullscreen_content_controls); final View contentView = findViewById(R.id.fullscreen_content); // Set up an instance of SystemUiHider to control the system UI for // this activity. mSystemUiHider = SystemUiHider.getInstance(this, contentView, HIDER_FLAGS); mSystemUiHider.setup(); mSystemUiHider .setOnVisibilityChangeListener(new SystemUiHider.OnVisibilityChangeListener() { // Cached values. int mControlsHeight; int mShortAnimTime; @SuppressWarnings("unused") @Override @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2) public void onVisibilityChange(boolean visible) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) { // If the ViewPropertyAnimator API is available // (Honeycomb MR2 and later), use it to animate the // in-layout UI controls at the bottom of the // screen. if (mControlsHeight == 0) { mControlsHeight = controlsView.getHeight(); } if (mShortAnimTime == 0) { mShortAnimTime = getResources().getInteger( android.R.integer.config_shortAnimTime); } controlsView.animate() .translationY(visible ? 0 : mControlsHeight) .setDuration(mShortAnimTime); } else { // If the ViewPropertyAnimator APIs aren't // available, simply show or hide the in-layout UI // controls. controlsView.setVisibility(visible ? View.VISIBLE : View.GONE); } if (visible && AUTO_HIDE) { // Schedule a hide(). delayedHide(AUTO_HIDE_DELAY_MILLIS); } } }); // Set up the user interaction to manually show or hide the system UI. contentView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { if (TOGGLE_ON_CLICK) { mSystemUiHider.toggle(); } else { mSystemUiHider.show(); } } }); // Upon interacting with UI controls, delay any scheduled hide() // operations to prevent the jarring behavior of controls going away // while interacting with the UI. 
findViewById(R.id.dummy_button).setOnTouchListener(mDelayHideTouchListener); final String videoId = YouTubePlayerActivity.getYouTubeVideoId("http://www.youtube.com/watch?v=9bZkp7q19f0"); findViewById(R.id.dummy_button).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(MainActivity.this, YouTubePlayerActivity.class); intent.putExtra(YouTubePlayerActivity.EXTRA_VIDEO_ID, videoId); //This Flag might cause the video to turned off automatically on phonecall intent.setFlags(Intent.FLAG_ACTIVITY_NO_HISTORY); startActivity(intent); } }); } @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); // Trigger the initial hide() shortly after the activity has been // created, to briefly hint to the user that UI controls // are available. delayedHide(100); } /** * Touch listener to use for in-layout UI controls to delay hiding the * system UI. This is to prevent the jarring behavior of controls going away * while interacting with activity UI. */ View.OnTouchListener mDelayHideTouchListener = new View.OnTouchListener() { @Override public boolean onTouch(View view, MotionEvent motionEvent) { if (AUTO_HIDE) { delayedHide(AUTO_HIDE_DELAY_MILLIS); } return false; } }; Handler mHideHandler = new Handler(); Runnable mHideRunnable = new Runnable() { @Override public void run() { mSystemUiHider.hide(); } }; /** * Schedules a call to hide() in [delay] milliseconds, canceling any * previously scheduled calls. */ private void delayedHide(int delayMillis) { mHideHandler.removeCallbacks(mHideRunnable); mHideHandler.postDelayed(mHideRunnable, delayMillis); } }
src/com/thefinestartist/simpleyoutubeplayer/MainActivity.java
package com.thefinestartist.simpleyoutubeplayer; import android.annotation.TargetApi; import android.app.Activity; import android.content.Intent; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.view.MotionEvent; import android.view.View; import android.view.View.OnClickListener; import com.thefinestartist.simpleyoutubeplayer.util.SystemUiHider; /** * An example full-screen activity that shows and hides the system UI (i.e. * status bar and navigation/system bar) with user interaction. * * @see SystemUiHider */ public class MainActivity extends Activity { /** * Whether or not the system UI should be auto-hidden after * {@link #AUTO_HIDE_DELAY_MILLIS} milliseconds. */ private static final boolean AUTO_HIDE = false; /** * If {@link #AUTO_HIDE} is set, the number of milliseconds to wait after * user interaction before hiding the system UI. */ private static final int AUTO_HIDE_DELAY_MILLIS = 3000; /** * If set, will toggle the system UI visibility upon interaction. Otherwise, * will show the system UI visibility upon interaction. */ private static final boolean TOGGLE_ON_CLICK = true; /** * The flags to pass to {@link SystemUiHider#getInstance}. */ private static final int HIDER_FLAGS = SystemUiHider.FLAG_HIDE_NAVIGATION; /** * The instance of the {@link SystemUiHider} for this activity. */ private SystemUiHider mSystemUiHider; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); final View controlsView = findViewById(R.id.fullscreen_content_controls); final View contentView = findViewById(R.id.fullscreen_content); // Set up an instance of SystemUiHider to control the system UI for // this activity. mSystemUiHider = SystemUiHider.getInstance(this, contentView, HIDER_FLAGS); mSystemUiHider.setup(); mSystemUiHider .setOnVisibilityChangeListener(new SystemUiHider.OnVisibilityChangeListener() { // Cached values. int mControlsHeight; int mShortAnimTime; @SuppressWarnings("unused") @Override @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2) public void onVisibilityChange(boolean visible) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) { // If the ViewPropertyAnimator API is available // (Honeycomb MR2 and later), use it to animate the // in-layout UI controls at the bottom of the // screen. if (mControlsHeight == 0) { mControlsHeight = controlsView.getHeight(); } if (mShortAnimTime == 0) { mShortAnimTime = getResources().getInteger( android.R.integer.config_shortAnimTime); } controlsView.animate() .translationY(visible ? 0 : mControlsHeight) .setDuration(mShortAnimTime); } else { // If the ViewPropertyAnimator APIs aren't // available, simply show or hide the in-layout UI // controls. controlsView.setVisibility(visible ? View.VISIBLE : View.GONE); } if (visible && AUTO_HIDE) { // Schedule a hide(). delayedHide(AUTO_HIDE_DELAY_MILLIS); } } }); // Set up the user interaction to manually show or hide the system UI. contentView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { if (TOGGLE_ON_CLICK) { mSystemUiHider.toggle(); } else { mSystemUiHider.show(); } } }); // Upon interacting with UI controls, delay any scheduled hide() // operations to prevent the jarring behavior of controls going away // while interacting with the UI. 
findViewById(R.id.dummy_button).setOnTouchListener(mDelayHideTouchListener); final String videoId = YouTubePlayerActivity.getYouTubeVideoId("http://www.youtube.com/watch?v=9bZkp7q19f0"); findViewById(R.id.dummy_button).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(MainActivity.this, YouTubePlayerActivity.class); intent.putExtra(YouTubePlayerActivity.EXTRA_VIDEO_ID, videoId); intent.setFlags(Intent.FLAG_ACTIVITY_NO_HISTORY); startActivity(intent); } }); } @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); // Trigger the initial hide() shortly after the activity has been // created, to briefly hint to the user that UI controls // are available. delayedHide(100); } /** * Touch listener to use for in-layout UI controls to delay hiding the * system UI. This is to prevent the jarring behavior of controls going away * while interacting with activity UI. */ View.OnTouchListener mDelayHideTouchListener = new View.OnTouchListener() { @Override public boolean onTouch(View view, MotionEvent motionEvent) { if (AUTO_HIDE) { delayedHide(AUTO_HIDE_DELAY_MILLIS); } return false; } }; Handler mHideHandler = new Handler(); Runnable mHideRunnable = new Runnable() { @Override public void run() { mSystemUiHider.hide(); } }; /** * Schedules a call to hide() in [delay] milliseconds, canceling any * previously scheduled calls. */ private void delayedHide(int delayMillis) { mHideHandler.removeCallbacks(mHideRunnable); mHideHandler.postDelayed(mHideRunnable, delayMillis); } }
Update MainActivity.java
src/com/thefinestartist/simpleyoutubeplayer/MainActivity.java
Update MainActivity.java
Java
mit
de77b844686c78049890b4a0fa6b9f2bd4123fba
0
chpoon92/vector-model-search-engine-java
package hk.ust.comp4321.database; import java.io.IOException; import java.util.ArrayList; import java.util.List; import jdbm.helper.FastIterator; import jdbm.htree.HTree; import jdbm.RecordManager; import jdbm.RecordManagerFactory; /** * * @author Alex Poon * */ public class ForwardIndexTable { // Constants and fields. // ------------------------------------------------------------------------- private static ForwardIndexTable forwardIndexTable = new ForwardIndexTable(); private static RecordManager recman; private static HTree hashtable; // Constructors. // ------------------------------------------------------------------------- private ForwardIndexTable() { // Create a RecordManager named "SearchEngineDatabase" recman = RecordManagerFactory.createRecordManager ("SearchEngineDatabase"); // get the record id of the object named "ForwardIndexTable" long recid = recman.getNamedObject ("ForwardIndexTable"); if (recid != 0) { // load the hash table named"ForwardIndexTable"from the RecordManager hashtable = HTree.load (recman, recid); } else { // create a hash table in the RecordManager hashtable = HTree.createInstance (recman); // set the name of the hash table to "ForwardIndexTable" recman.setNamedObject ( "ForwardIndexTable", hashtable.getRecid() ); } } // Class methods. // ------------------------------------------------------------------------- /** * The class has only one instance. This method returns that unique one * instance of forward index table. * * @return the unique forward index table */ public static ForwardIndexTable getIndexTable() { return ForwardIndexTable.forwardIndexTable; } // Instance methods. // ------------------------------------------------------------------------- /** * This method returns the doc info with the given page id and word id. If * the doc info does not exist, it returns null. * * @param pageId the page id containing the doc info * @param wordId the word id of the doc info * @return the doc info, or null if it does not exist * @throws IOException */ @SuppressWarnings("unchecked") public DocInfo getDocInfo (int pageId, int wordId) throws IOException { List<DocInfo> docInfoList = (List<DocInfo>)ForwardIndexTable. hashtable.get (pageId); // Check if the list does not exist if(docInfoList == null) return null; else { // Find the doc info for(int i = 0; i < docInfoList.size(); i++) if(docInfoList.get (i).getId() == wordId) return docInfoList.get (i); return null; } } /** * This method inserts a doc info, if the doc info exists, it will be * replaced. * * @param id the page id the doc info belongs to * @param docInfo the associated doc info to be inserted * @throws IOException */ public void insertDocInfo (int id, DocInfo docInfo) throws IOException { @SuppressWarnings("unchecked") List<DocInfo> docInfoList = (List<DocInfo>)ForwardIndexTable. hashtable.get (id); // Check if the list does not exist if(docInfoList == null) docInfoList = new ArrayList<DocInfo>(); // Check if the doc info already exists, if yes then remove it for(int i = 0; i < docInfoList.size(); i++) { if(docInfoList.get (i).getId() == docInfo.getId()) { docInfoList.remove (i); break; } } // Add doc info to the list docInfoList.add (docInfo); // Add the list to the database ForwardIndexTable.hashtable.remove (id); ForwardIndexTable.hashtable.put (id, docInfoList); } /** * This method commits all changes since beginning of transaction and * terminates. * * @throws IOException */ public void terminate() throws IOException { recman.commit(); recman.close(); } }
src/main/java/hk/ust/comp4321/database/ForwardIndexTable.java
package hk.ust.comp4321.database; import jdbm.helper.FastIterator; import jdbm.htree.HTree; import jdbm.RecordManager; import jdbm.RecordManagerFactory; /** * * @author Alex Poon * */ public class ForwardIndexTable { // Constants and fields. // ------------------------------------------------------------------------- private static ForwardIndexTable forwardIndexTable = new ForwardIndexTable(); private static RecordManager recman; private static HTree hashtable; // Constructors. // ------------------------------------------------------------------------- private ForwardIndexTable() { // Create a RecordManager name "SearchEngineDatabase" recman = RecordManagerFactory.createRecordManager ("SearchEngineDatabase"); // get the record id of the object named "ForwardIndexTable" long recid = recman.getNamedObject ("ForwardIndexTable"); if (recid != 0) { // load the hash table named"ForwardIndexTable"from the RecordManager hashtable = HTree.load (recman, recid); } else { // create a hash table in the RecordManager hashtable = HTree.createInstance (recman); // set the name of the hash table to "ForwardIndexTable" recman.setNamedObject ( "ForwardIndexTable", hashtable.getRecid() ); } } // Class methods. // ------------------------------------------------------------------------- /** * The class has only one instance. This method returns that unique one * instance of forward index table. * * @return the unique forward index table */ public static ForwardIndexTable getIndexTable() { return ForwardIndexTable.forwardIndexTable; } // Instance methods. // ------------------------------------------------------------------------- /** * This method returns the page id with the given url which has been inserted * in the database. * * @param url the absolute url of the page * @return the page id of the given url */ public Integer getPageID (String url) { return ForwardIndexTable.hashtable.get (url); } /** * This method inserts a url which is not in the database and its associated * page id. * * @param url the url to be inserted * @param id the associated id to be inserted */ public void insertURL (String url, int id) { ForwardIndexTable.hashtable.put (url, id); } /** * This method checks a page url whether it has been already inserted in the * page table. * * @param url check the url whether it exists * @return true if the url exists */ public boolean hasURL (String url) { return (ForwardIndexTable.hashtable.get (url) != null); } }
Fix the whole structure
src/main/java/hk/ust/comp4321/database/ForwardIndexTable.java
Fix the whole structure
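A small, hypothetical usage sketch of the reworked ForwardIndexTable may help; it is not taken from the repository. Only the table methods themselves come from the code above, while the DocInfo constructor shown here (taking a word id) and its package location are assumptions, since the record never shows DocInfo's definition.

// Hypothetical usage sketch; `new DocInfo(42)` is an assumed constructor, not shown in the record above.
import java.io.IOException;
import hk.ust.comp4321.database.DocInfo;
import hk.ust.comp4321.database.ForwardIndexTable;

public class ForwardIndexExample {
    public static void main(String[] args) throws IOException {
        ForwardIndexTable table = ForwardIndexTable.getIndexTable(); // singleton backed by JDBM

        DocInfo info = new DocInfo(42);          // assumed constructor: word id only
        table.insertDocInfo(7, info);            // store the doc info under page id 7

        DocInfo found = table.getDocInfo(7, 42); // look up by page id and word id
        System.out.println(found != null ? "word 42 indexed for page 7" : "not found");

        table.terminate();                       // commit the JDBM transaction and close
    }
}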
Java
mit
50c2b9110a257dba21d3ba7eec74e839f4bb78f7
0
SpongePowered/Sponge,JBYoshi/SpongeCommon,sanman00/SpongeCommon,SpongePowered/SpongeCommon,Grinch/SpongeCommon,JBYoshi/SpongeCommon,sanman00/SpongeCommon,SpongePowered/Sponge,SpongePowered/SpongeCommon,SpongePowered/Sponge,Grinch/SpongeCommon
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.common.network; import com.flowpowered.math.vector.Vector3d; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.item.ItemStack; import net.minecraft.network.INetHandler; import net.minecraft.network.NetHandlerPlayServer; import net.minecraft.network.Packet; import net.minecraft.network.play.client.CPacketAnimation; import net.minecraft.network.play.client.CPacketClientSettings; import net.minecraft.network.play.client.CPacketClientStatus; import net.minecraft.network.play.client.CPacketCreativeInventoryAction; import net.minecraft.network.play.client.CPacketPlayer; import net.minecraft.network.play.client.CPacketPlayerDigging; import net.minecraft.network.play.client.CPacketPlayerTryUseItem; import net.minecraft.network.play.client.CPacketPlayerTryUseItemOnBlock; import net.minecraft.network.play.server.SPacketBlockChange; import net.minecraft.util.EnumHand; import net.minecraft.util.math.BlockPos; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.data.type.HandType; import org.spongepowered.api.data.type.HandTypes; import org.spongepowered.api.entity.living.Humanoid; import org.spongepowered.api.entity.living.player.Player; import org.spongepowered.api.event.SpongeEventFactory; import org.spongepowered.api.event.cause.Cause; import org.spongepowered.api.event.cause.NamedCause; import org.spongepowered.api.event.entity.living.humanoid.AnimateHandEvent; import org.spongepowered.api.item.inventory.ItemStackSnapshot; import org.spongepowered.api.util.Direction; import org.spongepowered.api.world.Location; import org.spongepowered.api.world.World; import org.spongepowered.common.SpongeImpl; import org.spongepowered.common.SpongeImplHooks; import org.spongepowered.common.block.BlockUtil; import org.spongepowered.common.event.InternalNamedCauses; import org.spongepowered.common.event.SpongeCommonEventFactory; import org.spongepowered.common.event.tracking.CauseTracker; import org.spongepowered.common.event.tracking.PhaseContext; import org.spongepowered.common.event.tracking.phase.packet.IPacketState; import org.spongepowered.common.event.tracking.phase.TrackingPhases; import org.spongepowered.common.event.tracking.phase.packet.PacketPhase; import org.spongepowered.common.interfaces.entity.player.IMixinEntityPlayerMP; 
import org.spongepowered.common.interfaces.world.IMixinWorldServer; import org.spongepowered.common.item.inventory.util.ItemStackUtil; import org.spongepowered.common.util.VecHelper; import java.lang.ref.WeakReference; import java.util.Optional; public class PacketUtil { private static final PhaseContext EMPTY_INVALID = PhaseContext.start().complete(); private static long lastInventoryOpenPacketTimeStamp = 0; private static long lastTryBlockPacketTimeStamp = 0; private static boolean lastTryBlockPacketItemResult = true; @SuppressWarnings({"rawtypes", "unchecked"}) public static void onProcessPacket(Packet packetIn, INetHandler netHandler) { if (netHandler instanceof NetHandlerPlayServer) { EntityPlayerMP packetPlayer = ((NetHandlerPlayServer) netHandler).playerEntity; // If true, logic was handled in Pre so return if (firePreEvents(packetIn, packetPlayer)) { return; } boolean ignoreCreative = false; // This is another horrible hack required since the client sends a C10 packet for every slot // containing an itemstack after a C16 packet in the following scenarios : // 1. Opening creative inventory after initial server join. // 2. Opening creative inventory again after making a change in previous inventory open. // // This is done in order to sync client inventory to server and would be fine if the C10 packet // included an Enum of some sort that defined what type of sync was happening. if (packetPlayer.interactionManager.isCreative() && (packetIn instanceof CPacketClientStatus && ((CPacketClientStatus) packetIn).getStatus() == CPacketClientStatus.State.OPEN_INVENTORY_ACHIEVEMENT)) { lastInventoryOpenPacketTimeStamp = System.currentTimeMillis(); } else if (creativeCheck(packetIn, packetPlayer)) { long packetDiff = System.currentTimeMillis() - lastInventoryOpenPacketTimeStamp; // If the time between packets is small enough, mark the current packet to be ignored for our event handler. 
if (packetDiff < 100) { ignoreCreative = true; } } // Don't process movement capture logic if player hasn't moved boolean ignoreMovementCapture = false; if (packetIn instanceof CPacketPlayer) { CPacketPlayer movingPacket = ((CPacketPlayer) packetIn); if (movingPacket instanceof CPacketPlayer.Rotation) { ignoreMovementCapture = true; } else if (packetPlayer.posX == movingPacket.x && packetPlayer.posY == movingPacket.y && packetPlayer.posZ == movingPacket.z) { ignoreMovementCapture = true; } } if (!CauseTracker.ENABLED || ignoreMovementCapture || (packetIn instanceof CPacketClientSettings)) { packetIn.processPacket(netHandler); } else { final ItemStackSnapshot cursor = ItemStackUtil.snapshotOf(packetPlayer.inventory.getItemStack()); final IMixinWorldServer world = (IMixinWorldServer) packetPlayer.world; final CauseTracker causeTracker = world.getCauseTracker(); final IPacketState packetState = TrackingPhases.PACKET.getStateForPacket(packetIn); if (packetState == null) { throw new IllegalArgumentException("Found a null packet phase for packet: " + packetIn.getClass()); } if (!TrackingPhases.PACKET.isPacketInvalid(packetIn, packetPlayer, packetState)) { PhaseContext context = PhaseContext.start() .add(NamedCause.source(packetPlayer)) .add(NamedCause.of(InternalNamedCauses.Packet.PACKET_PLAYER, packetPlayer)) .add(NamedCause.of(InternalNamedCauses.Packet.CAPTURED_PACKET, packetIn)) .add(NamedCause.of(InternalNamedCauses.Packet.CURSOR, cursor)) .add(NamedCause.of(InternalNamedCauses.Packet.IGNORING_CREATIVE, ignoreCreative)); TrackingPhases.PACKET.populateContext(packetIn, packetPlayer, packetState, context); context.owner((Player) packetPlayer); context.notifier((Player) packetPlayer); context.complete(); causeTracker.switchToPhase(packetState, context); } else { causeTracker.switchToPhase(PacketPhase.General.INVALID, EMPTY_INVALID); } packetIn.processPacket(netHandler); if (packetIn instanceof CPacketClientStatus) { // update the reference of player packetPlayer = ((NetHandlerPlayServer) netHandler).playerEntity; } causeTracker.completePhase(); ((IMixinEntityPlayerMP) packetPlayer).setPacketItem(null); } } else { // client packetIn.processPacket(netHandler); } } private static boolean creativeCheck(Packet<?> packetIn, EntityPlayerMP playerMP) { return packetIn instanceof CPacketCreativeInventoryAction; } private static boolean firePreEvents(Packet<?> packetIn, EntityPlayerMP playerMP) { if (packetIn instanceof CPacketAnimation) { CPacketAnimation packet = (CPacketAnimation) packetIn; SpongeCommonEventFactory.lastAnimationPacketTick = SpongeImpl.getServer().getTickCounter(); SpongeCommonEventFactory.lastAnimationPlayer = new WeakReference<>(playerMP); HandType handType = packet.getHand() == EnumHand.MAIN_HAND ? 
HandTypes.MAIN_HAND : HandTypes.OFF_HAND; AnimateHandEvent event = SpongeEventFactory.createAnimateHandEvent(Cause.of(NamedCause.source(playerMP)), handType, (Humanoid) playerMP); if (SpongeImpl.postEvent(event)) { return true; } return false; } else if (packetIn instanceof CPacketPlayerDigging) { SpongeCommonEventFactory.lastPrimaryPacketTick = SpongeImpl.getServer().getTickCounter(); CPacketPlayerDigging packet = (CPacketPlayerDigging) packetIn; ItemStack stack = playerMP.getHeldItemMainhand(); switch (packet.getAction()) { case DROP_ITEM: case DROP_ALL_ITEMS: if (!stack.isEmpty() && !playerMP.isSpectator()) { ((IMixinEntityPlayerMP) playerMP).setPacketItem(stack.copy()); } return false; case START_DESTROY_BLOCK: case ABORT_DESTROY_BLOCK: case STOP_DESTROY_BLOCK: Vector3d interactionPoint = VecHelper.toVector3d(packet.getPosition()); BlockSnapshot blockSnapshot = new Location<>((World) playerMP.world, interactionPoint).createSnapshot(); if(SpongeCommonEventFactory.callInteractItemEventPrimary(playerMP, stack, EnumHand.MAIN_HAND, Optional.of(interactionPoint), blockSnapshot).isCancelled()) { BlockUtil.sendClientBlockChange(playerMP, packet.getPosition()); return true; } BlockPos pos = packet.getPosition(); double d0 = playerMP.posX - ((double)pos.getX() + 0.5D); double d1 = playerMP.posY - ((double)pos.getY() + 0.5D) + 1.5D; double d2 = playerMP.posZ - ((double)pos.getZ() + 0.5D); double d3 = d0 * d0 + d1 * d1 + d2 * d2; double dist = SpongeImplHooks.getBlockReachDistance(playerMP)+ 1; dist *= dist; if (d3 > dist) { return true; } else if (pos.getY() >= SpongeImpl.getServer().getBuildLimit()) { return true; } if (packet.getAction() == CPacketPlayerDigging.Action.START_DESTROY_BLOCK) { if (SpongeCommonEventFactory.callInteractBlockEventPrimary(playerMP, blockSnapshot, EnumHand.MAIN_HAND, packet.getFacing()).isCancelled()) { BlockUtil.sendClientBlockChange(playerMP, pos); return true; } } return false; default: break; } } else if (packetIn instanceof CPacketPlayerTryUseItem) { CPacketPlayerTryUseItem packet = (CPacketPlayerTryUseItem) packetIn; SpongeCommonEventFactory.lastSecondaryPacketTick = SpongeImpl.getServer().getTickCounter(); long packetDiff = System.currentTimeMillis() - lastTryBlockPacketTimeStamp; // If the time between packets is small enough, use the last result. 
if (packetDiff < 100) { // Use previous result and avoid firing a second event return lastTryBlockPacketItemResult; } boolean isCancelled = SpongeCommonEventFactory.callInteractItemEventSecondary(playerMP, playerMP.getHeldItem(packet.getHand()), packet.getHand(), Optional.empty(), BlockSnapshot.NONE).isCancelled(); SpongeCommonEventFactory.callInteractBlockEventSecondary(Cause.of(NamedCause.source(playerMP)), Optional.empty(), BlockSnapshot.NONE, Direction.NONE, packet.getHand()); return isCancelled; } else if (packetIn instanceof CPacketPlayerTryUseItemOnBlock) { CPacketPlayerTryUseItemOnBlock packet = (CPacketPlayerTryUseItemOnBlock) packetIn; lastTryBlockPacketTimeStamp = System.currentTimeMillis(); SpongeCommonEventFactory.lastSecondaryPacketTick = SpongeImpl.getServer().getTickCounter(); Vector3d interactionPoint = VecHelper.toVector3d(packet.getPos()); BlockSnapshot blockSnapshot = new Location<>((World) playerMP.world, interactionPoint).createSnapshot(); boolean isCancelled = SpongeCommonEventFactory.callInteractItemEventSecondary(playerMP, playerMP.getHeldItem(packet.getHand()), packet.getHand(), Optional.of(interactionPoint), blockSnapshot).isCancelled(); lastTryBlockPacketItemResult = isCancelled; if(isCancelled) { // update client BlockPos pos = packet.getPos(); playerMP.connection.sendPacket(new SPacketBlockChange(playerMP.world, pos)); playerMP.connection.sendPacket(new SPacketBlockChange(playerMP.world, pos.offset(packet.getDirection()))); return true; } } return false; } }
src/main/java/org/spongepowered/common/network/PacketUtil.java
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.common.network; import com.flowpowered.math.vector.Vector3d; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.item.ItemStack; import net.minecraft.network.INetHandler; import net.minecraft.network.NetHandlerPlayServer; import net.minecraft.network.Packet; import net.minecraft.network.play.client.CPacketAnimation; import net.minecraft.network.play.client.CPacketClientSettings; import net.minecraft.network.play.client.CPacketClientStatus; import net.minecraft.network.play.client.CPacketCreativeInventoryAction; import net.minecraft.network.play.client.CPacketPlayerDigging; import net.minecraft.network.play.client.CPacketPlayerTryUseItem; import net.minecraft.network.play.client.CPacketPlayerTryUseItemOnBlock; import net.minecraft.network.play.server.SPacketBlockChange; import net.minecraft.util.EnumHand; import net.minecraft.util.math.BlockPos; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.data.type.HandType; import org.spongepowered.api.data.type.HandTypes; import org.spongepowered.api.entity.living.Humanoid; import org.spongepowered.api.entity.living.player.Player; import org.spongepowered.api.event.SpongeEventFactory; import org.spongepowered.api.event.cause.Cause; import org.spongepowered.api.event.cause.NamedCause; import org.spongepowered.api.event.entity.living.humanoid.AnimateHandEvent; import org.spongepowered.api.item.inventory.ItemStackSnapshot; import org.spongepowered.api.util.Direction; import org.spongepowered.api.world.Location; import org.spongepowered.api.world.World; import org.spongepowered.common.SpongeImpl; import org.spongepowered.common.SpongeImplHooks; import org.spongepowered.common.block.BlockUtil; import org.spongepowered.common.event.InternalNamedCauses; import org.spongepowered.common.event.SpongeCommonEventFactory; import org.spongepowered.common.event.tracking.CauseTracker; import org.spongepowered.common.event.tracking.PhaseContext; import org.spongepowered.common.event.tracking.phase.packet.IPacketState; import org.spongepowered.common.event.tracking.phase.TrackingPhases; import org.spongepowered.common.event.tracking.phase.packet.PacketPhase; import org.spongepowered.common.interfaces.entity.player.IMixinEntityPlayerMP; import 
org.spongepowered.common.interfaces.world.IMixinWorldServer; import org.spongepowered.common.item.inventory.util.ItemStackUtil; import org.spongepowered.common.util.VecHelper; import java.lang.ref.WeakReference; import java.util.Optional; public class PacketUtil { private static final PhaseContext EMPTY_INVALID = PhaseContext.start().complete(); private static long lastInventoryOpenPacketTimeStamp = 0; private static long lastTryBlockPacketTimeStamp = 0; private static boolean lastTryBlockPacketItemResult = true; @SuppressWarnings({"rawtypes", "unchecked"}) public static void onProcessPacket(Packet packetIn, INetHandler netHandler) { if (netHandler instanceof NetHandlerPlayServer) { EntityPlayerMP packetPlayer = ((NetHandlerPlayServer) netHandler).playerEntity; // If true, logic was handled in Pre so return if (firePreEvents(packetIn, packetPlayer)) { return; } boolean ignoreCreative = false; // This is another horrible hack required since the client sends a C10 packet for every slot // containing an itemstack after a C16 packet in the following scenarios : // 1. Opening creative inventory after initial server join. // 2. Opening creative inventory again after making a change in previous inventory open. // // This is done in order to sync client inventory to server and would be fine if the C10 packet // included an Enum of some sort that defined what type of sync was happening. if (packetPlayer.interactionManager.isCreative() && (packetIn instanceof CPacketClientStatus && ((CPacketClientStatus) packetIn).getStatus() == CPacketClientStatus.State.OPEN_INVENTORY_ACHIEVEMENT)) { lastInventoryOpenPacketTimeStamp = System.currentTimeMillis(); } else if (creativeCheck(packetIn, packetPlayer)) { long packetDiff = System.currentTimeMillis() - lastInventoryOpenPacketTimeStamp; // If the time between packets is small enough, mark the current packet to be ignored for our event handler. 
if (packetDiff < 100) { ignoreCreative = true; } } if (!CauseTracker.ENABLED || (packetIn instanceof CPacketClientSettings)) { packetIn.processPacket(netHandler); } else { final ItemStackSnapshot cursor = ItemStackUtil.snapshotOf(packetPlayer.inventory.getItemStack()); final IMixinWorldServer world = (IMixinWorldServer) packetPlayer.world; final CauseTracker causeTracker = world.getCauseTracker(); final IPacketState packetState = TrackingPhases.PACKET.getStateForPacket(packetIn); if (packetState == null) { throw new IllegalArgumentException("Found a null packet phase for packet: " + packetIn.getClass()); } if (!TrackingPhases.PACKET.isPacketInvalid(packetIn, packetPlayer, packetState)) { PhaseContext context = PhaseContext.start() .add(NamedCause.source(packetPlayer)) .add(NamedCause.of(InternalNamedCauses.Packet.PACKET_PLAYER, packetPlayer)) .add(NamedCause.of(InternalNamedCauses.Packet.CAPTURED_PACKET, packetIn)) .add(NamedCause.of(InternalNamedCauses.Packet.CURSOR, cursor)) .add(NamedCause.of(InternalNamedCauses.Packet.IGNORING_CREATIVE, ignoreCreative)); TrackingPhases.PACKET.populateContext(packetIn, packetPlayer, packetState, context); context.owner((Player) packetPlayer); context.notifier((Player) packetPlayer); context.complete(); causeTracker.switchToPhase(packetState, context); } else { causeTracker.switchToPhase(PacketPhase.General.INVALID, EMPTY_INVALID); } packetIn.processPacket(netHandler); if (packetIn instanceof CPacketClientStatus) { // update the reference of player packetPlayer = ((NetHandlerPlayServer) netHandler).playerEntity; } causeTracker.completePhase(); ((IMixinEntityPlayerMP) packetPlayer).setPacketItem(null); } } else { // client packetIn.processPacket(netHandler); } } private static boolean creativeCheck(Packet<?> packetIn, EntityPlayerMP playerMP) { return packetIn instanceof CPacketCreativeInventoryAction; } private static boolean firePreEvents(Packet<?> packetIn, EntityPlayerMP playerMP) { if (packetIn instanceof CPacketAnimation) { CPacketAnimation packet = (CPacketAnimation) packetIn; SpongeCommonEventFactory.lastAnimationPacketTick = SpongeImpl.getServer().getTickCounter(); SpongeCommonEventFactory.lastAnimationPlayer = new WeakReference<>(playerMP); HandType handType = packet.getHand() == EnumHand.MAIN_HAND ? 
HandTypes.MAIN_HAND : HandTypes.OFF_HAND; AnimateHandEvent event = SpongeEventFactory.createAnimateHandEvent(Cause.of(NamedCause.source(playerMP)), handType, (Humanoid) playerMP); if (SpongeImpl.postEvent(event)) { return true; } return false; } else if (packetIn instanceof CPacketPlayerDigging) { SpongeCommonEventFactory.lastPrimaryPacketTick = SpongeImpl.getServer().getTickCounter(); CPacketPlayerDigging packet = (CPacketPlayerDigging) packetIn; ItemStack stack = playerMP.getHeldItemMainhand(); switch (packet.getAction()) { case DROP_ITEM: case DROP_ALL_ITEMS: if (!stack.isEmpty() && !playerMP.isSpectator()) { ((IMixinEntityPlayerMP) playerMP).setPacketItem(stack.copy()); } return false; case START_DESTROY_BLOCK: case ABORT_DESTROY_BLOCK: case STOP_DESTROY_BLOCK: Vector3d interactionPoint = VecHelper.toVector3d(packet.getPosition()); BlockSnapshot blockSnapshot = new Location<>((World) playerMP.world, interactionPoint).createSnapshot(); if(SpongeCommonEventFactory.callInteractItemEventPrimary(playerMP, stack, EnumHand.MAIN_HAND, Optional.of(interactionPoint), blockSnapshot).isCancelled()) { BlockUtil.sendClientBlockChange(playerMP, packet.getPosition()); return true; } BlockPos pos = packet.getPosition(); double d0 = playerMP.posX - ((double)pos.getX() + 0.5D); double d1 = playerMP.posY - ((double)pos.getY() + 0.5D) + 1.5D; double d2 = playerMP.posZ - ((double)pos.getZ() + 0.5D); double d3 = d0 * d0 + d1 * d1 + d2 * d2; double dist = SpongeImplHooks.getBlockReachDistance(playerMP)+ 1; dist *= dist; if (d3 > dist) { return true; } else if (pos.getY() >= SpongeImpl.getServer().getBuildLimit()) { return true; } if (packet.getAction() == CPacketPlayerDigging.Action.START_DESTROY_BLOCK) { if (SpongeCommonEventFactory.callInteractBlockEventPrimary(playerMP, blockSnapshot, EnumHand.MAIN_HAND, packet.getFacing()).isCancelled()) { BlockUtil.sendClientBlockChange(playerMP, pos); return true; } } return false; default: break; } } else if (packetIn instanceof CPacketPlayerTryUseItem) { CPacketPlayerTryUseItem packet = (CPacketPlayerTryUseItem) packetIn; SpongeCommonEventFactory.lastSecondaryPacketTick = SpongeImpl.getServer().getTickCounter(); long packetDiff = System.currentTimeMillis() - lastTryBlockPacketTimeStamp; // If the time between packets is small enough, use the last result. 
if (packetDiff < 100) { // Use previous result and avoid firing a second event return lastTryBlockPacketItemResult; } boolean isCancelled = SpongeCommonEventFactory.callInteractItemEventSecondary(playerMP, playerMP.getHeldItem(packet.getHand()), packet.getHand(), Optional.empty(), BlockSnapshot.NONE).isCancelled(); SpongeCommonEventFactory.callInteractBlockEventSecondary(Cause.of(NamedCause.source(playerMP)), Optional.empty(), BlockSnapshot.NONE, Direction.NONE, packet.getHand()); return isCancelled; } else if (packetIn instanceof CPacketPlayerTryUseItemOnBlock) { CPacketPlayerTryUseItemOnBlock packet = (CPacketPlayerTryUseItemOnBlock) packetIn; lastTryBlockPacketTimeStamp = System.currentTimeMillis(); SpongeCommonEventFactory.lastSecondaryPacketTick = SpongeImpl.getServer().getTickCounter(); Vector3d interactionPoint = VecHelper.toVector3d(packet.getPos()); BlockSnapshot blockSnapshot = new Location<>((World) playerMP.world, interactionPoint).createSnapshot(); boolean isCancelled = SpongeCommonEventFactory.callInteractItemEventSecondary(playerMP, playerMP.getHeldItem(packet.getHand()), packet.getHand(), Optional.of(interactionPoint), blockSnapshot).isCancelled(); lastTryBlockPacketItemResult = isCancelled; if(isCancelled) { // update client BlockPos pos = packet.getPos(); playerMP.connection.sendPacket(new SPacketBlockChange(playerMP.world, pos)); playerMP.connection.sendPacket(new SPacketBlockChange(playerMP.world, pos.offset(packet.getDirection()))); return true; } } return false; } }
Don't capture player movement packets if no movement occurred.
src/main/java/org/spongepowered/common/network/PacketUtil.java
Don't capture player movement packets if no movement occurred.
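Illustrative only: the record above does not show the actual movement check, so the following is a minimal, self-contained sketch (hypothetical class name, threshold, and helper; not the SpongeCommon implementation) of the kind of comparison the commit message describes, i.e. skipping the capture path when a player movement packet reports no change in position or rotation.

// MovementPacketCheck.java - hypothetical standalone sketch, not SpongeCommon code.
public final class MovementPacketCheck {

    // Treat differences below this epsilon as "no movement"; the exact
    // threshold used by the real code is not shown in this record (assumption).
    private static final double EPSILON = 1.0E-4;

    /** Returns true if the packet-reported pose differs from the player's current pose. */
    static boolean hasMoved(double curX, double curY, double curZ, float curYaw, float curPitch,
                            double newX, double newY, double newZ, float newYaw, float newPitch) {
        return Math.abs(newX - curX) > EPSILON
                || Math.abs(newY - curY) > EPSILON
                || Math.abs(newZ - curZ) > EPSILON
                || Math.abs(newYaw - curYaw) > EPSILON
                || Math.abs(newPitch - curPitch) > EPSILON;
    }

    public static void main(String[] args) {
        // Identical pose: such a movement packet would not be captured.
        System.out.println(hasMoved(10, 64, 10, 90f, 0f, 10, 64, 10, 90f, 0f));   // false
        // Position changed: the packet would go through the normal capture/tracking path.
        System.out.println(hasMoved(10, 64, 10, 90f, 0f, 10.5, 64, 10, 90f, 0f)); // true
    }
}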
Java
agpl-3.0
9627360fac05b8d13830660acceaa8f4f7a824ca
0
Freeyourgadget/Gadgetbridge,Freeyourgadget/Gadgetbridge,Freeyourgadget/Gadgetbridge,Freeyourgadget/Gadgetbridge
/* Copyright (C) 2015-2020 Andreas Shimokawa, boun, Carsten Pfeiffer, Daniel Dakhno, Daniele Gobbetti, JohnnySun, jonnsoft, Lem Dulfo, Taavi Eomäe, Uwe Hermann This file is part of Gadgetbridge. Gadgetbridge is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Gadgetbridge is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package nodomain.freeyourgadget.gadgetbridge.activities; import android.Manifest; import android.app.Activity; import android.app.AlertDialog; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothManager; import android.bluetooth.le.BluetoothLeScanner; import android.bluetooth.le.ScanCallback; import android.bluetooth.le.ScanFilter; import android.bluetooth.le.ScanRecord; import android.bluetooth.le.ScanResult; import android.bluetooth.le.ScanSettings; import android.companion.AssociationRequest; import android.companion.BluetoothDeviceFilter; import android.companion.CompanionDeviceManager; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.content.IntentSender; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.location.LocationManager; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.os.ParcelUuid; import android.os.Parcelable; import android.provider.Settings; import android.view.View; import android.widget.AdapterView; import android.widget.Button; import android.widget.ListView; import android.widget.ProgressBar; import android.widget.Toast; import androidx.annotation.NonNull; import androidx.annotation.RequiresApi; import androidx.core.app.ActivityCompat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.List; import java.util.Objects; import nodomain.freeyourgadget.gadgetbridge.GBApplication; import nodomain.freeyourgadget.gadgetbridge.R; import nodomain.freeyourgadget.gadgetbridge.activities.devicesettings.DeviceSettingsActivity; import nodomain.freeyourgadget.gadgetbridge.adapter.DeviceCandidateAdapter; import nodomain.freeyourgadget.gadgetbridge.devices.DeviceCoordinator; import nodomain.freeyourgadget.gadgetbridge.impl.GBDevice; import nodomain.freeyourgadget.gadgetbridge.impl.GBDeviceCandidate; import nodomain.freeyourgadget.gadgetbridge.model.DeviceType; import nodomain.freeyourgadget.gadgetbridge.util.AndroidUtils; import nodomain.freeyourgadget.gadgetbridge.util.DeviceHelper; import nodomain.freeyourgadget.gadgetbridge.util.GB; import nodomain.freeyourgadget.gadgetbridge.util.Prefs; import static nodomain.freeyourgadget.gadgetbridge.util.GB.toast; public class DiscoveryActivity extends AbstractGBActivity implements AdapterView.OnItemClickListener, AdapterView.OnItemLongClickListener { private static final Logger LOG = LoggerFactory.getLogger(DiscoveryActivity.class); private static final 
long SCAN_DURATION = 30000; // 30s private static final int REQUEST_CODE = 1; private final Handler handler = new Handler(); private final ArrayList<GBDeviceCandidate> deviceCandidates = new ArrayList<>(); private ScanCallback newBLEScanCallback = null; /** * Use old BLE scanning **/ private boolean oldBleScanning = false; /** * If already bonded devices are to be ignored when scanning */ private boolean ignoreBonded = true; /** * If new CompanionDevice-type pairing is enabled on newer Androids **/ private boolean enableCompanionDevicePairing = false; private ProgressBar bluetoothProgress; private ProgressBar bluetoothLEProgress; private DeviceCandidateAdapter deviceCandidateAdapter; private final BluetoothAdapter.LeScanCallback leScanCallback = new BluetoothAdapter.LeScanCallback() { @Override public void onLeScan(BluetoothDevice device, int rssi, byte[] scanRecord) { //logMessageContent(scanRecord); handleDeviceFound(device, (short) rssi); } }; private BluetoothAdapter adapter; private Button startButton; private Scanning isScanning = Scanning.SCANNING_OFF; private final Runnable stopRunnable = new Runnable() { @Override public void run() { if (isScanning == Scanning.SCANNING_BT_NEXT_BLE) { // Start the next scan in the series stopDiscovery(); startDiscovery(Scanning.SCANNING_BLE); } else { stopDiscovery(); } } }; private GBDeviceCandidate bondingDevice; private final BroadcastReceiver bluetoothReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { switch (Objects.requireNonNull(intent.getAction())) { case BluetoothAdapter.ACTION_DISCOVERY_STARTED: if (isScanning != Scanning.SCANNING_BLE) { if (isScanning != Scanning.SCANNING_BT_NEXT_BLE) { setIsScanning(Scanning.SCANNING_BT); } startButton.setText(getString(R.string.discovery_stop_scanning)); } break; case BluetoothAdapter.ACTION_DISCOVERY_FINISHED: handler.post(new Runnable() { @Override public void run() { // continue with LE scan, if available if (isScanning == Scanning.SCANNING_BT || isScanning == Scanning.SCANNING_BT_NEXT_BLE) { checkAndRequestLocationPermission(); stopDiscovery(); startDiscovery(Scanning.SCANNING_BLE); } else { discoveryFinished(); } } }); break; case BluetoothAdapter.ACTION_STATE_CHANGED: int newState = intent.getIntExtra(BluetoothAdapter.EXTRA_STATE, BluetoothAdapter.STATE_OFF); bluetoothStateChanged(newState); break; case BluetoothDevice.ACTION_FOUND: { BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); short rssi = intent.getShortExtra(BluetoothDevice.EXTRA_RSSI, GBDevice.RSSI_UNKNOWN); handleDeviceFound(device, rssi); break; } case BluetoothDevice.ACTION_UUID: { BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); short rssi = intent.getShortExtra(BluetoothDevice.EXTRA_RSSI, GBDevice.RSSI_UNKNOWN); Parcelable[] uuids = intent.getParcelableArrayExtra(BluetoothDevice.EXTRA_UUID); ParcelUuid[] uuids2 = AndroidUtils.toParcelUuids(uuids); handleDeviceFound(device, rssi, uuids2); break; } case BluetoothDevice.ACTION_BOND_STATE_CHANGED: { BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); if (device != null && bondingDevice != null && device.getAddress().equals(bondingDevice.getMacAddress())) { int bondState = intent.getIntExtra(BluetoothDevice.EXTRA_BOND_STATE, BluetoothDevice.BOND_NONE); if (bondState == BluetoothDevice.BOND_BONDED) { handleDeviceBonded(); } } } } } }; private void connectAndFinish(GBDevice device) { toast(DiscoveryActivity.this, 
getString(R.string.discovery_trying_to_connect_to, device.getName()), Toast.LENGTH_SHORT, GB.INFO); GBApplication.deviceService().connect(device, true); finish(); } private void createBond(final GBDeviceCandidate deviceCandidate, int bondingStyle) { if (bondingStyle == DeviceCoordinator.BONDING_STYLE_NONE) { // Do nothing return; } else if (bondingStyle == DeviceCoordinator.BONDING_STYLE_ASK) { new AlertDialog.Builder(this) .setCancelable(true) .setTitle(DiscoveryActivity.this.getString(R.string.discovery_pair_title, deviceCandidate.getName())) .setMessage(DiscoveryActivity.this.getString(R.string.discovery_pair_question)) .setPositiveButton(DiscoveryActivity.this.getString(R.string.discovery_yes_pair), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { doCreatePair(deviceCandidate); } }) .setNegativeButton(R.string.discovery_dont_pair, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { GBDevice device = DeviceHelper.getInstance().toSupportedDevice(deviceCandidate); connectAndFinish(device); } }) .show(); } else { doCreatePair(deviceCandidate); } LOG.debug("Bonding initiated"); } private void doCreatePair(GBDeviceCandidate deviceCandidate) { toast(DiscoveryActivity.this, getString(R.string.discovery_attempting_to_pair, deviceCandidate.getName()), Toast.LENGTH_SHORT, GB.INFO); if (enableCompanionDevicePairing && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { companionDevicePair(deviceCandidate); } else { deviceBond(deviceCandidate); } } private void deviceBond(GBDeviceCandidate deviceCandidate) { if (deviceCandidate.getDevice().createBond()) { // Async, wait for bonding event to finish this activity LOG.info("Bonding in progress..."); bondingDevice = deviceCandidate; } else { toast(DiscoveryActivity.this, getString(R.string.discovery_bonding_failed_immediately, deviceCandidate.getName()), Toast.LENGTH_SHORT, GB.ERROR); } } public void logMessageContent(byte[] value) { if (value != null) { LOG.warn("DATA: " + GB.hexdump(value, 0, value.length)); } } @RequiresApi(Build.VERSION_CODES.O) private void companionDevicePair(final GBDeviceCandidate deviceCandidate) { CompanionDeviceManager deviceManager = getSystemService(CompanionDeviceManager.class); BluetoothDeviceFilter deviceFilter = new BluetoothDeviceFilter.Builder() .setAddress(deviceCandidate.getMacAddress()) .build(); AssociationRequest pairingRequest = new AssociationRequest.Builder() .addDeviceFilter(deviceFilter) .setSingleDevice(true) .build(); deviceManager.associate(pairingRequest, new CompanionDeviceManager.Callback() { @Override public void onFailure(CharSequence error) { toast(DiscoveryActivity.this, getString(R.string.discovery_bonding_failed_immediately, deviceCandidate.getName()), Toast.LENGTH_SHORT, GB.ERROR); } @Override public void onDeviceFound(IntentSender chooserLauncher) { try { startIntentSenderForResult(chooserLauncher, REQUEST_CODE, null, 0, 0, 0); } catch (IntentSender.SendIntentException e) { e.printStackTrace(); } } }, null ); } @RequiresApi(Build.VERSION_CODES.O) @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == REQUEST_CODE && resultCode == Activity.RESULT_OK) { BluetoothDevice deviceToPair = data.getParcelableExtra(CompanionDeviceManager.EXTRA_DEVICE); if (deviceToPair != null) { deviceBond(new GBDeviceCandidate(deviceToPair, (short) 0, null)); handleDeviceBonded(); } } } private void 
handleDeviceBonded() { if (bondingDevice == null) { LOG.error("deviceCandidate was null! Can't handle bonded device!"); return; } toast(DiscoveryActivity.this, getString(R.string.discovery_successfully_bonded, bondingDevice.getName()), Toast.LENGTH_SHORT, GB.INFO); GBDevice device = DeviceHelper.getInstance().toSupportedDevice(bondingDevice); connectAndFinish(device); } @RequiresApi(Build.VERSION_CODES.LOLLIPOP) private ScanCallback getScanCallback() { if (newBLEScanCallback != null) { return newBLEScanCallback; } newBLEScanCallback = new ScanCallback() { @RequiresApi(Build.VERSION_CODES.LOLLIPOP) @Override public void onScanResult(int callbackType, ScanResult result) { super.onScanResult(callbackType, result); try { ScanRecord scanRecord = result.getScanRecord(); ParcelUuid[] uuids = null; if (scanRecord != null) { //logMessageContent(scanRecord.getBytes()); List<ParcelUuid> serviceUuids = scanRecord.getServiceUuids(); if (serviceUuids != null) { uuids = serviceUuids.toArray(new ParcelUuid[0]); } } LOG.warn(result.getDevice().getName() + ": " + ((scanRecord != null) ? scanRecord.getBytes().length : -1)); handleDeviceFound(result.getDevice(), (short) result.getRssi(), uuids); } catch (NullPointerException e) { LOG.warn("Error handling scan result", e); } } }; return newBLEScanCallback; } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Prefs prefs = GBApplication.getPrefs(); ignoreBonded = prefs.getBoolean("ignore_bonded_devices", true); oldBleScanning = prefs.getBoolean("disable_new_ble_scanning", false); if (oldBleScanning) { LOG.info("New BLE scanning disabled via settings, using old method"); } enableCompanionDevicePairing = prefs.getBoolean("enable_companiondevice_pairing", true); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) { enableCompanionDevicePairing = false; // No support below 26 } setContentView(R.layout.activity_discovery); startButton = findViewById(R.id.discovery_start); startButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { onStartButtonClick(startButton); } }); bluetoothProgress = findViewById(R.id.discovery_progressbar); bluetoothProgress.setProgress(0); bluetoothProgress.setIndeterminate(true); bluetoothProgress.setVisibility(View.GONE); ListView deviceCandidatesView = findViewById(R.id.discovery_device_candidates_list); bluetoothLEProgress = findViewById(R.id.discovery_ble_progressbar); bluetoothLEProgress.setProgress(0); bluetoothLEProgress.setIndeterminate(true); bluetoothLEProgress.setVisibility(View.GONE); deviceCandidateAdapter = new DeviceCandidateAdapter(this, deviceCandidates); deviceCandidatesView.setAdapter(deviceCandidateAdapter); deviceCandidatesView.setOnItemClickListener(this); deviceCandidatesView.setOnItemLongClickListener(this); IntentFilter bluetoothIntents = new IntentFilter(); bluetoothIntents.addAction(BluetoothDevice.ACTION_FOUND); bluetoothIntents.addAction(BluetoothDevice.ACTION_UUID); bluetoothIntents.addAction(BluetoothDevice.ACTION_BOND_STATE_CHANGED); bluetoothIntents.addAction(BluetoothAdapter.ACTION_DISCOVERY_STARTED); bluetoothIntents.addAction(BluetoothAdapter.ACTION_DISCOVERY_FINISHED); bluetoothIntents.addAction(BluetoothAdapter.ACTION_STATE_CHANGED); registerReceiver(bluetoothReceiver, bluetoothIntents); checkAndRequestLocationPermission(); startDiscovery(Scanning.SCANNING_BT_NEXT_BLE); } public void onStartButtonClick(View button) { LOG.debug("Start button clicked"); if (isScanning()) { stopDiscovery(); } else { if 
(GB.supportsBluetoothLE()) { startDiscovery(Scanning.SCANNING_BT_NEXT_BLE); } else { startDiscovery(Scanning.SCANNING_BT); } } } @Override protected void onSaveInstanceState(@NonNull Bundle outState) { super.onSaveInstanceState(outState); outState.putParcelableArrayList("deviceCandidates", deviceCandidates); } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); ArrayList<Parcelable> restoredCandidates = savedInstanceState.getParcelableArrayList("deviceCandidates"); if (restoredCandidates != null) { deviceCandidates.clear(); for (Parcelable p : restoredCandidates) { deviceCandidates.add((GBDeviceCandidate) p); } } } @Override protected void onDestroy() { try { unregisterReceiver(bluetoothReceiver); } catch (IllegalArgumentException e) { LOG.warn("Tried to unregister Bluetooth Receiver that wasn't registered"); LOG.warn(e.getMessage()); } super.onDestroy(); } private void handleDeviceFound(BluetoothDevice device, short rssi) { if (device.getName() != null) { if (handleDeviceFound(device, rssi, null)) { LOG.info("found supported device " + device.getName() + " without scanning services, skipping service scan."); return; } } ParcelUuid[] uuids = device.getUuids(); if (uuids == null) { if (device.fetchUuidsWithSdp()) { return; } } handleDeviceFound(device, rssi, uuids); } private boolean handleDeviceFound(BluetoothDevice device, short rssi, ParcelUuid[] uuids) { LOG.debug("found device: " + device.getName() + ", " + device.getAddress()); if (LOG.isDebugEnabled()) { if (uuids != null && uuids.length > 0) { for (ParcelUuid uuid : uuids) { LOG.debug(" supports uuid: " + uuid.toString()); } } } if (device.getBondState() == BluetoothDevice.BOND_BONDED && ignoreBonded) { return true; // Ignore already bonded devices } GBDeviceCandidate candidate = new GBDeviceCandidate(device, rssi, uuids); DeviceType deviceType = DeviceHelper.getInstance().getSupportedType(candidate); if (deviceType.isSupported()) { candidate.setDeviceType(deviceType); LOG.info("Recognized supported device: " + candidate); int index = deviceCandidates.indexOf(candidate); if (index >= 0) { deviceCandidates.set(index, candidate); // replace } else { deviceCandidates.add(candidate); } deviceCandidateAdapter.notifyDataSetChanged(); return true; } return false; } private void startDiscovery(Scanning what) { if (isScanning()) { LOG.warn("Not starting discovery, because already scanning."); return; } LOG.info("Starting discovery: " + what); startButton.setText(getString(R.string.discovery_stop_scanning)); if (ensureBluetoothReady() && isScanning == Scanning.SCANNING_OFF) { if (what == Scanning.SCANNING_BT || what == Scanning.SCANNING_BT_NEXT_BLE) { startBTDiscovery(what); } else if (what == Scanning.SCANNING_BLE && GB.supportsBluetoothLE()) { if (oldBleScanning || Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { startOldBTLEDiscovery(); } else { startBTLEDiscovery(); } } else { discoveryFinished(); toast(DiscoveryActivity.this, getString(R.string.discovery_enable_bluetooth), Toast.LENGTH_SHORT, GB.ERROR); } } else { discoveryFinished(); toast(DiscoveryActivity.this, getString(R.string.discovery_enable_bluetooth), Toast.LENGTH_SHORT, GB.ERROR); } } private void stopDiscovery() { LOG.info("Stopping discovery"); if (isScanning()) { Scanning wasScanning = isScanning; if (wasScanning == Scanning.SCANNING_BT || wasScanning == Scanning.SCANNING_BT_NEXT_BLE) { stopBTDiscovery(); } else if (wasScanning == Scanning.SCANNING_BLE) { if (oldBleScanning || 
Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { stopOldBLEDiscovery(); } else { stopBLEDiscovery(); } } discoveryFinished(); handler.removeMessages(0, stopRunnable); } else { discoveryFinished(); } } private boolean isScanning() { return isScanning != Scanning.SCANNING_OFF; } private void startOldBTLEDiscovery() { LOG.info("Starting old BLE discovery"); setIsScanning(Scanning.SCANNING_BLE); handler.removeMessages(0, stopRunnable); handler.sendMessageDelayed(getPostMessage(stopRunnable), SCAN_DURATION); adapter.startLeScan(leScanCallback); bluetoothLEProgress.setVisibility(View.VISIBLE); } private void stopOldBLEDiscovery() { if (adapter != null) { adapter.stopLeScan(leScanCallback); setIsScanning(Scanning.SCANNING_OFF); LOG.info("Stopped old BLE discovery"); } bluetoothLEProgress.setVisibility(View.GONE); } /* New BTLE Discovery uses startScan (List<ScanFilter> filters, ScanSettings settings, ScanCallback callback) */ @RequiresApi(Build.VERSION_CODES.LOLLIPOP) private void startBTLEDiscovery() { LOG.info("Starting BLE discovery"); setIsScanning(Scanning.SCANNING_BLE); handler.removeMessages(0, stopRunnable); handler.sendMessageDelayed(getPostMessage(stopRunnable), SCAN_DURATION); // Filters being non-null would be a very good idea with background scan, but in this case, // not really required. adapter.getBluetoothLeScanner().startScan(null, getScanSettings(), getScanCallback()); bluetoothLEProgress.setVisibility(View.VISIBLE); } @RequiresApi(Build.VERSION_CODES.LOLLIPOP) private void stopBLEDiscovery() { if (adapter == null) { return; } BluetoothLeScanner bluetoothLeScanner = adapter.getBluetoothLeScanner(); if (bluetoothLeScanner == null) { LOG.warn("Could not get BluetoothLeScanner()!"); return; } if (newBLEScanCallback == null) { LOG.warn("newLeScanCallback == null!"); return; } try { bluetoothLeScanner.stopScan(newBLEScanCallback); } catch (NullPointerException e) { LOG.warn("Internal NullPointerException when stopping the scan!"); return; } bluetoothLEProgress.setVisibility(View.GONE); setIsScanning(Scanning.SCANNING_OFF); LOG.debug("Stopped BLE discovery"); } /** * Starts a regular Bluetooth scan * * @param what The scan type, only either SCANNING_BT or SCANNING_BT_NEXT_BLE! 
*/ private void startBTDiscovery(Scanning what) { LOG.info("Starting BT discovery"); handler.removeMessages(0, stopRunnable); handler.sendMessageDelayed(getPostMessage(stopRunnable), SCAN_DURATION); if (adapter.startDiscovery()) { LOG.debug("Discovery starting successful"); bluetoothProgress.setVisibility(View.VISIBLE); setIsScanning(what); } else { LOG.error("Discovery starting failed"); bluetoothProgress.setVisibility(View.GONE); setIsScanning(Scanning.SCANNING_OFF); } } private void stopBTDiscovery() { if (adapter != null) { adapter.cancelDiscovery(); bluetoothProgress.setVisibility(View.GONE); setIsScanning(Scanning.SCANNING_OFF); LOG.info("Stopped BT discovery"); } } private void discoveryFinished() { if (isScanning != Scanning.SCANNING_OFF) { LOG.warn("Scan was not properly stopped: " + isScanning); } setIsScanning(Scanning.SCANNING_OFF); } private void setIsScanning(Scanning to) { this.isScanning = to; if (isScanning == Scanning.SCANNING_OFF) { startButton.setText(getString(R.string.discovery_start_scanning)); } else { startButton.setText(getString(R.string.discovery_stop_scanning)); } } private void bluetoothStateChanged(int newState) { if (newState == BluetoothAdapter.STATE_ON) { this.adapter = BluetoothAdapter.getDefaultAdapter(); startButton.setEnabled(true); } else { this.adapter = null; startButton.setEnabled(false); } discoveryFinished(); } private boolean checkBluetoothAvailable() { BluetoothManager bluetoothService = (BluetoothManager) getSystemService(BLUETOOTH_SERVICE); if (bluetoothService == null) { LOG.warn("No bluetooth service available"); this.adapter = null; return false; } BluetoothAdapter adapter = bluetoothService.getAdapter(); if (adapter == null) { LOG.warn("No bluetooth adapter available"); this.adapter = null; return false; } if (!adapter.isEnabled()) { LOG.warn("Bluetooth not enabled"); Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE); startActivity(enableBtIntent); this.adapter = null; return false; } this.adapter = adapter; return true; } private boolean ensureBluetoothReady() { boolean available = checkBluetoothAvailable(); startButton.setEnabled(available); if (available) { adapter.cancelDiscovery(); // must not return the result of cancelDiscovery() // appears to return false when currently not scanning return true; } return false; } @RequiresApi(Build.VERSION_CODES.LOLLIPOP) private ScanSettings getScanSettings() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { return new ScanSettings.Builder() .setCallbackType(android.bluetooth.le.ScanSettings.CALLBACK_TYPE_ALL_MATCHES) .setScanMode(android.bluetooth.le.ScanSettings.SCAN_MODE_LOW_LATENCY) .setMatchMode(android.bluetooth.le.ScanSettings.MATCH_MODE_AGGRESSIVE) .setPhy(android.bluetooth.le.ScanSettings.PHY_LE_ALL_SUPPORTED) .setNumOfMatches(android.bluetooth.le.ScanSettings.MATCH_NUM_ONE_ADVERTISEMENT) .build(); } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { return new ScanSettings.Builder() .setCallbackType(android.bluetooth.le.ScanSettings.CALLBACK_TYPE_ALL_MATCHES) .setScanMode(android.bluetooth.le.ScanSettings.SCAN_MODE_LOW_LATENCY) .setMatchMode(android.bluetooth.le.ScanSettings.MATCH_MODE_AGGRESSIVE) .setNumOfMatches(android.bluetooth.le.ScanSettings.MATCH_NUM_ONE_ADVERTISEMENT) .build(); } else { return new ScanSettings.Builder() .setScanMode(android.bluetooth.le.ScanSettings.SCAN_MODE_LOW_LATENCY) .build(); } } private List<ScanFilter> getScanFilters() { List<ScanFilter> allFilters = new ArrayList<>(); for (DeviceCoordinator coordinator : 
DeviceHelper.getInstance().getAllCoordinators()) { allFilters.addAll(coordinator.createBLEScanFilters()); } return allFilters; } private Message getPostMessage(Runnable runnable) { Message message = Message.obtain(handler, runnable); message.obj = runnable; return message; } private void checkAndRequestLocationPermission() { if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) { LOG.error("No permission to access coarse location!"); toast(DiscoveryActivity.this, getString(R.string.error_no_location_access), Toast.LENGTH_SHORT, GB.ERROR); ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_COARSE_LOCATION}, 0); } if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED) { LOG.error("No permission to access fine location!"); toast(DiscoveryActivity.this, getString(R.string.error_no_location_access), Toast.LENGTH_SHORT, GB.ERROR); ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_FINE_LOCATION}, 0); } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.ACCESS_BACKGROUND_LOCATION) != PackageManager.PERMISSION_GRANTED) { LOG.error("No permission to access background location!"); toast(DiscoveryActivity.this, getString(R.string.error_no_location_access), Toast.LENGTH_SHORT, GB.ERROR); ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_BACKGROUND_LOCATION}, 0); } } LocationManager locationManager = (LocationManager) DiscoveryActivity.this.getSystemService(Context.LOCATION_SERVICE); try { if (locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER) || locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER)) { // Do nothing LOG.debug("Some location provider is enabled, assuming location is enabled"); } else { toast(DiscoveryActivity.this, getString(R.string.require_location_provider), Toast.LENGTH_LONG, GB.ERROR); DiscoveryActivity.this.startActivity(new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS)); // We can't be sure location was enabled, cancel scan start and wait for new user action toast(DiscoveryActivity.this, getString(R.string.error_location_enabled_mandatory), Toast.LENGTH_SHORT, GB.ERROR); return; } } catch (Exception ex) { LOG.error("Exception when checking location status: ", ex); } } @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { GBDeviceCandidate deviceCandidate = deviceCandidates.get(position); if (deviceCandidate == null) { LOG.error("Device candidate clicked, but item not found"); return; } stopDiscovery(); DeviceCoordinator coordinator = DeviceHelper.getInstance().getCoordinator(deviceCandidate); LOG.info("Using device candidate " + deviceCandidate + " with coordinator: " + coordinator.getClass()); if (coordinator.getBondingStyle() == DeviceCoordinator.BONDING_STYLE_REQUIRE_KEY) { SharedPreferences sharedPrefs = GBApplication.getDeviceSpecificSharedPrefs(deviceCandidate.getMacAddress()); String authKey = sharedPrefs.getString("authkey", null); if (authKey == null || authKey.isEmpty() || authKey.getBytes().length < 34 || !authKey.startsWith("0x")) { toast(DiscoveryActivity.this, getString(R.string.discovery_need_to_enter_authkey), Toast.LENGTH_LONG, GB.WARN); return; } } Class<? 
extends Activity> pairingActivity = coordinator.getPairingActivity(); if (pairingActivity != null) { Intent intent = new Intent(this, pairingActivity); intent.putExtra(DeviceCoordinator.EXTRA_DEVICE_CANDIDATE, deviceCandidate); startActivity(intent); } else { GBDevice device = DeviceHelper.getInstance().toSupportedDevice(deviceCandidate); int bondingStyle = coordinator.getBondingStyle(); if (bondingStyle == DeviceCoordinator.BONDING_STYLE_NONE) { LOG.info("No bonding needed, according to coordinator, so connecting right away"); connectAndFinish(device); return; } try { BluetoothDevice btDevice = adapter.getRemoteDevice(deviceCandidate.getMacAddress()); switch (btDevice.getBondState()) { case BluetoothDevice.BOND_NONE: { createBond(deviceCandidate, bondingStyle); break; } case BluetoothDevice.BOND_BONDING: { // async, wait for bonding event to finish this activity bondingDevice = deviceCandidate; break; } case BluetoothDevice.BOND_BONDED: { bondingDevice = deviceCandidate; handleDeviceBonded(); break; } } } catch (Exception e) { LOG.error("Error pairing device: " + deviceCandidate.getMacAddress()); } } } @Override public boolean onItemLongClick(AdapterView<?> adapterView, View view, int position, long id) { GBDeviceCandidate deviceCandidate = deviceCandidates.get(position); if (deviceCandidate == null) { LOG.error("Device candidate clicked, but item not found"); return true; } DeviceCoordinator coordinator = DeviceHelper.getInstance().getCoordinator(deviceCandidate); GBDevice device = DeviceHelper.getInstance().toSupportedDevice(deviceCandidate); if (coordinator.getSupportedDeviceSpecificSettings(device) == null) { return true; } Intent startIntent; startIntent = new Intent(this, DeviceSettingsActivity.class); startIntent.putExtra(GBDevice.EXTRA_DEVICE, device); startActivity(startIntent); return true; } @Override protected void onPause() { super.onPause(); stopBTDiscovery(); if (oldBleScanning) { stopOldBLEDiscovery(); } else { if (GBApplication.isRunningLollipopOrLater()) { stopBLEDiscovery(); } } } private enum Scanning { /** * Regular Bluetooth scan */ SCANNING_BT, /** * Regular Bluetooth scan but when ends, start BLE scan */ SCANNING_BT_NEXT_BLE, /** * Regular BLE scan */ SCANNING_BLE, /** * Scanning has ended or hasn't been started */ SCANNING_OFF } }
app/src/main/java/nodomain/freeyourgadget/gadgetbridge/activities/DiscoveryActivity.java
/* Copyright (C) 2015-2020 Andreas Shimokawa, boun, Carsten Pfeiffer, Daniel Dakhno, Daniele Gobbetti, JohnnySun, jonnsoft, Lem Dulfo, Taavi Eomäe, Uwe Hermann This file is part of Gadgetbridge. Gadgetbridge is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Gadgetbridge is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package nodomain.freeyourgadget.gadgetbridge.activities; import android.Manifest; import android.app.Activity; import android.app.AlertDialog; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothManager; import android.bluetooth.le.BluetoothLeScanner; import android.bluetooth.le.ScanCallback; import android.bluetooth.le.ScanFilter; import android.bluetooth.le.ScanRecord; import android.bluetooth.le.ScanResult; import android.bluetooth.le.ScanSettings; import android.companion.AssociationRequest; import android.companion.BluetoothDeviceFilter; import android.companion.CompanionDeviceManager; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.content.IntentSender; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.location.LocationManager; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.os.ParcelUuid; import android.os.Parcelable; import android.provider.Settings; import android.view.View; import android.widget.AdapterView; import android.widget.Button; import android.widget.ListView; import android.widget.ProgressBar; import android.widget.Toast; import androidx.annotation.NonNull; import androidx.annotation.RequiresApi; import androidx.core.app.ActivityCompat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.List; import java.util.Objects; import nodomain.freeyourgadget.gadgetbridge.GBApplication; import nodomain.freeyourgadget.gadgetbridge.R; import nodomain.freeyourgadget.gadgetbridge.activities.devicesettings.DeviceSettingsActivity; import nodomain.freeyourgadget.gadgetbridge.adapter.DeviceCandidateAdapter; import nodomain.freeyourgadget.gadgetbridge.devices.DeviceCoordinator; import nodomain.freeyourgadget.gadgetbridge.impl.GBDevice; import nodomain.freeyourgadget.gadgetbridge.impl.GBDeviceCandidate; import nodomain.freeyourgadget.gadgetbridge.model.DeviceType; import nodomain.freeyourgadget.gadgetbridge.util.AndroidUtils; import nodomain.freeyourgadget.gadgetbridge.util.DeviceHelper; import nodomain.freeyourgadget.gadgetbridge.util.GB; import nodomain.freeyourgadget.gadgetbridge.util.Prefs; import static nodomain.freeyourgadget.gadgetbridge.util.GB.toast; public class DiscoveryActivity extends AbstractGBActivity implements AdapterView.OnItemClickListener, AdapterView.OnItemLongClickListener { private static final Logger LOG = LoggerFactory.getLogger(DiscoveryActivity.class); private static final 
long SCAN_DURATION = 30000; // 30s private static final int REQUEST_CODE = 1; private final Handler handler = new Handler(); private final ArrayList<GBDeviceCandidate> deviceCandidates = new ArrayList<>(); private ScanCallback newBLEScanCallback = null; /** * Use old BLE scanning **/ private boolean oldBleScanning = false; /** * If already bonded devices are to be ignored when scanning */ private boolean ignoreBonded = true; /** * If new CompanionDevice-type pairing is enabled on newer Androids **/ private boolean enableCompanionDevicePairing = false; private ProgressBar bluetoothProgress; private ProgressBar bluetoothLEProgress; private DeviceCandidateAdapter deviceCandidateAdapter; private final BluetoothAdapter.LeScanCallback leScanCallback = new BluetoothAdapter.LeScanCallback() { @Override public void onLeScan(BluetoothDevice device, int rssi, byte[] scanRecord) { //logMessageContent(scanRecord); handleDeviceFound(device, (short) rssi); } }; private BluetoothAdapter adapter; private Button startButton; private Scanning isScanning = Scanning.SCANNING_OFF; private final Runnable stopRunnable = new Runnable() { @Override public void run() { if (isScanning == Scanning.SCANNING_BT_NEXT_BLE) { // Start the next scan in the series stopDiscovery(); startDiscovery(Scanning.SCANNING_BLE); } else { stopDiscovery(); } } }; private GBDeviceCandidate bondingDevice; private final BroadcastReceiver bluetoothReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { switch (Objects.requireNonNull(intent.getAction())) { case BluetoothAdapter.ACTION_DISCOVERY_STARTED: if (isScanning != Scanning.SCANNING_BLE) { if (isScanning != Scanning.SCANNING_BT_NEXT_BLE) { setIsScanning(Scanning.SCANNING_BT); } startButton.setText(getString(R.string.discovery_stop_scanning)); } break; case BluetoothAdapter.ACTION_DISCOVERY_FINISHED: handler.post(new Runnable() { @Override public void run() { // continue with LE scan, if available if (isScanning == Scanning.SCANNING_BT || isScanning == Scanning.SCANNING_BT_NEXT_BLE) { checkAndRequestLocationPermission(); stopDiscovery(); startDiscovery(Scanning.SCANNING_BLE); } else { discoveryFinished(); } } }); break; case BluetoothAdapter.ACTION_STATE_CHANGED: int newState = intent.getIntExtra(BluetoothAdapter.EXTRA_STATE, BluetoothAdapter.STATE_OFF); bluetoothStateChanged(newState); break; case BluetoothDevice.ACTION_FOUND: { BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); short rssi = intent.getShortExtra(BluetoothDevice.EXTRA_RSSI, GBDevice.RSSI_UNKNOWN); handleDeviceFound(device, rssi); break; } case BluetoothDevice.ACTION_UUID: { BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); short rssi = intent.getShortExtra(BluetoothDevice.EXTRA_RSSI, GBDevice.RSSI_UNKNOWN); Parcelable[] uuids = intent.getParcelableArrayExtra(BluetoothDevice.EXTRA_UUID); ParcelUuid[] uuids2 = AndroidUtils.toParcelUuids(uuids); handleDeviceFound(device, rssi, uuids2); break; } case BluetoothDevice.ACTION_BOND_STATE_CHANGED: { BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); if (device != null && bondingDevice != null && device.getAddress().equals(bondingDevice.getMacAddress())) { int bondState = intent.getIntExtra(BluetoothDevice.EXTRA_BOND_STATE, BluetoothDevice.BOND_NONE); if (bondState == BluetoothDevice.BOND_BONDED) { handleDeviceBonded(); } } } } } }; private void connectAndFinish(GBDevice device) { toast(DiscoveryActivity.this, 
getString(R.string.discovery_trying_to_connect_to, device.getName()), Toast.LENGTH_SHORT, GB.INFO); GBApplication.deviceService().connect(device, true); finish(); } private void createBond(final GBDeviceCandidate deviceCandidate, int bondingStyle) { if (bondingStyle == DeviceCoordinator.BONDING_STYLE_NONE) { // Do nothing return; } else if (bondingStyle == DeviceCoordinator.BONDING_STYLE_ASK) { new AlertDialog.Builder(this) .setCancelable(true) .setTitle(DiscoveryActivity.this.getString(R.string.discovery_pair_title, deviceCandidate.getName())) .setMessage(DiscoveryActivity.this.getString(R.string.discovery_pair_question)) .setPositiveButton(DiscoveryActivity.this.getString(R.string.discovery_yes_pair), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { doCreatePair(deviceCandidate); } }) .setNegativeButton(R.string.discovery_dont_pair, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { GBDevice device = DeviceHelper.getInstance().toSupportedDevice(deviceCandidate); connectAndFinish(device); } }) .show(); } else { doCreatePair(deviceCandidate); } LOG.debug("Bonding initiated"); } private void doCreatePair(GBDeviceCandidate deviceCandidate) { toast(DiscoveryActivity.this, getString(R.string.discovery_attempting_to_pair, deviceCandidate.getName()), Toast.LENGTH_SHORT, GB.INFO); if (enableCompanionDevicePairing && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { companionDevicePair(deviceCandidate); } else { deviceBond(deviceCandidate); } } private void deviceBond(GBDeviceCandidate deviceCandidate) { if (deviceCandidate.getDevice().createBond()) { // Async, wait for bonding event to finish this activity LOG.info("Bonding in progress..."); bondingDevice = deviceCandidate; } else { toast(DiscoveryActivity.this, getString(R.string.discovery_bonding_failed_immediately, deviceCandidate.getName()), Toast.LENGTH_SHORT, GB.ERROR); } } public void logMessageContent(byte[] value) { if (value != null) { LOG.warn("DATA: " + GB.hexdump(value, 0, value.length)); } } @RequiresApi(Build.VERSION_CODES.O) private void companionDevicePair(final GBDeviceCandidate deviceCandidate) { CompanionDeviceManager deviceManager = getSystemService(CompanionDeviceManager.class); BluetoothDeviceFilter deviceFilter = new BluetoothDeviceFilter.Builder() .setAddress(deviceCandidate.getMacAddress()) .build(); AssociationRequest pairingRequest = new AssociationRequest.Builder() .addDeviceFilter(deviceFilter) .setSingleDevice(true) .build(); deviceManager.associate(pairingRequest, new CompanionDeviceManager.Callback() { @Override public void onFailure(CharSequence error) { toast(DiscoveryActivity.this, getString(R.string.discovery_bonding_failed_immediately, deviceCandidate.getName()), Toast.LENGTH_SHORT, GB.ERROR); } @Override public void onDeviceFound(IntentSender chooserLauncher) { try { startIntentSenderForResult(chooserLauncher, REQUEST_CODE, null, 0, 0, 0); } catch (IntentSender.SendIntentException e) { e.printStackTrace(); } } }, null ); } @RequiresApi(Build.VERSION_CODES.O) @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == REQUEST_CODE && resultCode == Activity.RESULT_OK) { BluetoothDevice deviceToPair = data.getParcelableExtra(CompanionDeviceManager.EXTRA_DEVICE); if (deviceToPair != null) { deviceBond(new GBDeviceCandidate(deviceToPair, (short) 0, null)); handleDeviceBonded(); } } } private void 
handleDeviceBonded() { if (bondingDevice == null) { LOG.error("deviceCandidate was null! Can't handle bonded device!"); return; } toast(DiscoveryActivity.this, getString(R.string.discovery_successfully_bonded, bondingDevice.getName()), Toast.LENGTH_SHORT, GB.INFO); GBDevice device = DeviceHelper.getInstance().toSupportedDevice(bondingDevice); connectAndFinish(device); } @RequiresApi(Build.VERSION_CODES.LOLLIPOP) private ScanCallback getScanCallback() { if (newBLEScanCallback != null) { return newBLEScanCallback; } newBLEScanCallback = new ScanCallback() { @RequiresApi(Build.VERSION_CODES.LOLLIPOP) @Override public void onScanResult(int callbackType, ScanResult result) { super.onScanResult(callbackType, result); try { ScanRecord scanRecord = result.getScanRecord(); ParcelUuid[] uuids = null; if (scanRecord != null) { //logMessageContent(scanRecord.getBytes()); List<ParcelUuid> serviceUuids = scanRecord.getServiceUuids(); if (serviceUuids != null) { uuids = serviceUuids.toArray(new ParcelUuid[0]); } } LOG.warn(result.getDevice().getName() + ": " + ((scanRecord != null) ? scanRecord.getBytes().length : -1)); handleDeviceFound(result.getDevice(), (short) result.getRssi(), uuids); } catch (NullPointerException e) { LOG.warn("Error handling scan result", e); } } }; return newBLEScanCallback; } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Prefs prefs = GBApplication.getPrefs(); ignoreBonded = prefs.getBoolean("ignore_bonded_devices", true); oldBleScanning = prefs.getBoolean("disable_new_ble_scanning", false); if (oldBleScanning) { LOG.info("New BLE scanning disabled via settings, using old method"); } enableCompanionDevicePairing = prefs.getBoolean("enable_companiondevice_pairing", true); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) { enableCompanionDevicePairing = false; // No support below 26 } setContentView(R.layout.activity_discovery); startButton = findViewById(R.id.discovery_start); startButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { onStartButtonClick(startButton); } }); bluetoothProgress = findViewById(R.id.discovery_progressbar); bluetoothProgress.setProgress(0); bluetoothProgress.setIndeterminate(true); bluetoothProgress.setVisibility(View.GONE); ListView deviceCandidatesView = findViewById(R.id.discovery_device_candidates_list); bluetoothLEProgress = findViewById(R.id.discovery_ble_progressbar); bluetoothLEProgress.setProgress(0); bluetoothLEProgress.setIndeterminate(true); bluetoothLEProgress.setVisibility(View.GONE); deviceCandidateAdapter = new DeviceCandidateAdapter(this, deviceCandidates); deviceCandidatesView.setAdapter(deviceCandidateAdapter); deviceCandidatesView.setOnItemClickListener(this); deviceCandidatesView.setOnItemLongClickListener(this); IntentFilter bluetoothIntents = new IntentFilter(); bluetoothIntents.addAction(BluetoothDevice.ACTION_FOUND); bluetoothIntents.addAction(BluetoothDevice.ACTION_UUID); bluetoothIntents.addAction(BluetoothDevice.ACTION_BOND_STATE_CHANGED); bluetoothIntents.addAction(BluetoothAdapter.ACTION_DISCOVERY_STARTED); bluetoothIntents.addAction(BluetoothAdapter.ACTION_DISCOVERY_FINISHED); bluetoothIntents.addAction(BluetoothAdapter.ACTION_STATE_CHANGED); registerReceiver(bluetoothReceiver, bluetoothIntents); checkAndRequestLocationPermission(); startDiscovery(Scanning.SCANNING_BT_NEXT_BLE); } public void onStartButtonClick(View button) { LOG.debug("Start button clicked"); if (isScanning()) { stopDiscovery(); } else { if 
(GB.supportsBluetoothLE()) { startDiscovery(Scanning.SCANNING_BT_NEXT_BLE); } else { startDiscovery(Scanning.SCANNING_BT); } } } @Override protected void onSaveInstanceState(@NonNull Bundle outState) { super.onSaveInstanceState(outState); outState.putParcelableArrayList("deviceCandidates", deviceCandidates); } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); ArrayList<Parcelable> restoredCandidates = savedInstanceState.getParcelableArrayList("deviceCandidates"); if (restoredCandidates != null) { deviceCandidates.clear(); for (Parcelable p : restoredCandidates) { deviceCandidates.add((GBDeviceCandidate) p); } } } @Override protected void onDestroy() { try { unregisterReceiver(bluetoothReceiver); } catch (IllegalArgumentException e) { LOG.warn("Tried to unregister Bluetooth Receiver that wasn't registered"); LOG.warn(e.getMessage()); } super.onDestroy(); } private void handleDeviceFound(BluetoothDevice device, short rssi) { if (device.getName() != null) { if (handleDeviceFound(device, rssi, null)) { LOG.info("found supported device " + device.getName() + " without scanning services, skipping service scan."); return; } } ParcelUuid[] uuids = device.getUuids(); if (uuids == null) { if (device.fetchUuidsWithSdp()) { return; } } handleDeviceFound(device, rssi, uuids); } private boolean handleDeviceFound(BluetoothDevice device, short rssi, ParcelUuid[] uuids) { LOG.debug("found device: " + device.getName() + ", " + device.getAddress()); if (LOG.isDebugEnabled()) { if (uuids != null && uuids.length > 0) { for (ParcelUuid uuid : uuids) { LOG.debug(" supports uuid: " + uuid.toString()); } } } if (device.getBondState() == BluetoothDevice.BOND_BONDED && ignoreBonded) { return true; // Ignore already bonded devices } GBDeviceCandidate candidate = new GBDeviceCandidate(device, rssi, uuids); DeviceType deviceType = DeviceHelper.getInstance().getSupportedType(candidate); if (deviceType.isSupported()) { candidate.setDeviceType(deviceType); LOG.info("Recognized supported device: " + candidate); int index = deviceCandidates.indexOf(candidate); if (index >= 0) { deviceCandidates.set(index, candidate); // replace } else { deviceCandidates.add(candidate); } deviceCandidateAdapter.notifyDataSetChanged(); return true; } return false; } private void startDiscovery(Scanning what) { if (isScanning()) { LOG.warn("Not starting discovery, because already scanning."); return; } LOG.info("Starting discovery: " + what); startButton.setText(getString(R.string.discovery_stop_scanning)); if (ensureBluetoothReady() && isScanning == Scanning.SCANNING_OFF) { if (what == Scanning.SCANNING_BT || what == Scanning.SCANNING_BT_NEXT_BLE) { startBTDiscovery(what); } else if (what == Scanning.SCANNING_BLE && GB.supportsBluetoothLE()) { if (oldBleScanning || Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { startOldBTLEDiscovery(); } else { startBTLEDiscovery(); } } else { discoveryFinished(); toast(DiscoveryActivity.this, getString(R.string.discovery_enable_bluetooth), Toast.LENGTH_SHORT, GB.ERROR); } } else { discoveryFinished(); toast(DiscoveryActivity.this, getString(R.string.discovery_enable_bluetooth), Toast.LENGTH_SHORT, GB.ERROR); } } private void stopDiscovery() { LOG.info("Stopping discovery"); if (isScanning()) { Scanning wasScanning = isScanning; if (wasScanning == Scanning.SCANNING_BT || wasScanning == Scanning.SCANNING_BT_NEXT_BLE) { stopBTDiscovery(); } else if (wasScanning == Scanning.SCANNING_BLE) { if (oldBleScanning || 
Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { stopOldBLEDiscovery(); } else { stopBLEDiscovery(); } } discoveryFinished(); handler.removeMessages(0, stopRunnable); } else { discoveryFinished(); } } private boolean isScanning() { return isScanning != Scanning.SCANNING_OFF; } private void startOldBTLEDiscovery() { LOG.info("Starting old BLE discovery"); setIsScanning(Scanning.SCANNING_BLE); handler.removeMessages(0, stopRunnable); handler.sendMessageDelayed(getPostMessage(stopRunnable), SCAN_DURATION); adapter.startLeScan(leScanCallback); bluetoothLEProgress.setVisibility(View.VISIBLE); } private void stopOldBLEDiscovery() { if (adapter != null) { adapter.stopLeScan(leScanCallback); setIsScanning(Scanning.SCANNING_OFF); LOG.info("Stopped old BLE discovery"); } bluetoothLEProgress.setVisibility(View.GONE); } /* New BTLE Discovery uses startScan (List<ScanFilter> filters, ScanSettings settings, ScanCallback callback) */ @RequiresApi(Build.VERSION_CODES.LOLLIPOP) private void startBTLEDiscovery() { LOG.info("Starting BLE discovery"); setIsScanning(Scanning.SCANNING_BLE); handler.removeMessages(0, stopRunnable); handler.sendMessageDelayed(getPostMessage(stopRunnable), SCAN_DURATION); // Filters being non-null would be a very good idea with background scan, but in this case, // not really required. adapter.getBluetoothLeScanner().startScan(null, getScanSettings(), getScanCallback()); bluetoothLEProgress.setVisibility(View.VISIBLE); } @RequiresApi(Build.VERSION_CODES.LOLLIPOP) private void stopBLEDiscovery() { if (adapter == null) { return; } BluetoothLeScanner bluetoothLeScanner = adapter.getBluetoothLeScanner(); if (bluetoothLeScanner == null) { LOG.warn("Could not get BluetoothLeScanner()!"); return; } if (newBLEScanCallback == null) { LOG.warn("newLeScanCallback == null!"); return; } try { bluetoothLeScanner.stopScan(newBLEScanCallback); } catch (NullPointerException e) { LOG.warn("Internal NullPointerException when stopping the scan!"); return; } bluetoothLEProgress.setVisibility(View.GONE); setIsScanning(Scanning.SCANNING_OFF); LOG.debug("Stopped BLE discovery"); } /** * Starts a regular Bluetooth scan * * @param what The scan type, only either SCANNING_BT or SCANNING_BT_NEXT_BLE! 
*/ private void startBTDiscovery(Scanning what) { LOG.info("Starting BT discovery"); setIsScanning(what); handler.removeMessages(0, stopRunnable); handler.sendMessageDelayed(getPostMessage(stopRunnable), SCAN_DURATION); if (adapter.startDiscovery()) { LOG.error("Discovery starting failed"); } bluetoothProgress.setVisibility(View.VISIBLE); } private void stopBTDiscovery() { if (adapter != null) { adapter.cancelDiscovery(); bluetoothProgress.setVisibility(View.GONE); setIsScanning(Scanning.SCANNING_OFF); LOG.info("Stopped BT discovery"); } } private void discoveryFinished() { if (isScanning != Scanning.SCANNING_OFF) { LOG.warn("Scan was not properly stopped: " + isScanning); } setIsScanning(Scanning.SCANNING_OFF); } private void setIsScanning(Scanning to) { this.isScanning = to; if (isScanning == Scanning.SCANNING_OFF) { startButton.setText(getString(R.string.discovery_start_scanning)); } else { startButton.setText(getString(R.string.discovery_stop_scanning)); } } private void bluetoothStateChanged(int newState) { if (newState == BluetoothAdapter.STATE_ON) { this.adapter = BluetoothAdapter.getDefaultAdapter(); startButton.setEnabled(true); } else { this.adapter = null; startButton.setEnabled(false); } discoveryFinished(); } private boolean checkBluetoothAvailable() { BluetoothManager bluetoothService = (BluetoothManager) getSystemService(BLUETOOTH_SERVICE); if (bluetoothService == null) { LOG.warn("No bluetooth service available"); this.adapter = null; return false; } BluetoothAdapter adapter = bluetoothService.getAdapter(); if (adapter == null) { LOG.warn("No bluetooth adapter available"); this.adapter = null; return false; } if (!adapter.isEnabled()) { LOG.warn("Bluetooth not enabled"); Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE); startActivity(enableBtIntent); this.adapter = null; return false; } this.adapter = adapter; return true; } private boolean ensureBluetoothReady() { boolean available = checkBluetoothAvailable(); startButton.setEnabled(available); if (available) { adapter.cancelDiscovery(); // must not return the result of cancelDiscovery() // appears to return false when currently not scanning return true; } return false; } @RequiresApi(Build.VERSION_CODES.LOLLIPOP) private ScanSettings getScanSettings() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { return new ScanSettings.Builder() .setCallbackType(android.bluetooth.le.ScanSettings.CALLBACK_TYPE_ALL_MATCHES) .setScanMode(android.bluetooth.le.ScanSettings.SCAN_MODE_LOW_LATENCY) .setMatchMode(android.bluetooth.le.ScanSettings.MATCH_MODE_AGGRESSIVE) .setPhy(android.bluetooth.le.ScanSettings.PHY_LE_ALL_SUPPORTED) .setNumOfMatches(android.bluetooth.le.ScanSettings.MATCH_NUM_ONE_ADVERTISEMENT) .build(); } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { return new ScanSettings.Builder() .setCallbackType(android.bluetooth.le.ScanSettings.CALLBACK_TYPE_ALL_MATCHES) .setScanMode(android.bluetooth.le.ScanSettings.SCAN_MODE_LOW_LATENCY) .setMatchMode(android.bluetooth.le.ScanSettings.MATCH_MODE_AGGRESSIVE) .setNumOfMatches(android.bluetooth.le.ScanSettings.MATCH_NUM_ONE_ADVERTISEMENT) .build(); } else { return new ScanSettings.Builder() .setScanMode(android.bluetooth.le.ScanSettings.SCAN_MODE_LOW_LATENCY) .build(); } } private List<ScanFilter> getScanFilters() { List<ScanFilter> allFilters = new ArrayList<>(); for (DeviceCoordinator coordinator : DeviceHelper.getInstance().getAllCoordinators()) { allFilters.addAll(coordinator.createBLEScanFilters()); } return allFilters; } private Message 
getPostMessage(Runnable runnable) { Message message = Message.obtain(handler, runnable); message.obj = runnable; return message; } private void checkAndRequestLocationPermission() { if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) { LOG.error("No permission to access coarse location!"); toast(DiscoveryActivity.this, getString(R.string.error_no_location_access), Toast.LENGTH_SHORT, GB.ERROR); ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_COARSE_LOCATION}, 0); } if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED) { LOG.error("No permission to access fine location!"); toast(DiscoveryActivity.this, getString(R.string.error_no_location_access), Toast.LENGTH_SHORT, GB.ERROR); ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_FINE_LOCATION}, 0); } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.ACCESS_BACKGROUND_LOCATION) != PackageManager.PERMISSION_GRANTED) { LOG.error("No permission to access background location!"); toast(DiscoveryActivity.this, getString(R.string.error_no_location_access), Toast.LENGTH_SHORT, GB.ERROR); ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_BACKGROUND_LOCATION}, 0); } } LocationManager locationManager = (LocationManager) DiscoveryActivity.this.getSystemService(Context.LOCATION_SERVICE); try { if (locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER) || locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER)) { // Do nothing LOG.debug("Some location provider is enabled, assuming location is enabled"); } else { toast(DiscoveryActivity.this, getString(R.string.require_location_provider), Toast.LENGTH_LONG, GB.ERROR); DiscoveryActivity.this.startActivity(new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS)); // We can't be sure location was enabled, cancel scan start and wait for new user action toast(DiscoveryActivity.this, getString(R.string.error_location_enabled_mandatory), Toast.LENGTH_SHORT, GB.ERROR); return; } } catch (Exception ex) { LOG.error("Exception when checking location status: ", ex); } } @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { GBDeviceCandidate deviceCandidate = deviceCandidates.get(position); if (deviceCandidate == null) { LOG.error("Device candidate clicked, but item not found"); return; } stopDiscovery(); DeviceCoordinator coordinator = DeviceHelper.getInstance().getCoordinator(deviceCandidate); LOG.info("Using device candidate " + deviceCandidate + " with coordinator: " + coordinator.getClass()); if (coordinator.getBondingStyle() == DeviceCoordinator.BONDING_STYLE_REQUIRE_KEY) { SharedPreferences sharedPrefs = GBApplication.getDeviceSpecificSharedPrefs(deviceCandidate.getMacAddress()); String authKey = sharedPrefs.getString("authkey", null); if (authKey == null || authKey.isEmpty() || authKey.getBytes().length < 34 || !authKey.startsWith("0x")) { toast(DiscoveryActivity.this, getString(R.string.discovery_need_to_enter_authkey), Toast.LENGTH_LONG, GB.WARN); return; } } Class<? 
extends Activity> pairingActivity = coordinator.getPairingActivity(); if (pairingActivity != null) { Intent intent = new Intent(this, pairingActivity); intent.putExtra(DeviceCoordinator.EXTRA_DEVICE_CANDIDATE, deviceCandidate); startActivity(intent); } else { GBDevice device = DeviceHelper.getInstance().toSupportedDevice(deviceCandidate); int bondingStyle = coordinator.getBondingStyle(); if (bondingStyle == DeviceCoordinator.BONDING_STYLE_NONE) { LOG.info("No bonding needed, according to coordinator, so connecting right away"); connectAndFinish(device); return; } try { BluetoothDevice btDevice = adapter.getRemoteDevice(deviceCandidate.getMacAddress()); switch (btDevice.getBondState()) { case BluetoothDevice.BOND_NONE: { createBond(deviceCandidate, bondingStyle); break; } case BluetoothDevice.BOND_BONDING: { // async, wait for bonding event to finish this activity bondingDevice = deviceCandidate; break; } case BluetoothDevice.BOND_BONDED: { bondingDevice = deviceCandidate; handleDeviceBonded(); break; } } } catch (Exception e) { LOG.error("Error pairing device: " + deviceCandidate.getMacAddress()); } } } @Override public boolean onItemLongClick(AdapterView<?> adapterView, View view, int position, long id) { GBDeviceCandidate deviceCandidate = deviceCandidates.get(position); if (deviceCandidate == null) { LOG.error("Device candidate clicked, but item not found"); return true; } DeviceCoordinator coordinator = DeviceHelper.getInstance().getCoordinator(deviceCandidate); GBDevice device = DeviceHelper.getInstance().toSupportedDevice(deviceCandidate); if (coordinator.getSupportedDeviceSpecificSettings(device) == null) { return true; } Intent startIntent; startIntent = new Intent(this, DeviceSettingsActivity.class); startIntent.putExtra(GBDevice.EXTRA_DEVICE, device); startActivity(startIntent); return true; } @Override protected void onPause() { super.onPause(); stopBTDiscovery(); if (oldBleScanning) { stopOldBLEDiscovery(); } else { if (GBApplication.isRunningLollipopOrLater()) { stopBLEDiscovery(); } } } private enum Scanning { /** * Regular Bluetooth scan */ SCANNING_BT, /** * Regular Bluetooth scan but when ends, start BLE scan */ SCANNING_BT_NEXT_BLE, /** * Regular BLE scan */ SCANNING_BLE, /** * Scanning has ended or hasn't been started */ SCANNING_OFF } }
Fixed a UI error when starting Bluetooth discovery fails
app/src/main/java/nodomain/freeyourgadget/gadgetbridge/activities/DiscoveryActivity.java
Fixed a UI error when starting Bluetooth discovery fails
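In the old contents above, startBTDiscovery() logs "Discovery starting failed" when BluetoothAdapter.startDiscovery() returns true (i.e. on success) and shows the progress spinner unconditionally, which is the kind of UI error the commit message describes. The following is a minimal sketch of such a correction, not the commit's actual new contents; it assumes the same fields and helpers as the old code (adapter, handler, stopRunnable, bluetoothProgress, SCAN_DURATION, setIsScanning(), getPostMessage()).

// Hypothetical sketch only, assuming the fields listed above.
private void startBTDiscovery(Scanning what) {
    LOG.info("Starting BT discovery");
    setIsScanning(what);
    // Clear any pending "stop scanning" message before scheduling a fresh one.
    handler.removeMessages(0, stopRunnable);
    handler.sendMessageDelayed(getPostMessage(stopRunnable), SCAN_DURATION);
    if (adapter.startDiscovery()) {
        // startDiscovery() returns true on success, so only then show the spinner.
        bluetoothProgress.setVisibility(View.VISIBLE);
    } else {
        LOG.error("Discovery starting failed");
        // Roll the UI back so the button does not stay in "stop scanning" mode.
        setIsScanning(Scanning.SCANNING_OFF);
    }
}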
Java
lgpl-2.1
dd94c9ac8beb5c3b2f2f3ac90e96aae66e812143
0
andreasprlic/spice-3d
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * * Created on Jan 25, 2006 * */ package org.biojava.spice.jmol; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import org.biojava.bio.structure.Chain; import org.biojava.bio.structure.Group; import org.biojava.bio.structure.Structure; import org.biojava.bio.structure.StructureException; import org.biojava.bio.structure.StructureImpl; import org.biojava.dasobert.eventmodel.SequenceListener; import org.biojava.dasobert.eventmodel.StructureEvent; import org.biojava.dasobert.eventmodel.StructureListener; import org.jmol.api.JmolStatusListener; import org.jmol.api.JmolViewer; import org.jmol.popup.JmolPopup; public class JmolSpiceTranslator implements JmolStatusListener, StructureListener { static Logger logger = Logger.getLogger("org.biojava.spice"); static char emptyChain = ' '; JmolViewer viewer; JmolPopup jmolpopup ; Structure structure; int currentChainNumber; List pdbSequenceListener; public JmolSpiceTranslator() { super(); structure = new StructureImpl(); currentChainNumber = -1; pdbSequenceListener = new ArrayList(); } public void setJmolViewer(JmolViewer viewer){ this.viewer = viewer; } public void setJmolPopup(JmolPopup popup){ this.jmolpopup = popup; } public synchronized void notifyFileLoaded(String fullPathName, String fileName, String modelName, Object clientFile, String errorMessage){ //logger.info("JmolSpiceTranslator notifyFileLoaded " + fileName + " " + modelName + " " + errorMessage); if (errorMessage != null){ logger.log(Level.SEVERE,errorMessage); } } public void notifyFileNotLoaded(String fullPathName, String errorMsg){} public void setStatusMessage(String statusMessage){ logger.log(Level.INFO,statusMessage); } public void scriptEcho(String strEcho){ if ( strEcho.equals("no structure found")) return; logger.log(Level.INFO, "jmol scriptEcho: " + strEcho); } public void scriptStatus(String strStatus){ logger.log(Level.FINE,"jmol scriptStatus: " +strStatus); } public void notifyScriptTermination(String statusMessage, int msWalltime){ //logger.fine("Script finished in " + msWalltime + "ms"); } public void showUrl(String urlString) { logger.finest("showUrl: " +urlString); } public void showConsole(boolean showConsole){ logger.finest("jmol: showConsole "+showConsole); } public void handlePopupMenu(int x, int y){ //logger.finest("handlePopupMenu"); //viewer.popupMenu(e.getX(),e.getY()); if ( jmolpopup != null) { jmolpopup.show(x,y); } } public void notifyAtomPicked(int atomIndex, String strInfo){ logger.info("Atom picked " + atomIndex + " " + strInfo); if ( viewer != null ) { //int mod = viewer.getAtomModelIndex(atomIndex); //viewer.get String info = viewer.getAtomInfo(atomIndex); AtomInfo ai = AtomInfoParser.parse(info); logger.info(info); logger.info(ai+""); //int modelNr = viewer.getAtomModelIndex(atomIndex); String pdbcode = ai.getResidueNumber(); String chainId = ai.getChainId(); int modelNr = ai.getModelNumber(); if ( modelNr > 1) { 
logger.info("you selected an atom from model "+modelNr+" which is currently not active"); logger.info(strInfo); return; } //logger.info(">"+chainId + "< >" + pdbcode +"<"); highlitePdbPosition(pdbcode,""+chainId); } } private void highlitePdbPosition(String pdbresnum,String chainId){ // notify that a particulat position has been selected Chain currentChain = structure.getChain(currentChainNumber); //logger.info("current chain is >" + currentChain.getName() + "< selected is >" + chainId + "< " + chainId.length()); if ( (chainId == null) || (chainId.equals(""))) chainId = " "; if ( currentChain.getName().equals(chainId)){ int seqPos = getSeqPosFromPdb(pdbresnum, currentChain); //logger.info("is spice seq. position " + seqPos); if ( seqPos >=0){ triggerSelectedSeqPos(seqPos); } } else { logger.info("selected residue " + pdbresnum + " chain >" + chainId + "< (chain currently not active in sequence dispay)"); } // set the selection in Jmol... String cmd ; if (! chainId.equals(" ")) cmd = "select "+pdbresnum+":"+chainId+"/1; set display selected"; else cmd = "select "+pdbresnum+"/1; set display selected"; if ( viewer != null){ viewer.evalString(cmd); } } private int getSeqPosFromPdb(String pdbresnum, Chain currentChain){ List groups = currentChain.getGroups(); try { Group g = currentChain.getGroupByPDB(pdbresnum); return groups.indexOf(g); } catch (StructureException e) { return -1; } } public void notifyMeasurementsChanged(){ logger.finest("nofiyMeasurementsChanged"); } public void notifyFrameChanged(int frameNo){} // now the Spice Structure events ... public void newStructure(StructureEvent event) { //logger.info("JmolSpiceTranslator got new structure " + event.getPDBCode() + " " + structure.getPDBCode()); String p = event.getPDBCode(); if ( ( p != null ) && ( p.equalsIgnoreCase(structure.getPDBCode()))) { // already known return; } this.structure = event.getStructure(); this.currentChainNumber = event.getCurrentChainNumber(); } public void selectedChain(StructureEvent event) { //logger.info("JmolSpiceTranslator selected Chain" + event.getCurrentChainNumber()); this.structure = event.getStructure(); this.currentChainNumber = event.getCurrentChainNumber(); } public void newObjectRequested(String accessionCode) { this.structure = new StructureImpl(); this.currentChainNumber = -1; } public void noObjectFound(String accessionCode) { // TODO Auto-generated method stub } public void addPDBSequenceListener(SequenceListener li){ pdbSequenceListener.add(li); } public void clearListeners(){ structure = new StructureImpl(); pdbSequenceListener.clear(); } private void triggerSelectedSeqPos(int position){ Iterator iter = pdbSequenceListener.iterator(); while (iter.hasNext()){ SequenceListener li = (SequenceListener)iter.next(); li.selectedSeqPosition(position); } } public void notifyNewDefaultModeMeasurement(int count, String strInfo) { // TODO Auto-generated method stub } public void notifyNewPickingModeMeasurement(int iatom, String strMeasure) { // TODO Auto-generated method stub } public void notifyScriptStart(String statusMessage, String additionalInfo) { // TODO Auto-generated method stub } public void sendConsoleEcho(String strEcho) { // TODO Auto-generated method stub logger.info(strEcho); } public void sendConsoleMessage(String strStatus) { // TODO Auto-generated method stub //logger.info(strStatus); } public void sendSyncScript(String script, String appletName) { // TODO Auto-generated method stub logger.info("sendSyncScript" + script); } public float functionXY(String functionName, int x, int y) 
{ // TODO Auto-generated method stub return 0; } public void notifyAtomHovered(int atomIndex, String strInfo) { logger.info("over Atom " + strInfo); } public void setCallbackFunction(String callbackType, String callbackFunction) { // TODO Auto-generated method stub } public void createImage(String file, String type, int quality) { // TODO Auto-generated method stub } public String eval(String strEval) { // System.out.println("strEval called" + strEval); return null; } public void notifyFrameChanged(int frameNo, int fileNo, int modelNo, int firstNo, int LastNo) { //System.out.println("notifyFrameChanged " + frameNo); } }
src/org/biojava/spice/jmol/JmolSpiceTranslator.java
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * * Created on Jan 25, 2006 * */ package org.biojava.spice.jmol; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import org.biojava.bio.structure.Chain; import org.biojava.bio.structure.Group; import org.biojava.bio.structure.Structure; import org.biojava.bio.structure.StructureException; import org.biojava.bio.structure.StructureImpl; import org.biojava.dasobert.eventmodel.SequenceListener; import org.biojava.dasobert.eventmodel.StructureEvent; import org.biojava.dasobert.eventmodel.StructureListener; import org.jmol.api.JmolStatusListener; import org.jmol.api.JmolViewer; import org.jmol.popup.JmolPopup; public class JmolSpiceTranslator implements JmolStatusListener, StructureListener { static Logger logger = Logger.getLogger("org.biojava.spice"); static char emptyChain = ' '; JmolViewer viewer; JmolPopup jmolpopup ; Structure structure; int currentChainNumber; List pdbSequenceListener; public JmolSpiceTranslator() { super(); structure = new StructureImpl(); currentChainNumber = -1; pdbSequenceListener = new ArrayList(); } public void setJmolViewer(JmolViewer viewer){ this.viewer = viewer; } public void setJmolPopup(JmolPopup popup){ this.jmolpopup = popup; } public synchronized void notifyFileLoaded(String fullPathName, String fileName, String modelName, Object clientFile, String errorMessage){ //logger.info("JmolSpiceTranslator notifyFileLoaded " + fileName + " " + modelName + " " + errorMessage); if (errorMessage != null){ logger.log(Level.SEVERE,errorMessage); } } public void notifyFileNotLoaded(String fullPathName, String errorMsg){} public void setStatusMessage(String statusMessage){ logger.log(Level.INFO,statusMessage); } public void scriptEcho(String strEcho){ if ( strEcho.equals("no structure found")) return; logger.log(Level.INFO, "jmol scriptEcho: " + strEcho); } public void scriptStatus(String strStatus){ logger.log(Level.FINE,"jmol scriptStatus: " +strStatus); } public void notifyScriptTermination(String statusMessage, int msWalltime){ //logger.fine("Script finished in " + msWalltime + "ms"); } public void showUrl(String urlString) { logger.finest("showUrl: " +urlString); } public void showConsole(boolean showConsole){ logger.finest("jmol: showConsole "+showConsole); } public void handlePopupMenu(int x, int y){ //logger.finest("handlePopupMenu"); //viewer.popupMenu(e.getX(),e.getY()); if ( jmolpopup != null) { jmolpopup.show(x,y); } } public void notifyAtomPicked(int atomIndex, String strInfo){ logger.info("Atom picked " + atomIndex + " " + strInfo); if ( viewer != null ) { //int mod = viewer.getAtomModelIndex(atomIndex); //viewer.get String info = viewer.getAtomInfo(atomIndex); AtomInfo ai = AtomInfoParser.parse(info); logger.info(info); logger.info(ai+""); //int modelNr = viewer.getAtomModelIndex(atomIndex); String pdbcode = ai.getResidueNumber(); String chainId = ai.getChainId(); int modelNr = ai.getModelNumber(); if ( modelNr > 1) { 
logger.info("you selected an atom from model "+modelNr+" which is currently not active"); logger.info(strInfo); return; } //logger.info(">"+chainId + "< >" + pdbcode +"<"); highlitePdbPosition(pdbcode,""+chainId); } } private void highlitePdbPosition(String pdbresnum,String chainId){ // notify that a particulat position has been selected Chain currentChain = structure.getChain(currentChainNumber); //logger.info("current chain is >" + currentChain.getName() + "< selected is >" + chainId + "< " + chainId.length()); if ( (chainId == null) || (chainId.equals(""))) chainId = " "; if ( currentChain.getName().equals(chainId)){ int seqPos = getSeqPosFromPdb(pdbresnum, currentChain); //logger.info("is spice seq. position " + seqPos); if ( seqPos >=0){ triggerSelectedSeqPos(seqPos); } } else { logger.info("selected residue " + pdbresnum + " chain >" + chainId + "< (chain currently not active in sequence dispay)"); } // set the selection in Jmol... String cmd ; if (! chainId.equals(" ")) cmd = "select "+pdbresnum+":"+chainId+"/1; set display selected"; else cmd = "select "+pdbresnum+"/1; set display selected"; if ( viewer != null){ viewer.evalString(cmd); } } private int getSeqPosFromPdb(String pdbresnum, Chain currentChain){ List groups = currentChain.getGroups(); try { Group g = currentChain.getGroupByPDB(pdbresnum); return groups.indexOf(g); } catch (StructureException e) { return -1; } } public void notifyMeasurementsChanged(){ logger.finest("nofiyMeasurementsChanged"); } public void notifyFrameChanged(int frameNo){} // now the Spice Structure events ... public void newStructure(StructureEvent event) { //logger.info("JmolSpiceTranslator got new structure " + event.getPDBCode() + " " + structure.getPDBCode()); String p = event.getPDBCode(); if ( ( p != null ) && ( p.equalsIgnoreCase(structure.getPDBCode()))) { // already known return; } this.structure = event.getStructure(); this.currentChainNumber = event.getCurrentChainNumber(); } public void selectedChain(StructureEvent event) { //logger.info("JmolSpiceTranslator selected Chain" + event.getCurrentChainNumber()); this.structure = event.getStructure(); this.currentChainNumber = event.getCurrentChainNumber(); } public void newObjectRequested(String accessionCode) { this.structure = new StructureImpl(); this.currentChainNumber = -1; } public void noObjectFound(String accessionCode) { // TODO Auto-generated method stub } public void addPDBSequenceListener(SequenceListener li){ pdbSequenceListener.add(li); } public void clearListeners(){ structure = new StructureImpl(); pdbSequenceListener.clear(); } private void triggerSelectedSeqPos(int position){ Iterator iter = pdbSequenceListener.iterator(); while (iter.hasNext()){ SequenceListener li = (SequenceListener)iter.next(); li.selectedSeqPosition(position); } } public void notifyNewDefaultModeMeasurement(int count, String strInfo) { // TODO Auto-generated method stub } public void notifyNewPickingModeMeasurement(int iatom, String strMeasure) { // TODO Auto-generated method stub } public void notifyScriptStart(String statusMessage, String additionalInfo) { // TODO Auto-generated method stub } public void sendConsoleEcho(String strEcho) { // TODO Auto-generated method stub logger.info(strEcho); } public void sendConsoleMessage(String strStatus) { // TODO Auto-generated method stub //logger.info(strStatus); } public void sendSyncScript(String script, String appletName) { // TODO Auto-generated method stub logger.info("sendSyncScript" + script); } public float functionXY(String functionName, int x, int y) 
{ // TODO Auto-generated method stub return 0; } public void notifyAtomHovered(int atomIndex, String strInfo) { logger.info("over Atom " + strInfo); } public void setCallbackFunction(String callbackType, String callbackFunction) { // TODO Auto-generated method stub } public void createImage(String file, String type, int quality) { // TODO Auto-generated method stub } public String eval(String strEval) { // System.out.println("strEval called" + strEval); return null; } }
Adjustment to the latest Jmol
src/org/biojava/spice/jmol/JmolSpiceTranslator.java
Adjustment to the latest Jmol
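For readers comparing the two blobs above: the only difference visible between the new and old contents is the extra five-argument notifyFrameChanged callback at the end of the new file, presumably matching a callback added in a newer Jmol JmolStatusListener interface. It is isolated below for clarity, with the parameter names copied from the new contents (including the capitalised LastNo).

// Copied from the new contents above; a no-op apart from the commented-out debug print.
public void notifyFrameChanged(int frameNo, int fileNo, int modelNo, int firstNo, int LastNo) {
    //System.out.println("notifyFrameChanged " + frameNo);
}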