Dataset columns (name, type, observed range of values/lengths):

column        type            range / values
lang          stringclasses   1 value
license       stringclasses   13 values
stderr        stringlengths   0-350
commit        stringlengths   40-40
returncode    int64           0-128
repos         stringlengths   7-45.1k
new_contents  stringlengths   0-1.87M
new_file      stringlengths   6-292
old_contents  stringlengths   0-1.87M
message       stringlengths   6-9.26k
old_file      stringlengths   6-292
subject       stringlengths   0-4.45k
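Each record in the sample rows below pairs a source file's contents before and after a single commit (old_contents and new_contents), together with the repositories containing the file, the commit hash, a return code, and the commit message and subject. As a minimal sketch of how records with this schema might be loaded and inspected, the snippet below assumes the data is available as a JSON Lines file; the path commit-data.jsonl is a hypothetical placeholder, not the dataset's actual location.

```python
# Minimal sketch (assumed setup): load records with the schema above from a
# JSON Lines file and print a few fields. "commit-data.jsonl" is a placeholder
# path, not the dataset's real location.
from datasets import load_dataset

ds = load_dataset("json", data_files="commit-data.jsonl", split="train")

for row in ds.select(range(min(3, len(ds)))):
    print(row["commit"][:8], row["lang"], row["license"], row["returncode"])
    print("  file:   ", row["new_file"])
    print("  subject:", row["subject"])
    # old_contents / new_contents hold the full file text before and after the commit
    print("  size delta:", len(row["new_contents"]) - len(row["old_contents"]))
```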
lang: Java
license: apache-2.0
commit: d8e0d07f30c59e8b88421b63647d4dd3943bb0c9
returncode: 0
repos: foam-framework/foam2,jacksonic/vjlofvhjfgm,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2
new_contents:
/** * @license * Copyright 2020 The FOAM Authors. All Rights Reserved. * http://www.apache.org/licenses/LICENSE-2.0 */ package foam.lib.formatter; import foam.core.ClassInfo; import foam.core.FObject; import foam.core.PropertyInfo; import foam.core.X; import foam.lib.json.OutputJSON; import foam.lib.PropertyPredicate; import foam.util.SafetyUtil; import java.lang.reflect.Array; import java.text.SimpleDateFormat; import java.util.*; /* To Make faster: 1. faster escaping 2. don't escape class names and property names 3. don't quote property keys 4. use short names 5. smaller format for enums and dates 6. have PropertyInfo output directly from primitive 7. support outputting directly to another Visitor, StringBuilder, OutputStream, etc. without converting to String. 8. Use Fast TimeStamper or similar */ public class JSONFObjectFormatter extends AbstractFObjectFormatter { // TODO: use fast timestamper? protected static ThreadLocal<SimpleDateFormat> sdf = new ThreadLocal<SimpleDateFormat>() { @Override protected SimpleDateFormat initialValue() { SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); df.setTimeZone(java.util.TimeZone.getTimeZone("UTC")); return df; } }; protected boolean quoteKeys_ = false; protected boolean outputShortNames_ = true; protected boolean outputDefaultValues_ = false; protected boolean multiLineOutput_ = false; protected boolean outputClassNames_ = true; protected boolean outputReadableDates_ = true; protected boolean outputDefaultClassNames_ = true; public static ThreadLocal<FObjectFormatter> getThreadLocal(X x, Boolean quoteKeys, Boolean outputClassNames, PropertyPredicate predicate) { return getThreadLocal(x, quoteKeys, outputClassNames, false, predicate); } public static ThreadLocal<FObjectFormatter> getThreadLocal(X x, Boolean quoteKeys, Boolean outputClassNames, Boolean outputDefaultClassNames, PropertyPredicate predicate) { return new ThreadLocal<FObjectFormatter>() { @Override protected JSONFObjectFormatter initialValue() { foam.lib.formatter.JSONFObjectFormatter formatter = new foam.lib.formatter.JSONFObjectFormatter(x); formatter.setQuoteKeys(quoteKeys); formatter.setOutputClassNames(outputClassNames); formatter.setOutputDefaultClassNames(outputDefaultClassNames); formatter.setPropertyPredicate(predicate); return formatter; } }; } public JSONFObjectFormatter(X x) { super(x); } public JSONFObjectFormatter() { super(); } protected void outputUndefined() { } protected void outputNull() { } public void output(String s) { if ( multiLineOutput_ && s.indexOf('\n') >= 0 ) { b_.append("\n\"\"\""); escapeAppend(s); b_.append("\"\"\""); } else { b_.append('"'); escapeAppend(s); b_.append('"'); } } public void escapeAppend(String s) { foam.lib.json.Util.escape(s, b_); } public void output(short val) { b_.append(val); } public void output(int val) { b_.append(val); } public void output(long val) { b_.append(val); } public void output(float val) { b_.append(val); } public void output(double val) { b_.append(val); } public void output(boolean val) { b_.append(val); } protected void outputNumber(Number value) { b_.append(value); } protected void outputBoolean(Boolean value) { output(value.booleanValue()); } public void output(String[] arr) { output((Object[]) arr); } public void output(Object[] array) { b_.append('['); for ( int i = 0 ; i < array.length ; i++ ) { output(array[i]); if ( i < array.length - 1 ) b_.append(','); } b_.append(']'); } public void output(byte[][] array) { b_.append('['); for ( int i = 0 ; i < array.length ; i++ ) { 
output(array[i]); if ( i < array.length - 1 ) b_.append(','); } b_.append(']'); } public void output(byte[] array) { output(foam.util.SecurityUtil.ByteArrayToHexString(array)); } public void output(Map map) { b_.append('{'); java.util.Iterator keys = map.keySet().iterator(); while ( keys.hasNext() ) { Object key = keys.next(); Object value = map.get(key); output(key == null ? "" : key.toString()); b_.append(':'); output(value); if ( keys.hasNext() ) b_.append(','); } b_.append('}'); } public void output(List list) { b_.append('['); java.util.Iterator iter = list.iterator(); while ( iter.hasNext() ) { output(iter.next()); if ( iter.hasNext() ) b_.append(','); } b_.append(']'); } protected void outputProperty(FObject o, PropertyInfo p) { outputKey(getPropertyName(p)); b_.append(':'); p.format(this, o); } /* public void outputMap(Object... values) { if ( values.length % 2 != 0 ) { throw new RuntimeException("Need even number of arguments for outputMap"); } b_.append("{"); int i = 0; while ( i < values.length ) { b_.append(beforeKey_()); b_.append(values[i++].toString()); b_.append(afterKey_()); b_.append(":"); output(values[i++]); if ( i < values.length ) b_.append(","); } b_.append("}"); } */ public void output(Enum<?> value) { output(value.ordinal()); // outputNumber(value.ordinal()); /* b_.append('{'); b_.append(beforeKey_()); b_.append("class"); b_.append(afterKey_()); b_.append(':'); output(value.getClass().getName()); b_.append(","); b_.append(beforeKey_()); b_.append("ordinal"); b_.append(afterKey_()); b_.append(':'); outputNumber(value.ordinal()); b_.append('}'); */ } public void output(Object value) { if ( value instanceof OutputJSON ) { ((OutputJSON) value).formatJSON(this); } else if ( value instanceof String ) { output((String) value); } else if ( value instanceof FObject ) { output((FObject) value); } else if ( value instanceof PropertyInfo) { output((PropertyInfo) value); } else if ( value instanceof ClassInfo ) { output((ClassInfo) value); } else if ( value instanceof Number ) { outputNumber((Number) value); } else if ( isArray(value) ) { if ( value.getClass().equals(byte[][].class) ) { output((byte[][]) value); } else if ( value instanceof byte[] ) { output((byte[]) value); } else { output((Object[]) value); } } else if ( value instanceof Boolean ) { outputBoolean((Boolean) value); } else if ( value instanceof Date ) { output((Date) value); } else if ( value instanceof Map ) { output((Map) value); } else if ( value instanceof List ) { output((List) value); } else if ( value instanceof Enum<?> ) { output((Enum<?>) value); } else /*if ( value == null )*/ { b_.append("null"); } } protected boolean isArray(Object value) { return value != null && ( value.getClass() != null ) && value.getClass().isArray(); } public void outputDateValue(Date date) { if ( outputReadableDates_ ) output(sdf.get().format(date)); else outputNumber(date.getTime()); } public void output(Date date) { output(date.getTime()); /* b_.append("{\"class\":\"__Timestamp__\",\"value\":"); outputDateValue(date); b_.append('}'); */ } protected Boolean maybeOutputProperty(FObject fo, PropertyInfo prop, boolean includeComma) { if ( ! outputDefaultValues_ && ! 
prop.isSet(fo) ) return false; Object value = prop.get(fo); if ( value == null || ( isArray(value) && Array.getLength(value) == 0 ) ) { return false; } if ( includeComma ) b_.append(','); if ( multiLineOutput_ ) addInnerNewline(); outputProperty(fo, prop); return true; } public void outputDelta(FObject oldFObject, FObject newFObject) { ClassInfo info = oldFObject.getClassInfo(); ClassInfo newInfo = newFObject.getClassInfo(); boolean outputComma = true; boolean isDiff = false; boolean isPropertyDiff = false; if ( ! oldFObject.equals(newFObject) ) { List axioms = getProperties(info); int size = axioms.size(); b_.append('{'); addInnerNewline(); for ( int i = 0 ; i < size ; i++ ) { PropertyInfo prop = (PropertyInfo) axioms.get(i); isPropertyDiff = maybeOutputPropertyDelta(oldFObject, newFObject, prop); if ( isPropertyDiff ) { if ( ! isDiff ) { if ( outputClassNames_ && outputDefaultClassNames_ ) { //output Class name outputKey("class"); b_.append(':'); output(newInfo.getId()); b_.append(','); } addInnerNewline(); PropertyInfo id = (PropertyInfo) newInfo.getAxiomByName("id"); outputProperty(newFObject, id); isDiff = true; // to output class names for references outputDefaultClassNames_ = true; } b_.append(','); addInnerNewline(); outputProperty(newFObject, prop); } } if ( isDiff ) { addInnerNewline(); b_.append('}'); } } } public void outputDelta(FObject oldFObject, FObject newFObject, ClassInfo defaultClass) { ClassInfo info = newFObject.getClassInfo(); if ( info == defaultClass ) outputDefaultClassNames_ = false; outputDelta(oldFObject, newFObject); } protected void addInnerNewline() { if ( multiLineOutput_ ) { b_.append('\n'); } } protected boolean maybeOutputPropertyDelta(FObject oldFObject, FObject newFObject, PropertyInfo prop) { return prop.compare(oldFObject, newFObject) != 0; } /* public void outputJSONJFObject(FObject o) { b_.append("p("); outputFObject(o); b_.append(")\r\n"); } */ public void output(FObject[] arr, ClassInfo defaultClass) { output(arr); } public void output(FObject[] arr) { } public void output(FObject o, ClassInfo defaultClass) { ClassInfo info = o.getClassInfo(); if ( info == defaultClass ) outputDefaultClassNames_ = false; output(o); } public void output(FObject o) { ClassInfo info = o.getClassInfo(); b_.append('{'); addInnerNewline(); if ( outputClassNames_ && outputDefaultClassNames_ ) { outputKey("class"); b_.append(':'); output(info.getId()); } boolean outputComma = outputClassNames_ && outputDefaultClassNames_; // to output class names for references outputDefaultClassNames_ = true; List axioms = getProperties(info); int size = axioms.size(); for ( int i = 0 ; i < size ; i++ ) { PropertyInfo prop = (PropertyInfo) axioms.get(i); outputComma = maybeOutputProperty(o, prop, outputComma) || outputComma; } addInnerNewline(); b_.append('}'); } public void output(PropertyInfo prop) { b_.append('{'); outputKey("class"); b_.append(':'); output("__Property__"); b_.append(','); outputKey("forClass_"); b_.append(':'); output(prop.getClassInfo().getId()); b_.append(','); outputKey("name"); b_.append(':'); output(getPropertyName(prop)); // if ( quoteKeys_ ) { // output(getPropertyName(prop)); // } else { // outputRawString(getPropertyName(prop)); // } b_.append('}'); } public void outputJson(String str) { if ( ! 
quoteKeys_ ) str = str.replaceAll("\"class\"", "class"); outputFormattedString(str); } public void output(ClassInfo info) { outputKey(info.getId()); // b_.append('{'); // if ( quoteKeys_ ) b_.append(beforeKey_()); // b_.append("class"); // if ( quoteKeys_ ) b_.append(afterKey_()); // b_.append(":"); // b_.append("\"__Class__\""); // b_.append(":"); // b_.append("{\"class\":\"__Class__\",\"forClass_\":"); // output(info.getId()); // b_.append('}'); } protected void appendQuote() { b_.append('"'); } public String getPropertyName(PropertyInfo p) { return outputShortNames_ && ! SafetyUtil.isEmpty(p.getShortName()) ? p.getShortName() : p.getName(); } public void outputFormattedString(String str) { b_.append(str); } public JSONFObjectFormatter setQuoteKeys(boolean quoteKeys) { quoteKeys_ = quoteKeys; return this; } public JSONFObjectFormatter setOutputShortNames(boolean outputShortNames) { outputShortNames_ = outputShortNames; return this; } public JSONFObjectFormatter setOutputDefaultValues(boolean outputDefaultValues) { outputDefaultValues_ = outputDefaultValues; return this; } public JSONFObjectFormatter setOutputClassNames(boolean outputClassNames) { outputClassNames_ = outputClassNames; return this; } public JSONFObjectFormatter setOutputDefaultClassNames(boolean f) { outputDefaultClassNames_ = f; return this; } public JSONFObjectFormatter setMultiLine(boolean ml) { multiLineOutput_ = ml; return this; } protected void outputKey(String val) { if ( quoteKeys_ ) appendQuote(); b_.append(val); if ( quoteKeys_ ) appendQuote(); } }
new_file: src/foam/lib/formatter/JSONFObjectFormatter.java
old_contents:
/** * @license * Copyright 2020 The FOAM Authors. All Rights Reserved. * http://www.apache.org/licenses/LICENSE-2.0 */ package foam.lib.formatter; import foam.core.ClassInfo; import foam.core.FObject; import foam.core.PropertyInfo; import foam.core.X; import foam.lib.json.OutputJSON; import foam.lib.PropertyPredicate; import foam.util.SafetyUtil; import java.lang.reflect.Array; import java.text.SimpleDateFormat; import java.util.*; /* To Make faster: 1. faster escaping 2. don't escape class names and property names 3. don't quote property keys 4. use short names 5. smaller format for enums and dates 6. have PropertyInfo output directly from primitive 7. support outputting directly to another Visitor, StringBuilder, OutputStream, etc. without converting to String. 8. Use Fast TimeStamper or similar */ public class JSONFObjectFormatter extends AbstractFObjectFormatter { // TODO: use fast timestamper? protected static ThreadLocal<SimpleDateFormat> sdf = new ThreadLocal<SimpleDateFormat>() { @Override protected SimpleDateFormat initialValue() { SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); df.setTimeZone(java.util.TimeZone.getTimeZone("UTC")); return df; } }; protected boolean quoteKeys_ = false; protected boolean outputShortNames_ = true; protected boolean outputDefaultValues_ = false; protected boolean multiLineOutput_ = false; protected boolean outputClassNames_ = true; protected boolean outputReadableDates_ = true; protected boolean outputDefaultClassNames_ = true; public static ThreadLocal<FObjectFormatter> getThreadLocal(X x, Boolean quoteKeys, Boolean outputClassNames, PropertyPredicate predicate) { return new ThreadLocal<FObjectFormatter>() { @Override protected JSONFObjectFormatter initialValue() { foam.lib.formatter.JSONFObjectFormatter formatter = new foam.lib.formatter.JSONFObjectFormatter(x); formatter.setQuoteKeys(quoteKeys); formatter.setOutputClassNames(outputClassNames); formatter.setPropertyPredicate(predicate); return formatter; } }; } public JSONFObjectFormatter(X x) { super(x); } public JSONFObjectFormatter() { super(); } protected void outputUndefined() { } protected void outputNull() { } public void output(String s) { if ( multiLineOutput_ && s.indexOf('\n') >= 0 ) { b_.append("\n\"\"\""); escapeAppend(s); b_.append("\"\"\""); } else { b_.append('"'); escapeAppend(s); b_.append('"'); } } public void escapeAppend(String s) { foam.lib.json.Util.escape(s, b_); } public void output(short val) { b_.append(val); } public void output(int val) { b_.append(val); } public void output(long val) { b_.append(val); } public void output(float val) { b_.append(val); } public void output(double val) { b_.append(val); } public void output(boolean val) { b_.append(val); } protected void outputNumber(Number value) { b_.append(value); } protected void outputBoolean(Boolean value) { output(value.booleanValue()); } public void output(String[] arr) { output((Object[]) arr); } public void output(Object[] array) { b_.append('['); for ( int i = 0 ; i < array.length ; i++ ) { output(array[i]); if ( i < array.length - 1 ) b_.append(','); } b_.append(']'); } public void output(byte[][] array) { b_.append('['); for ( int i = 0 ; i < array.length ; i++ ) { output(array[i]); if ( i < array.length - 1 ) b_.append(','); } b_.append(']'); } public void output(byte[] array) { output(foam.util.SecurityUtil.ByteArrayToHexString(array)); } public void output(Map map) { b_.append('{'); java.util.Iterator keys = map.keySet().iterator(); while ( keys.hasNext() ) { Object key = 
keys.next(); Object value = map.get(key); output(key == null ? "" : key.toString()); b_.append(':'); output(value); if ( keys.hasNext() ) b_.append(','); } b_.append('}'); } public void output(List list) { b_.append('['); java.util.Iterator iter = list.iterator(); while ( iter.hasNext() ) { output(iter.next()); if ( iter.hasNext() ) b_.append(','); } b_.append(']'); } protected void outputProperty(FObject o, PropertyInfo p) { outputKey(getPropertyName(p)); b_.append(':'); p.format(this, o); } /* public void outputMap(Object... values) { if ( values.length % 2 != 0 ) { throw new RuntimeException("Need even number of arguments for outputMap"); } b_.append("{"); int i = 0; while ( i < values.length ) { b_.append(beforeKey_()); b_.append(values[i++].toString()); b_.append(afterKey_()); b_.append(":"); output(values[i++]); if ( i < values.length ) b_.append(","); } b_.append("}"); } */ public void output(Enum<?> value) { output(value.ordinal()); // outputNumber(value.ordinal()); /* b_.append('{'); b_.append(beforeKey_()); b_.append("class"); b_.append(afterKey_()); b_.append(':'); output(value.getClass().getName()); b_.append(","); b_.append(beforeKey_()); b_.append("ordinal"); b_.append(afterKey_()); b_.append(':'); outputNumber(value.ordinal()); b_.append('}'); */ } public void output(Object value) { if ( value instanceof OutputJSON ) { ((OutputJSON) value).formatJSON(this); } else if ( value instanceof String ) { output((String) value); } else if ( value instanceof FObject ) { output((FObject) value); } else if ( value instanceof PropertyInfo) { output((PropertyInfo) value); } else if ( value instanceof ClassInfo ) { output((ClassInfo) value); } else if ( value instanceof Number ) { outputNumber((Number) value); } else if ( isArray(value) ) { if ( value.getClass().equals(byte[][].class) ) { output((byte[][]) value); } else if ( value instanceof byte[] ) { output((byte[]) value); } else { output((Object[]) value); } } else if ( value instanceof Boolean ) { outputBoolean((Boolean) value); } else if ( value instanceof Date ) { output((Date) value); } else if ( value instanceof Map ) { output((Map) value); } else if ( value instanceof List ) { output((List) value); } else if ( value instanceof Enum<?> ) { output((Enum<?>) value); } else /*if ( value == null )*/ { b_.append("null"); } } protected boolean isArray(Object value) { return value != null && ( value.getClass() != null ) && value.getClass().isArray(); } public void outputDateValue(Date date) { if ( outputReadableDates_ ) output(sdf.get().format(date)); else outputNumber(date.getTime()); } public void output(Date date) { output(date.getTime()); /* b_.append("{\"class\":\"__Timestamp__\",\"value\":"); outputDateValue(date); b_.append('}'); */ } protected Boolean maybeOutputProperty(FObject fo, PropertyInfo prop, boolean includeComma) { if ( ! outputDefaultValues_ && ! prop.isSet(fo) ) return false; Object value = prop.get(fo); if ( value == null || ( isArray(value) && Array.getLength(value) == 0 ) ) { return false; } if ( includeComma ) b_.append(','); if ( multiLineOutput_ ) addInnerNewline(); outputProperty(fo, prop); return true; } public void outputDelta(FObject oldFObject, FObject newFObject) { ClassInfo info = oldFObject.getClassInfo(); ClassInfo newInfo = newFObject.getClassInfo(); boolean outputComma = true; boolean isDiff = false; boolean isPropertyDiff = false; if ( ! 
oldFObject.equals(newFObject) ) { List axioms = getProperties(info); int size = axioms.size(); b_.append('{'); addInnerNewline(); for ( int i = 0 ; i < size ; i++ ) { PropertyInfo prop = (PropertyInfo) axioms.get(i); isPropertyDiff = maybeOutputPropertyDelta(oldFObject, newFObject, prop); if ( isPropertyDiff ) { if ( ! isDiff ) { if ( outputClassNames_ && outputDefaultClassNames_ ) { //output Class name outputKey("class"); b_.append(':'); output(newInfo.getId()); b_.append(','); } addInnerNewline(); PropertyInfo id = (PropertyInfo) newInfo.getAxiomByName("id"); outputProperty(newFObject, id); isDiff = true; // to output class names for references outputDefaultClassNames_ = true; } b_.append(','); addInnerNewline(); outputProperty(newFObject, prop); } } if ( isDiff ) { addInnerNewline(); b_.append('}'); } } } public void outputDelta(FObject oldFObject, FObject newFObject, ClassInfo defaultClass) { ClassInfo info = newFObject.getClassInfo(); if ( info == defaultClass ) outputDefaultClassNames_ = false; outputDelta(oldFObject, newFObject); } protected void addInnerNewline() { if ( multiLineOutput_ ) { b_.append('\n'); } } protected boolean maybeOutputPropertyDelta(FObject oldFObject, FObject newFObject, PropertyInfo prop) { return prop.compare(oldFObject, newFObject) != 0; } /* public void outputJSONJFObject(FObject o) { b_.append("p("); outputFObject(o); b_.append(")\r\n"); } */ public void output(FObject[] arr, ClassInfo defaultClass) { output(arr); } public void output(FObject[] arr) { } public void output(FObject o, ClassInfo defaultClass) { ClassInfo info = o.getClassInfo(); if ( info == defaultClass ) outputDefaultClassNames_ = false; output(o); } public void output(FObject o) { ClassInfo info = o.getClassInfo(); b_.append('{'); addInnerNewline(); if ( outputClassNames_ && outputDefaultClassNames_ ) { outputKey("class"); b_.append(':'); output(info.getId()); } boolean outputComma = outputClassNames_ && outputDefaultClassNames_; // to output class names for references outputDefaultClassNames_ = true; List axioms = getProperties(info); int size = axioms.size(); for ( int i = 0 ; i < size ; i++ ) { PropertyInfo prop = (PropertyInfo) axioms.get(i); outputComma = maybeOutputProperty(o, prop, outputComma) || outputComma; } addInnerNewline(); b_.append('}'); } public void output(PropertyInfo prop) { b_.append('{'); outputKey("class"); b_.append(':'); output("__Property__"); b_.append(','); outputKey("forClass_"); b_.append(':'); output(prop.getClassInfo().getId()); b_.append(','); outputKey("name"); b_.append(':'); output(getPropertyName(prop)); // if ( quoteKeys_ ) { // output(getPropertyName(prop)); // } else { // outputRawString(getPropertyName(prop)); // } b_.append('}'); } public void outputJson(String str) { if ( ! quoteKeys_ ) str = str.replaceAll("\"class\"", "class"); outputFormattedString(str); } public void output(ClassInfo info) { outputKey(info.getId()); // b_.append('{'); // if ( quoteKeys_ ) b_.append(beforeKey_()); // b_.append("class"); // if ( quoteKeys_ ) b_.append(afterKey_()); // b_.append(":"); // b_.append("\"__Class__\""); // b_.append(":"); // b_.append("{\"class\":\"__Class__\",\"forClass_\":"); // output(info.getId()); // b_.append('}'); } protected void appendQuote() { b_.append('"'); } public String getPropertyName(PropertyInfo p) { return outputShortNames_ && ! SafetyUtil.isEmpty(p.getShortName()) ? 
p.getShortName() : p.getName(); } public void outputFormattedString(String str) { b_.append(str); } public JSONFObjectFormatter setQuoteKeys(boolean quoteKeys) { quoteKeys_ = quoteKeys; return this; } public JSONFObjectFormatter setOutputShortNames(boolean outputShortNames) { outputShortNames_ = outputShortNames; return this; } public JSONFObjectFormatter setOutputDefaultValues(boolean outputDefaultValues) { outputDefaultValues_ = outputDefaultValues; return this; } public JSONFObjectFormatter setOutputClassNames(boolean outputClassNames) { outputClassNames_ = outputClassNames; return this; } public JSONFObjectFormatter setOutputDefaultClassNames(boolean f) { outputDefaultClassNames_ = f; return this; } public JSONFObjectFormatter setMultiLine(boolean ml) { multiLineOutput_ = ml; return this; } protected void outputKey(String val) { if ( quoteKeys_ ) appendQuote(); b_.append(val); if ( quoteKeys_ ) appendQuote(); } }
message: Add additional method signatures for ThreadLocal
old_file: src/foam/lib/formatter/JSONFObjectFormatter.java
subject: Add additional method signatures for ThreadLocal
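Because each row stores the complete file text before and after the commit, the change summarized by message and subject (here, the added getThreadLocal overload) can be recovered by diffing old_contents against new_contents. A small illustrative sketch, assuming a record has already been loaded into a dict named row as in the earlier snippet:

```python
# Sketch: recover the textual change behind a record's commit message by
# diffing the stored before/after file contents. Assumes "row" is one record
# with the columns described above.
import difflib

def show_commit_diff(row, context=3):
    old_lines = row["old_contents"].splitlines(keepends=True)
    new_lines = row["new_contents"].splitlines(keepends=True)
    print("commit:", row["commit"], "-", row["subject"])
    for line in difflib.unified_diff(
        old_lines, new_lines,
        fromfile=row["old_file"], tofile=row["new_file"], n=context,
    ):
        print(line, end="")
```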
lang: Java
license: apache-2.0
commit: 50df1d979f88c47f6fab2ffb5574f58d891cbc9b
returncode: 0
repos: apache/lenya,apache/lenya,apache/lenya,apache/lenya
new_contents:
/* * Copyright 1999-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.lenya.config.impl; import java.awt.Component; import java.awt.Container; import java.awt.FlowLayout; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.util.Vector; import javax.swing.BoxLayout; import javax.swing.ButtonGroup; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JRadioButton; import javax.swing.JTextField; import org.apache.lenya.config.core.Configuration; import org.apache.lenya.config.core.FileConfiguration; import org.apache.lenya.config.core.Parameter; /** * A GUI tool to configure Lenya 1.4 build */ public class ConfigureGUI { protected static final Component Next = null; protected static final Component Previous = null; private JPanel contentPanel; private JPanel checkBoxPanel; private JPanel buttonPanel; private JCheckBox[] checkBoxes; private JLabel defaultValueLabel; private JLabel localValueLabel; private JLabel newLocalValueLabel; private JRadioButton radioButton1; private JRadioButton radioButton2; private JRadioButton radioButton3; private JLabel stepsLabel; private JLabel paraValueLabel; private JTextField localValueTextField; private JTextField defaultValueTextField; private JTextField newLocalValueTextField; private JComboBox DefaultValueComboBox; private JComboBox LocalValueComboBox; private JButton cancelButton; private JButton backButton; private JButton nextButton; private JButton saveButton; private JButton yesButton; private JButton noButton; private JButton exitButton; private Parameter[] params; private JFrame frame; private JLabel warning1; private JLabel warning2; private JLabel saveMessage; private int steps = 0; private String rootDir; public final static boolean RIGHT_TO_LEFT = false; private FileConfiguration buildProperties; public static void main(String[] args) { System.out .println("\nWelcome to the GUI to configure the building process of Apache Lenya"); if (args.length != 1) { System.err .println("No root dir specified (e.g. 
/home/USERNAME/src/lenya/trunk)!"); return; } String rootDir = args[0]; new ConfigureGUI(rootDir); } public ConfigureGUI(String rootDir) { this.rootDir = rootDir; System.out.println("Starting GUI ..."); buildProperties = new BuildPropertiesConfiguration(); buildProperties.setFilenameDefault(rootDir + "/build.properties"); buildProperties.setFilenameLocal(rootDir + "/local.build.properties"); Vector configs = new Vector(); configs.addElement(buildProperties); JFrame.setDefaultLookAndFeelDecorated(true); frame = new JFrame("Apache Lenya Configuration"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.getContentPane().setLayout(new FlowLayout(FlowLayout.LEFT)); for (int i = 0; i < configs.size(); i++) { Configuration config = (Configuration) configs.elementAt(i); config.read(); params = config.getConfigurableParameters(); } final Configuration config = (Configuration) configs.elementAt(0); config.read(); params = config.getConfigurableParameters(); contentPanel = new JPanel(); checkBoxPanel = new JPanel(); buttonPanel = new JPanel(); defaultValueLabel = new JLabel(); localValueLabel = new JLabel(); newLocalValueLabel = new JLabel(); defaultValueTextField = new JTextField(20); localValueTextField = new JTextField(20); newLocalValueTextField = new JTextField(20); DefaultValueComboBox = new JComboBox(); LocalValueComboBox = new JComboBox(); radioButton1 = new JRadioButton(); radioButton2 = new JRadioButton(); radioButton3 = new JRadioButton(); ButtonGroup g = new ButtonGroup(); cancelButton = new JButton(); backButton = new JButton(); nextButton = new JButton(); saveButton = new JButton(); noButton = new JButton(); yesButton = new JButton(); exitButton = new JButton(); warning1 = new JLabel(); warning2 = new JLabel(); saveMessage = new JLabel(); Container contentPane = frame.getContentPane(); contentPane.setLayout(new FlowLayout(FlowLayout.LEFT)); //TODO: Somehow this doesn't work //contentPane.setPreferredSize(new java.awt.Dimension(380, 182)); contentPanel.setLayout(new GridBagLayout()); GridBagConstraints c = new GridBagConstraints(); stepsLabel = new JLabel(); stepsLabel.setText("Parameters "); c.gridx = 0; c.gridy = 0; contentPanel.add(stepsLabel, c); checkBoxPanel = new JPanel(); checkBoxPanel.setLayout(new BoxLayout(checkBoxPanel, BoxLayout.Y_AXIS)); c.gridx = 0; c.gridy = 1; c.gridheight = 4; c.ipadx = 20; contentPanel.add(checkBoxPanel, c); checkBoxes = new JCheckBox[params.length]; for (int i = 0; i < params.length; ++i) { checkBoxes[i] = new JCheckBox(); checkBoxes[i].setEnabled(false); checkBoxes[i].setText(params[i].getName()); checkBoxes[0].setSelected(true); checkBoxPanel.add(checkBoxes[i]); } paraValueLabel = new JLabel(); c.gridx = 1; c.gridy = 0; c.gridwidth = 1; c.gridheight = 1; paraValueLabel = new JLabel("Parameter: " + params[0].getName()); contentPanel.add(paraValueLabel, c); defaultValueLabel.setText("Default Value:"); contentPanel.add(defaultValueLabel, new GridBagConstraints(1, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); contentPanel.add(defaultValueTextField, new GridBagConstraints(2, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); defaultValueTextField.setText(params[0].getDefaultValue()); defaultValueTextField.addMouseListener(new MouseListener() { public void mouseClicked(MouseEvent event) { radioButton1.setSelected(true); } public void mousePressed(MouseEvent event) { } public void mouseReleased(MouseEvent event) { } public void 
mouseEntered(MouseEvent event) { } public void mouseExited(MouseEvent event) { } }); contentPanel.add(radioButton1, new GridBagConstraints(3, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); g.add(radioButton1); localValueLabel.setText("Local Value:"); contentPanel.add(localValueLabel, new GridBagConstraints(1, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); contentPanel.add(localValueTextField, new GridBagConstraints(2, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); localValueTextField.setText(params[0].getLocalValue()); localValueTextField.addMouseListener(new MouseListener() { public void mouseClicked(MouseEvent event) { } public void mousePressed(MouseEvent event) { radioButton2.setSelected(true); } public void mouseReleased(MouseEvent event) { } public void mouseEntered(MouseEvent event) { } public void mouseExited(MouseEvent event) { } }); contentPanel.add(radioButton2, new GridBagConstraints(3, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); g.add(radioButton2); radioButton2.setSelected(true); newLocalValueLabel.setText("new Local Value:"); contentPanel.add(newLocalValueLabel, new GridBagConstraints(1, 3, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); contentPanel.add(newLocalValueTextField, new GridBagConstraints(2, 3, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); newLocalValueTextField.addMouseListener(new MouseListener() { public void mouseClicked(MouseEvent event) { } public void mousePressed(MouseEvent event) { radioButton3.setSelected(true); } public void mouseEntered(MouseEvent event) { } public void mouseExited(MouseEvent event) { } public void mouseReleased(MouseEvent event) { } }); contentPanel.add(radioButton3, new GridBagConstraints(3, 3, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); g.add(radioButton3); buttonPanel = new JPanel(); cancelButton.setText("Cancel"); contentPanel.add(cancelButton, new GridBagConstraints(1, 4, 1, 1, 0.0, 0.0, GridBagConstraints.SOUTH, GridBagConstraints.PAGE_END, new Insets(0, 0, 0, 0), 0, 0)); cancelButton.setPreferredSize(new java.awt.Dimension(74, 22)); cancelButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { int n = JOptionPane .showConfirmDialog((Component) null, "Do you want to Exit?", "Exit...", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE); if (n == JOptionPane.YES_OPTION) { System.exit(0); } } }); backButton.setText("<Back"); contentPanel.add(backButton, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.SOUTH, GridBagConstraints.PAGE_END, new Insets(0, 0, 0, 0), 0, 0)); backButton.setPreferredSize(new java.awt.Dimension(74, 22)); backButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { backButton.setEnabled(true); if (contentPanel.isVisible()) backButton.setEnabled(true); if (steps != params.length) { saveButton.setVisible(false); } moveBack(); } }); backButton.setEnabled(false); nextButton.setText("Next>"); contentPanel.add(nextButton, new GridBagConstraints(3, 4, 1, 1, 0.0, 0.0, GridBagConstraints.SOUTH, GridBagConstraints.PAGE_END, new Insets(0, 0, 0, 0), 0, 0)); nextButton.setPreferredSize(new java.awt.Dimension(74, 22)); 
nextButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { backButton.setEnabled(true); if (contentPanel.isVisible()) nextButton.setEnabled(true); moveNext(); } }); buttonPanel.add(cancelButton); buttonPanel.add(backButton); buttonPanel.add(nextButton); contentPane.add(contentPanel); contentPanel.add(buttonPanel, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); frame.pack(); /* int frameWidth = 570; int frameHeight = 250; frame.setSize(frameWidth, frameHeight); */ frame.setVisible(true); } public void moveBack() { steps--; frame.repaint(); nextButton.setVisible(true); checkFirst(); currentStep("down"); showNormalOptions(); comboBox(); checkLast(); newLocalValueTextField.setText(params[getStep()].getLocalValue()); } public void moveNext() { setLocalValue(); steps++; frame.repaint(); checkFirst(); currentStep("up"); showNormalOptions(); comboBox(); checkLast(); newLocalValueTextField.setText(""); } /** * Set local value depending on chosen value */ public void setLocalValue() { String tmpLocalValue = "TBD"; if (radioButton1.isSelected()) { tmpLocalValue = defaultValueTextField.getText(); System.out.println("Default Value: " + tmpLocalValue); } else if (radioButton2.isSelected()) { tmpLocalValue = localValueTextField.getText(); System.out.println("Local Value: " + tmpLocalValue); } else if (radioButton3.isSelected()) { tmpLocalValue = newLocalValueTextField.getText(); System.out.println("New Local Value: " + tmpLocalValue); } else { System.err.println("Fatal Error 0123456789!"); } params[steps].setLocalValue(tmpLocalValue); System.out.println("Temporary Local Value: " + params[steps].getLocalValue()); } public void currentStep(String direction) { if (direction.equals("up")) { for (int i = 1; i <= getStep(); ++i) { checkBoxes[i].setSelected(true); } } if (direction.equals("down")) { checkBoxes[getStep() + 1].setSelected(false); } } public void checkFirst() { if (getStep() == 0) { backButton.setEnabled(false); } else { backButton.setEnabled(true); } } public void checkLast() { saveButton = new JButton("Save"); warning1 = new JLabel("WARNING: Local configuration already exists!"); warning2 = new JLabel("Do you want to overwrite?"); if (getStep() == params.length - 1) { nextButton.setEnabled(false); nextButton.setVisible(false); buttonPanel.add(saveButton); contentPanel.add(buttonPanel, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); saveButton.setPreferredSize(new java.awt.Dimension(74, 22)); saveButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { setLocalValue(); showSaveScreen(); showWarningScreen(); } }); } else { nextButton.setEnabled(true); warning1.setVisible(false); warning2.setVisible(false); } } public void showNormalOptions() { if (steps < params.length) { defaultValueTextField.setText(params[steps].getDefaultValue()); localValueTextField.setText(params[steps].getLocalValue()); paraValueLabel.setText(params[steps].getName()); } } private void showSaveScreen() { paraValueLabel.setVisible(false); defaultValueLabel.setVisible(false); localValueLabel.setVisible(false); newLocalValueLabel.setVisible(false); defaultValueTextField.setVisible(false); localValueTextField.setVisible(false); newLocalValueTextField.setVisible(false); radioButton1.setVisible(false); radioButton2.setVisible(false); radioButton3.setVisible(false); cancelButton.setVisible(false); 
nextButton.setVisible(false); backButton.setVisible(false); saveButton.setVisible(false); yesButton.setVisible(true); noButton.setVisible(true); warning1.setVisible(true); warning2.setVisible(true); } private void showWarningScreen() { contentPanel.add(warning1, new GridBagConstraints(2, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets( 0, 0, 0, 0), 0, 0)); contentPanel.add(warning2, new GridBagConstraints(2, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets( 0, 0, 0, 0), 0, 0)); yesButton.setText("yes"); buttonPanel.add(yesButton); contentPanel.add(buttonPanel, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); yesButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { showYesScreen(); } }); noButton.setText("no"); buttonPanel.add(noButton); contentPanel.add(buttonPanel, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); noButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { showNoScreen(); } }); } private void showYesScreen() { buildProperties.writeLocal(); saveMessage.setText("Successful saved to: " + rootDir + "/local.build.properties"); contentPanel.add(saveMessage, new GridBagConstraints(2, 3, 1, 1, 0.0, 0.0, GridBagConstraints.SOUTH, GridBagConstraints.PAGE_END, new Insets(0, 0, 0, 0), 0, 0)); saveMessage.setVisible(true); yesButton.setVisible(false); noButton.setVisible(false); warning1.setVisible(false); warning2.setVisible(false); exitButton.setPreferredSize(new java.awt.Dimension(74, 22)); exitButton.setText("Exit"); contentPanel.add(exitButton, new GridBagConstraints(2, 3 + 1, 1, 1, 0.0, 0.0, GridBagConstraints.SOUTH, GridBagConstraints.PAGE_END, new Insets(0, 0, 0, 0), 0, 0)); exitButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { System.exit(0); } }); contentPanel.revalidate(); contentPanel.repaint(); } private void showNoScreen() { saveMessage.setVisible(false); paraValueLabel.setVisible(true); defaultValueLabel.setVisible(true); localValueLabel.setVisible(true); newLocalValueLabel.setVisible(true); defaultValueTextField.setVisible(true); localValueTextField.setVisible(true); newLocalValueTextField.setVisible(true); radioButton1.setVisible(true); radioButton2.setVisible(true); radioButton3.setVisible(true); cancelButton.setVisible(true); nextButton.setVisible(false); backButton.setVisible(true); saveButton.setVisible(true); yesButton.setVisible(false); noButton.setVisible(false); saveMessage.setVisible(false); } public void comboBox() { /* Hardcoded, we cant know where the dropdown could be... 
*/ if (steps == 3) { warning1.setVisible(false); warning2.setVisible(false); defaultValueTextField.setVisible(false); localValueTextField.setVisible(false); newLocalValueTextField.setVisible(true); String labels[] = { "Jetty", "Tomcat", "Wls" }; DefaultValueComboBox = new JComboBox(labels); DefaultValueComboBox.setMaximumRowCount(3); contentPanel.add(DefaultValueComboBox, new GridBagConstraints(2, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); LocalValueComboBox = new JComboBox(labels); LocalValueComboBox.setMaximumRowCount(3); contentPanel.add(LocalValueComboBox, new GridBagConstraints(2, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); contentPanel.revalidate(); } else { DefaultValueComboBox.setVisible(false); LocalValueComboBox.setVisible(false); defaultValueTextField.setVisible(true); localValueTextField.setVisible(true); warning1.setVisible(false); warning2.setVisible(false); } } public int getStep() { return steps; } }
new_file: tools/configure/src/java/org/apache/lenya/config/impl/ConfigureGUI.java
old_contents:
/* * Copyright 1999-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.lenya.config.impl; import java.awt.Component; import java.awt.Container; import java.awt.FlowLayout; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.TextField; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.util.Vector; import javax.swing.BoxLayout; import javax.swing.ButtonGroup; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JRadioButton; import javax.swing.JTextField; import org.apache.lenya.config.core.Configuration; import org.apache.lenya.config.core.FileConfiguration; import org.apache.lenya.config.core.Parameter; /** * This code was edited or generated using CloudGarden's Jigloo SWT/Swing GUI * Builder, which is free for non-commercial use. If Jigloo is being used * commercially (ie, by a corporation, company or business for any purpose * whatever) then you should purchase a license for each developer using Jigloo. * Please visit www.cloudgarden.com for details. Use of Jigloo implies * acceptance of these licensing terms. A COMMERCIAL LICENSE HAS NOT BEEN * PURCHASED FOR THIS MACHINE, SO JIGLOO OR THIS CODE CANNOT BE USED LEGALLY FOR * ANY CORPORATE OR COMMERCIAL PURPOSE. */ public class ConfigureGUI { protected static final Component Next = null; protected static final Component Previous = null; private JPanel contentPanel; private JPanel checkBoxPanel; private JPanel buttonPanel; private JPanel messageSavePanel; private JCheckBox[] checkBoxes; private JLabel defaultValueLabel; private JLabel localValueLabel; private JLabel newLocalValueLabel; private JRadioButton RadioButton1; private JRadioButton RadioButton2; private JRadioButton RadioButton3; private JLabel stepsLabel; private JLabel paraValueLabel; private JTextField LocalValueTextField; private JTextField DefaultValueTextField; private JTextField newLocalValueTextField; private JComboBox DefaultValueComboBox; private JComboBox LocalValueComboBox; private JButton cancelButton; private JButton backButton; private JButton nextButton; private JButton saveButton; private JButton yesButton; private JButton noButton; private Parameter[] params; private JFrame frame; private JLabel warning1; private JLabel warning2; private JLabel saveMessage; private int steps = 0; public final static boolean RIGHT_TO_LEFT = false; private FileConfiguration buildProperties; public static void main(String[] args) { System.out .println("\nWelcome to the GUI to configure the building process of Apache Lenya"); if (args.length != 1) { System.err .println("No root dir specified (e.g. 
/home/USERNAME/src/lenya/trunk)!"); return; } String rootDir = args[0]; new ConfigureGUI(rootDir); } public ConfigureGUI(String rootDir) { System.out.println("Starting GUI ..."); buildProperties = new BuildPropertiesConfiguration(); buildProperties.setFilenameDefault(rootDir + "/build.properties"); buildProperties.setFilenameLocal(rootDir + "/local.build.properties"); Vector configs = new Vector(); configs.addElement(buildProperties); JFrame.setDefaultLookAndFeelDecorated(true); frame = new JFrame("Apache Lenya Configuration"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.getContentPane().setLayout(new FlowLayout(FlowLayout.LEFT)); for (int i = 0; i < configs.size(); i++) { Configuration config = (Configuration) configs.elementAt(i); config.read(); params = config.getConfigurableParameters(); } Configuration config = (Configuration) configs.elementAt(0); config.read(); params = config.getConfigurableParameters(); contentPanel = new JPanel(); checkBoxPanel = new JPanel(); buttonPanel = new JPanel(); defaultValueLabel = new JLabel(); localValueLabel = new JLabel(); newLocalValueLabel = new JLabel(); DefaultValueTextField = new JTextField(); LocalValueTextField = new JTextField(); newLocalValueTextField = new JTextField(); DefaultValueComboBox = new JComboBox(); LocalValueComboBox = new JComboBox(); RadioButton1 = new JRadioButton(); RadioButton2 = new JRadioButton(); RadioButton3 = new JRadioButton(); ButtonGroup g = new ButtonGroup(); cancelButton = new JButton(); backButton = new JButton(); nextButton = new JButton(); saveButton = new JButton(); noButton = new JButton(); yesButton = new JButton(); warning1 = new JLabel(); warning2 = new JLabel(); saveMessage = new JLabel(); messageSavePanel = new JPanel(); Container contentPane = frame.getContentPane(); contentPane.setLayout(new FlowLayout(FlowLayout.LEFT)); //TODO: Somehow this doesn't work //contentPane.setPreferredSize(new java.awt.Dimension(380, 182)); contentPanel.setLayout(new GridBagLayout()); GridBagConstraints c = new GridBagConstraints(); stepsLabel = new JLabel(); stepsLabel.setText("Parameters "); c.gridx = 0; c.gridy = 0; contentPanel.add(stepsLabel, c); checkBoxPanel = new JPanel(); checkBoxPanel.setLayout(new BoxLayout(checkBoxPanel, BoxLayout.Y_AXIS)); c.gridx = 0; c.gridy = 1; c.gridheight = 4; c.ipadx = 20; contentPanel.add(checkBoxPanel, c); checkBoxes = new JCheckBox[params.length]; for (int i = 0; i < params.length; ++i) { checkBoxes[i] = new JCheckBox(); checkBoxes[i].setEnabled(false); checkBoxes[i].setText(params[i].getName()); checkBoxes[0].setSelected(true); checkBoxPanel.add(checkBoxes[i]); } paraValueLabel = new JLabel(); c.gridx = 1; c.gridy = 0; c.gridwidth = 1; c.gridheight = 1; paraValueLabel = new JLabel("Parameter: " + params[0].getName()); contentPanel.add(paraValueLabel, c); defaultValueLabel.setText("Default Value:"); contentPanel.add(defaultValueLabel, new GridBagConstraints(1, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); contentPanel.add(DefaultValueTextField, new GridBagConstraints(2, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); DefaultValueTextField.setText(params[0].getDefaultValue()); DefaultValueTextField.addMouseListener(new MouseListener() { public void mouseClicked(MouseEvent event) { RadioButton1.setSelected(true); } public void mousePressed(MouseEvent event) { } public void mouseReleased(MouseEvent event) { } public void mouseEntered(MouseEvent event) { } public 
void mouseExited(MouseEvent event) { } }); contentPanel.add(RadioButton1, new GridBagConstraints(3, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); g.add(RadioButton1); localValueLabel.setText("Local Value:"); contentPanel.add(localValueLabel, new GridBagConstraints(1, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); contentPanel.add(LocalValueTextField, new GridBagConstraints(2, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); LocalValueTextField.setText(params[0].getLocalValue()); LocalValueTextField.addMouseListener(new MouseListener() { public void mouseClicked(MouseEvent event) { } public void mousePressed(MouseEvent event) { RadioButton2.setSelected(true); } public void mouseReleased(MouseEvent event) { } public void mouseEntered(MouseEvent event) { } public void mouseExited(MouseEvent event) { } }); contentPanel.add(RadioButton2, new GridBagConstraints(3, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); g.add(RadioButton2); RadioButton2.setSelected(true); newLocalValueLabel.setText("new Local Value:"); contentPanel.add(newLocalValueLabel, new GridBagConstraints(1, 3, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); contentPanel.add(newLocalValueTextField, new GridBagConstraints(2, 3, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); newLocalValueTextField.addMouseListener(new MouseListener() { public void mouseClicked(MouseEvent event) { } public void mousePressed(MouseEvent event) { RadioButton3.setSelected(true); } public void mouseEntered(MouseEvent event) { } public void mouseExited(MouseEvent event) { } public void mouseReleased(MouseEvent event) { } }); contentPanel.add(RadioButton3, new GridBagConstraints(3, 3, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); g.add(RadioButton3); buttonPanel = new JPanel(); cancelButton.setText("Cancel"); contentPanel.add(cancelButton, new GridBagConstraints(1, 4, 1, 1, 0.0, 0.0, GridBagConstraints.SOUTH, GridBagConstraints.PAGE_END, new Insets(0, 0, 0, 0), 0, 0)); cancelButton.setPreferredSize(new java.awt.Dimension(74, 22)); cancelButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { int n = JOptionPane .showConfirmDialog((Component) null, "Do you want to Exit?", "Exit...", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE); if (n == JOptionPane.YES_OPTION) { System.exit(0); } } }); backButton.setText("<Back"); contentPanel.add(backButton, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.SOUTH, GridBagConstraints.PAGE_END, new Insets(0, 0, 0, 0), 0, 0)); backButton.setPreferredSize(new java.awt.Dimension(74, 22)); backButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { backButton.setEnabled(true); if (contentPanel.isVisible()) backButton.setEnabled(true); if (steps != params.length) { saveButton.setVisible(false); } moveBack(); } }); backButton.setEnabled(false); nextButton.setText("Next>"); contentPanel.add(nextButton, new GridBagConstraints(3, 4, 1, 1, 0.0, 0.0, GridBagConstraints.SOUTH, GridBagConstraints.PAGE_END, new Insets(0, 0, 0, 0), 0, 0)); nextButton.setPreferredSize(new java.awt.Dimension(74, 22)); nextButton.addActionListener(new ActionListener() { public void 
actionPerformed(ActionEvent e) { backButton.setEnabled(true); if (contentPanel.isVisible()) nextButton.setEnabled(true); moveNext(); } }); buttonPanel.add(cancelButton); buttonPanel.add(backButton); buttonPanel.add(nextButton); contentPane.add(contentPanel); contentPanel.add(buttonPanel, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); int frameWidth = 570; int frameHeight = 250; frame.pack(); frame.setSize(frameWidth, frameHeight); frame.setVisible(true); } public void moveBack() { steps--; frame.repaint(); nextButton.setVisible(true); checkFirst(); currentStep("down"); showNormalOptions(); comboBox(); checkLast(); newLocalValueTextField.setText(""); } public void moveNext() { steps++; frame.repaint(); checkFirst(); currentStep("up"); showNormalOptions(); comboBox(); checkLast(); newLocalValueTextField.setText(""); } public void currentStep(String direction) { if (direction.equals("up")) { for (int i = 1; i <= getStep(); ++i) { checkBoxes[i].setSelected(true); } } if (direction.equals("down")) { checkBoxes[getStep() + 1].setSelected(false); } } public void checkFirst() { if (getStep() == 0) { backButton.setEnabled(false); } else { backButton.setEnabled(true); } } public void checkLast() { System.out.println(getStep()); saveButton = new JButton("Save"); warning1 = new JLabel("WARNING: Local configuration already exists!"); warning2 = new JLabel("Do you want to overwrite?"); if (getStep() == params.length - 1) { nextButton.setEnabled(false); nextButton.setVisible(false); buttonPanel.add(saveButton); contentPanel.add(buttonPanel, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); saveButton.setPreferredSize(new java.awt.Dimension(74, 22)); saveButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { showSaveScreen(); showWarningScreen(); } }); } else { nextButton.setEnabled(true); warning1.setVisible(false); warning2.setVisible(false); } } public void showNormalOptions() { if (steps < params.length) { DefaultValueTextField.setText(params[steps].getDefaultValue()); LocalValueTextField.setText(params[steps].getLocalValue()); paraValueLabel.setText(params[steps].getName()); } } private void showSaveScreen() { paraValueLabel.setVisible(false); defaultValueLabel.setVisible(false); localValueLabel.setVisible(false); newLocalValueLabel.setVisible(false); DefaultValueTextField.setVisible(false); LocalValueTextField.setVisible(false); newLocalValueTextField.setVisible(false); RadioButton1.setVisible(false); RadioButton2.setVisible(false); RadioButton3.setVisible(false); cancelButton.setVisible(false); nextButton.setVisible(false); backButton.setVisible(false); saveButton.setVisible(false); yesButton.setVisible(true); noButton.setVisible(true); warning1.setVisible(true); warning2.setVisible(true); // frame.repaint(); } private void showWarningScreen() { contentPanel.add(warning1, new GridBagConstraints(2, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets( 0, 0, 0, 0), 0, 0)); contentPanel.add(warning2, new GridBagConstraints(2, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets( 0, 0, 0, 0), 0, 0)); yesButton.setText("yes"); buttonPanel.add(yesButton); contentPanel.add(buttonPanel, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); yesButton.addActionListener(new 
ActionListener() { public void actionPerformed(ActionEvent e) { showYesScreen(); } }); noButton.setText("no"); buttonPanel.add(noButton); contentPanel.add(buttonPanel, new GridBagConstraints(2, 4, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); noButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { showNoScreen(); } }); } private void showYesScreen() { buildProperties.writeLocal(); saveMessage.setText("saved!"); // messageSavePanel.add(saveMessage); contentPanel.add(saveMessage, new GridBagConstraints(3, 5, 0, 0, 0.0, 0.0, GridBagConstraints.SOUTH, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); saveMessage.setVisible(true); contentPanel.revalidate(); contentPanel.repaint(); System.out.println("saved"); } private void showNoScreen() { saveMessage.setVisible(false); paraValueLabel.setVisible(true); defaultValueLabel.setVisible(true); localValueLabel.setVisible(true); newLocalValueLabel.setVisible(true); DefaultValueTextField.setVisible(true); LocalValueTextField.setVisible(true); newLocalValueTextField.setVisible(true); RadioButton1.setVisible(true); RadioButton2.setVisible(true); RadioButton3.setVisible(true); cancelButton.setVisible(true); nextButton.setVisible(false); backButton.setVisible(true); saveButton.setVisible(true); yesButton.setVisible(false); noButton.setVisible(false); saveMessage.setVisible(false); } public void comboBox() { /* Hardcoded, we cant know where the dropdown could be... */ if (steps == 3) { DefaultValueTextField.setVisible(false); LocalValueTextField.setVisible(false); newLocalValueTextField.setVisible(true); String labels[] = { "Jetty", "Tomcat", "Wls" }; DefaultValueComboBox = new JComboBox(labels); DefaultValueComboBox.setMaximumRowCount(3); contentPanel.add(DefaultValueComboBox, new GridBagConstraints(2, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); LocalValueComboBox = new JComboBox(labels); LocalValueComboBox.setMaximumRowCount(3); contentPanel.add(LocalValueComboBox, new GridBagConstraints(2, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); contentPanel.revalidate(); } else { DefaultValueComboBox.setVisible(false); LocalValueComboBox.setVisible(false); DefaultValueTextField.setVisible(true); LocalValueTextField.setVisible(true); warning1.setVisible(false); warning2.setVisible(false); } } public int getStep() { return steps; } }
bugs fixed
tools/configure/src/java/org/apache/lenya/config/impl/ConfigureGUI.java
bugs fixed
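The wizard in the ConfigureGUI record above drives all of its navigation from a single step counter: Back and Next mutate the counter, then the first-step and last-step checks decide which buttons stay enabled. Below is a minimal, self-contained sketch of that pattern; the class name, labels, and step count are illustrative stand-ins, not taken from the Lenya source.

```java
import javax.swing.*;
import java.awt.FlowLayout;

/** Minimal wizard-style navigation: one step counter drives both buttons. */
public class StepNavigationDemo {
    private int step = 0;
    private final int lastStep = 4;                    // illustrative number of steps
    private final JLabel stepLabel = new JLabel("Step 0");
    private final JButton back = new JButton("Back");
    private final JButton next = new JButton("Next");

    private StepNavigationDemo() {
        JFrame frame = new JFrame("Wizard");
        frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        frame.setLayout(new FlowLayout());
        back.addActionListener(e -> move(-1));
        next.addActionListener(e -> move(+1));
        frame.add(back);
        frame.add(stepLabel);
        frame.add(next);
        refresh();
        frame.pack();
        frame.setVisible(true);
    }

    /** Move the counter and re-evaluate which buttons are legal, like moveBack()/moveNext() above. */
    private void move(int delta) {
        step += delta;
        refresh();
    }

    private void refresh() {
        stepLabel.setText("Step " + step);
        back.setEnabled(step > 0);           // same idea as checkFirst()
        next.setEnabled(step < lastStep);    // same idea as checkLast()
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(StepNavigationDemo::new);
    }
}
```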
Java
apache-2.0
297c37faf10a206176294e573b3904fbddcdc26d
0
prachidamle/cattle,wlan0/cattle,sonchang/cattle,stresler/cattle,OnePaaS/cattle,rancher/cattle,rancher/cattle,ubiquityhosting/rancher_cattle,rancherio/cattle,cjellick/cattle,dx9/cattle,OnePaaS/cattle,prachidamle/cattle,sonchang/cattle,Cerfoglg/cattle,sonchang/cattle,prachidamle/cattle,OnePaaS/cattle,wlan0/cattle,rancherio/cattle,stresler/cattle,wlan0/cattle,vincent99/cattle,vincent99/cattle,cloudnautique/cattle,dx9/cattle,cloudnautique/cattle,kaos/cattle,dx9/cattle,cjellick/cattle,ubiquityhosting/rancher_cattle,ubiquityhosting/rancher_cattle,jimengliu/cattle,jimengliu/cattle,stresler/cattle,kaos/cattle,rancher/cattle,OnePaaS/cattle,prachidamle/cattle,cloudnautique/cattle,cjellick/cattle,ubiquityhosting/rancher_cattle,kaos/cattle,rancherio/cattle,cjellick/cattle,Cerfoglg/cattle,jimengliu/cattle,cloudnautique/cattle,sonchang/cattle,vincent99/cattle,dx9/cattle,Cerfoglg/cattle
package io.cattle.platform.servicediscovery.deployment.impl; import io.cattle.platform.allocator.constraint.AffinityConstraintDefinition.AffinityOps; import io.cattle.platform.allocator.constraint.ContainerLabelAffinityConstraint; import io.cattle.platform.core.constants.CommonStatesConstants; import io.cattle.platform.core.constants.InstanceConstants; import io.cattle.platform.core.model.Environment; import io.cattle.platform.core.model.Host; import io.cattle.platform.core.model.Instance; import io.cattle.platform.core.model.Service; import io.cattle.platform.docker.constants.DockerInstanceConstants; import io.cattle.platform.servicediscovery.api.constants.ServiceDiscoveryConstants; import io.cattle.platform.servicediscovery.api.util.ServiceDiscoveryUtil; import io.cattle.platform.servicediscovery.deployment.DeploymentUnitInstance; import io.cattle.platform.servicediscovery.deployment.DeploymentUnitInstanceIdGenerator; import io.cattle.platform.servicediscovery.deployment.InstanceUnit; import io.cattle.platform.servicediscovery.deployment.impl.DeploymentManagerImpl.DeploymentServiceContext; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; public class DeploymentUnit { public static class SidekickType { public static List<SidekickType> supportedTypes = new ArrayList<>(); public static final SidekickType DATA = new SidekickType(DockerInstanceConstants.FIELD_VOLUMES_FROM, ServiceDiscoveryConstants.FIELD_DATA_VOLUMES_LAUNCH_CONFIG, true); public static final SidekickType NETWORK = new SidekickType(DockerInstanceConstants.FIELD_NETWORK_CONTAINER_ID, ServiceDiscoveryConstants.FIELD_NETWORK_LAUNCH_CONFIG, false); public String launchConfigFieldName; public String launchConfigType; public boolean isList; public SidekickType(String launchConfigFieldName, String launchConfigType, boolean isList) { this.launchConfigFieldName = launchConfigFieldName; this.launchConfigType = launchConfigType; this.isList = isList; supportedTypes.add(this); } } String uuid; DeploymentServiceContext context; Map<String, String> unitLabels = new HashMap<>(); Map<Long, DeploymentUnitService> svc = new HashMap<>(); private static List<String> supportedUnitLabels = Arrays .asList(ServiceDiscoveryConstants.LABEL_SERVICE_REQUESTED_HOST_ID); public DeploymentUnit() { } /* * This constructor is called to add existing unit */ public DeploymentUnit(DeploymentServiceContext context, String uuid, List<Service> services, List<DeploymentUnitInstance> deploymentUnitInstances, Map<String, String> labels) { this(context, uuid, services); for (DeploymentUnitInstance instance : deploymentUnitInstances) { Service service = instance.getService(); DeploymentUnitService duService = svc.get(service.getId()); duService.addDeploymentInstance(instance.getLaunchConfigName(), instance); } setLabels(labels); } protected DeploymentUnit(DeploymentServiceContext context, String uuid, List<Service> services) { this.context = context; this.uuid = uuid; for (Service service : services) { this.svc.put(service.getId(), new DeploymentUnitService(service, ServiceDiscoveryUtil.getServiceLaunchConfigNames(service), context)); } } /* * this constructor is called to create a new unit */ public DeploymentUnit(DeploymentServiceContext context, List<Service> services, Map<String, String> labels) { this(context, UUID.randomUUID().toString(), services); setLabels(labels); } protected void setLabels(Map<String, String> labels) { if (labels != null) { for (String label : 
labels.keySet()) { if (supportedUnitLabels.contains(label)) { this.unitLabels.put(label, labels.get(label)); } } } } private void createMissingUnitInstances(Map<Long, DeploymentUnitInstanceIdGenerator> svcInstanceIdGenerator) { for (Long serviceId : svc.keySet()) { DeploymentUnitService duService = svc.get(serviceId); duService.createMissingInstances(svcInstanceIdGenerator.get(serviceId), uuid); } } public boolean isError() { /* * This should check for instances with an error transitioning state */ for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { if (instance.isError()) { return true; } } return false; } private boolean isHostActive() { for (DeploymentUnitInstance deployUnitInstance : getDeploymentUnitInstances()) { if (!(deployUnitInstance instanceof InstanceUnit)) { // external deployment units do not have instances return true; } Instance instance = ((InstanceUnit)deployUnitInstance).getInstance(); if (instance != null && instance.getId() != null) { // TODO: Performance-wise, this is really bad! Especially, since we already // know what host is going down from the host trigger. // Check whether this instance has been deployed and if so, what is the state of the // host? Host host = context.exposeMapDao.getHostForInstance(instance.getId()); if (host != null) { if (CommonStatesConstants.REMOVING.equals(host.getState()) || CommonStatesConstants.REMOVED.equals(host.getState()) || CommonStatesConstants.PURGING.equals(host.getState()) || CommonStatesConstants.PURGED.equals(host.getState())) { return false; } } } } return true; } public void remove() { /* * Delete all instances. This should be non-blocking (don't wait) */ for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { instance.remove(); } } public void cleanupUnit() { /* * Delete all the units having missing dependencies */ for (Long serviceId : svc.keySet()) { DeploymentUnitService duService = svc.get(serviceId); duService.cleanupInstancesWithMissingDependencies(); } } public void stop() { /* * stops all instances. This should be non-blocking (don't wait) */ for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { instance.stop(); } } public void start(Map<Long, DeploymentUnitInstanceIdGenerator> svcInstanceIdGenerator) { /* * Start the instances in the correct order depending on the volumes from. * Attempt to start things in parallel, but if not possible (like volumes-from) then start each service * sequentially. * * If there are three services but only two containers, create the third * * If one of the containers service health is bad, then create another one (but don't delete the existing). 
* */ createMissingUnitInstances(svcInstanceIdGenerator); boolean hasSidekicks = false; for (Long serviceId : svc.keySet()) { DeploymentUnitService duService = svc.get(serviceId); List<String> launchConfigNames = duService.getLaunchConfigNames(); if (launchConfigNames.size() > 1) { hasSidekicks = true; } for (String launchConfigName : launchConfigNames) { createInstance(launchConfigName, duService.getService()); } } // don't wait for instance allocate unless sidekicks are present if (hasSidekicks) { this.waitForAllocate(); } } protected void waitForAllocate() { for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { instance.waitForAllocate(); } } public void waitForStart(){ for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { instance.waitForStart(); } } protected DeploymentUnitInstance createInstance(String launchConfigName, Service service) { List<Integer> volumesFromInstanceIds = getSidekickContainersId(service, launchConfigName, SidekickType.DATA); List<Integer> networkContainerIds = getSidekickContainersId(service, launchConfigName, SidekickType.NETWORK); Integer networkContainerId = networkContainerIds.isEmpty() ? null : networkContainerIds.get(0); getDeploymentUnitInstance(service, launchConfigName).waitForNotTransitioning(); getDeploymentUnitInstance(service, launchConfigName) .createAndStart( populateDeployParams(getDeploymentUnitInstance(service, launchConfigName), volumesFromInstanceIds, networkContainerId)); return getDeploymentUnitInstance(service, launchConfigName); } @SuppressWarnings("unchecked") protected List<Integer> getSidekickContainersId(Service service, String launchConfigName, SidekickType sidekickType) { List<Integer> sidekickInstanceIds = new ArrayList<>(); Object sidekickInstances = ServiceDiscoveryUtil.getLaunchConfigObject(service, launchConfigName, sidekickType.launchConfigFieldName); if (sidekickInstances != null) { if (sidekickType.isList) { sidekickInstanceIds.addAll((List<Integer>)sidekickInstances); } else { sidekickInstanceIds.add((Integer) sidekickInstances); } } Object sidekicksLaunchConfigObj = ServiceDiscoveryUtil.getLaunchConfigObject(service, launchConfigName, sidekickType.launchConfigType); if (sidekicksLaunchConfigObj != null) { List<String> sidekicksLaunchConfigNames = new ArrayList<>(); if (sidekickType.isList) { sidekicksLaunchConfigNames.addAll((List<String>) sidekicksLaunchConfigObj); } else { sidekicksLaunchConfigNames.add(sidekicksLaunchConfigObj.toString()); } for (String sidekickLaunchConfigName : sidekicksLaunchConfigNames) { // check if the service is present in the service map (it can be referenced, but removed already) if (sidekickLaunchConfigName.toString().equalsIgnoreCase(service.getName())) { sidekickLaunchConfigName = ServiceDiscoveryConstants.PRIMARY_LAUNCH_CONFIG_NAME; } DeploymentUnitInstance sidekickUnitInstance = getDeploymentUnitInstance(service, sidekickLaunchConfigName.toString()); if (sidekickUnitInstance != null && sidekickUnitInstance instanceof InstanceUnit) { if (((InstanceUnit) sidekickUnitInstance).getInstance() == null) { // request new instance creation sidekickUnitInstance = createInstance(sidekickUnitInstance.getLaunchConfigName(), service); } // wait for start sidekickUnitInstance.createAndStart(new HashMap<String, Object>()); sidekickUnitInstance.waitForStart(); sidekickInstanceIds.add(((InstanceUnit) sidekickUnitInstance).getInstance().getId() .intValue()); } } } return sidekickInstanceIds; } public boolean isStarted() { for (DeploymentUnitInstance instance : 
getDeploymentUnitInstances()) { if (!instance.isStarted()) { return false; } } return true; } public boolean isHealthCheckInitializing() { for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { if (instance.isHealthCheckInitializing()) { return true; } } return false; } public boolean isUnhealthy() { // returns list of instances that need cleanup (having bad health) for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { if (instance.isUnhealthy()) { return true; } } if (!isHostActive()) { return true; } return false; } public boolean isComplete() { for (DeploymentUnitService duService : svc.values()) { if (!duService.isComplete()) { return false; } } return true; } protected Map<String, Object> populateDeployParams(DeploymentUnitInstance instance, List<Integer> volumesFromInstanceIds, Integer networkContainerId) { Map<String, Object> deployParams = new HashMap<>(); Map<String, String> instanceLabels = getLabels(instance); deployParams.put(InstanceConstants.FIELD_LABELS, instanceLabels); if (volumesFromInstanceIds != null && !volumesFromInstanceIds.isEmpty()) { deployParams.put(DockerInstanceConstants.FIELD_VOLUMES_FROM, volumesFromInstanceIds); } Object hostId = instanceLabels.get(ServiceDiscoveryConstants.LABEL_SERVICE_REQUESTED_HOST_ID); if (hostId != null) { deployParams.put(InstanceConstants.FIELD_REQUESTED_HOST_ID, hostId); } if (networkContainerId != null) { deployParams.put(DockerInstanceConstants.FIELD_NETWORK_CONTAINER_ID, networkContainerId); } return deployParams; } protected Map<String, String> getLabels(DeploymentUnitInstance instance) { Map<String, String> labels = new HashMap<>(); String serviceName = instance.getService().getName(); if (!ServiceDiscoveryConstants.PRIMARY_LAUNCH_CONFIG_NAME.equals(instance.getLaunchConfigName())) { serviceName = serviceName + '/' + instance.getLaunchConfigName(); } String envName = context.objectManager.loadResource(Environment.class, instance.getService().getEnvironmentId()) .getName(); labels.put(ServiceDiscoveryConstants.LABEL_STACK_NAME, envName); labels.put(ServiceDiscoveryConstants.LABEL_STACK_SERVICE_NAME, envName + "/" + serviceName); // LEGACY: keeping backwards compatibility with 'project' labels.put(ServiceDiscoveryConstants.LABEL_PROJECT_NAME, envName); labels.put(ServiceDiscoveryConstants.LABEL_PROJECT_SERVICE_NAME, envName + "/" + serviceName); /* * Put label 'io.rancher.deployment.unit=this.uuid' on each one. This way * we can reference a set of containers later. 
*/ labels.put(ServiceDiscoveryConstants.LABEL_SERVICE_DEPLOYMENT_UNIT, uuid); /* * Put label with launch config name */ labels.put(ServiceDiscoveryConstants.LABEL_SERVICE_LAUNCH_CONFIG, instance.getLaunchConfigName()); /* * Put affinity constraint on every instance to let allocator know that they should go to the same host */ // TODO: Might change labels into a Multimap or add a service function to handle merging String containerLabelSoftAffinityKey = ContainerLabelAffinityConstraint.LABEL_HEADER_AFFINITY_CONTAINER_LABEL + AffinityOps.SOFT_EQ.getLabelSymbol(); labels.put(containerLabelSoftAffinityKey, ServiceDiscoveryConstants.LABEL_SERVICE_DEPLOYMENT_UNIT + "=" + this.uuid); labels.putAll(this.unitLabels); return labels; } public Map<String, String> getLabels() { return unitLabels; } protected List<DeploymentUnitInstance> getDeploymentUnitInstances() { List<DeploymentUnitInstance> instances = new ArrayList<>(); for (Long serviceId : svc.keySet()) { DeploymentUnitService duService = svc.get(serviceId); instances.addAll(duService.getInstances()); } return instances; } protected DeploymentUnitInstance getDeploymentUnitInstance(Service service, String launchConfigName) { DeploymentUnitService duService = svc.get(service.getId()); return duService.getInstance(launchConfigName); } }
code/iaas/service-discovery/server/src/main/java/io/cattle/platform/servicediscovery/deployment/impl/DeploymentUnit.java
package io.cattle.platform.servicediscovery.deployment.impl; import io.cattle.platform.allocator.constraint.AffinityConstraintDefinition.AffinityOps; import io.cattle.platform.allocator.constraint.ContainerLabelAffinityConstraint; import io.cattle.platform.core.constants.CommonStatesConstants; import io.cattle.platform.core.constants.InstanceConstants; import io.cattle.platform.core.model.Environment; import io.cattle.platform.core.model.Host; import io.cattle.platform.core.model.Instance; import io.cattle.platform.core.model.Service; import io.cattle.platform.docker.constants.DockerInstanceConstants; import io.cattle.platform.servicediscovery.api.constants.ServiceDiscoveryConstants; import io.cattle.platform.servicediscovery.api.util.ServiceDiscoveryUtil; import io.cattle.platform.servicediscovery.deployment.DeploymentUnitInstance; import io.cattle.platform.servicediscovery.deployment.DeploymentUnitInstanceIdGenerator; import io.cattle.platform.servicediscovery.deployment.InstanceUnit; import io.cattle.platform.servicediscovery.deployment.impl.DeploymentManagerImpl.DeploymentServiceContext; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; public class DeploymentUnit { public static class SidekickType { public static List<SidekickType> supportedTypes = new ArrayList<>(); public static final SidekickType DATA = new SidekickType(DockerInstanceConstants.FIELD_VOLUMES_FROM, ServiceDiscoveryConstants.FIELD_DATA_VOLUMES_LAUNCH_CONFIG, true); public static final SidekickType NETWORK = new SidekickType(DockerInstanceConstants.FIELD_NETWORK_CONTAINER_ID, ServiceDiscoveryConstants.FIELD_NETWORK_LAUNCH_CONFIG, false); public String launchConfigFieldName; public String launchConfigType; public boolean isList; public SidekickType(String launchConfigFieldName, String launchConfigType, boolean isList) { this.launchConfigFieldName = launchConfigFieldName; this.launchConfigType = launchConfigType; this.isList = isList; supportedTypes.add(this); } } String uuid; DeploymentServiceContext context; Map<String, String> unitLabels = new HashMap<>(); Map<Long, DeploymentUnitService> svc = new HashMap<>(); private static List<String> supportedUnitLabels = Arrays .asList(ServiceDiscoveryConstants.LABEL_SERVICE_REQUESTED_HOST_ID); public DeploymentUnit() { } /* * This constructor is called to add existing unit */ public DeploymentUnit(DeploymentServiceContext context, String uuid, List<Service> services, List<DeploymentUnitInstance> deploymentUnitInstances, Map<String, String> labels) { this(context, uuid, services); for (DeploymentUnitInstance instance : deploymentUnitInstances) { Service service = instance.getService(); DeploymentUnitService duService = svc.get(service.getId()); duService.addDeploymentInstance(instance.getLaunchConfigName(), instance); } setLabels(labels); } protected DeploymentUnit(DeploymentServiceContext context, String uuid, List<Service> services) { this.context = context; this.uuid = uuid; for (Service service : services) { this.svc.put(service.getId(), new DeploymentUnitService(service, ServiceDiscoveryUtil.getServiceLaunchConfigNames(service), context)); } } /* * this constructor is called to create a new unit */ public DeploymentUnit(DeploymentServiceContext context, List<Service> services, Map<String, String> labels) { this(context, UUID.randomUUID().toString(), services); setLabels(labels); } protected void setLabels(Map<String, String> labels) { if (labels != null) { for (String label : 
labels.keySet()) { if (supportedUnitLabels.contains(label)) { this.unitLabels.put(label, labels.get(label)); } } } } private void createMissingUnitInstances(Map<Long, DeploymentUnitInstanceIdGenerator> svcInstanceIdGenerator) { for (Long serviceId : svc.keySet()) { DeploymentUnitService duService = svc.get(serviceId); duService.createMissingInstances(svcInstanceIdGenerator.get(serviceId), uuid); } } public boolean isError() { /* * This should check for instances with an error transitioning state */ for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { if (instance.isError()) { return true; } } return false; } private boolean isHostActive() { for (DeploymentUnitInstance deployUnitInstance : getDeploymentUnitInstances()) { if (!(deployUnitInstance instanceof InstanceUnit)) { // external deployment units do not have instances return true; } Instance instance = ((InstanceUnit)deployUnitInstance).getInstance(); if (instance != null && instance.getId() != null) { // TODO: Performance-wise, this is really bad! Especially, since we already // know what host is going down from the host trigger. // Check whether this instance has been deployed and if so, what is the state of the // host? Host host = context.exposeMapDao.getHostForInstance(instance.getId()); if (host != null) { if (CommonStatesConstants.REMOVING.equals(host.getState()) || CommonStatesConstants.REMOVED.equals(host.getState()) || CommonStatesConstants.PURGING.equals(host.getState()) || CommonStatesConstants.PURGED.equals(host.getState())) { return false; } } } } return true; } public void remove() { /* * Delete all instances. This should be non-blocking (don't wait) */ for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { instance.remove(); } } public void cleanupUnit() { /* * Delete all the units having missing dependencies */ for (Long serviceId : svc.keySet()) { DeploymentUnitService duService = svc.get(serviceId); duService.cleanupInstancesWithMissingDependencies(); } } public void stop() { /* * stops all instances. This should be non-blocking (don't wait) */ for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { instance.stop(); } } public void start(Map<Long, DeploymentUnitInstanceIdGenerator> svcInstanceIdGenerator) { /* * Start the instances in the correct order depending on the volumes from. * Attempt to start things in parallel, but if not possible (like volumes-from) then start each service * sequentially. * * If there are three services but only two containers, create the third * * If one of the containers service health is bad, then create another one (but don't delete the existing). 
* */ createMissingUnitInstances(svcInstanceIdGenerator); for (Long serviceId : svc.keySet()) { DeploymentUnitService duService = svc.get(serviceId); for (String launchConfigName : duService.getLaunchConfigNames()) { createInstance(launchConfigName, duService.getService()); } } this.waitForAllocate(); } protected void waitForAllocate() { for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { instance.waitForAllocate(); } } public void waitForStart(){ for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { instance.waitForStart(); } } protected DeploymentUnitInstance createInstance(String launchConfigName, Service service) { List<Integer> volumesFromInstanceIds = getSidekickContainersId(service, launchConfigName, SidekickType.DATA); List<Integer> networkContainerIds = getSidekickContainersId(service, launchConfigName, SidekickType.NETWORK); Integer networkContainerId = networkContainerIds.isEmpty() ? null : networkContainerIds.get(0); getDeploymentUnitInstance(service, launchConfigName).waitForNotTransitioning(); getDeploymentUnitInstance(service, launchConfigName) .createAndStart( populateDeployParams(getDeploymentUnitInstance(service, launchConfigName), volumesFromInstanceIds, networkContainerId)); return getDeploymentUnitInstance(service, launchConfigName); } @SuppressWarnings("unchecked") protected List<Integer> getSidekickContainersId(Service service, String launchConfigName, SidekickType sidekickType) { List<Integer> sidekickInstanceIds = new ArrayList<>(); Object sidekickInstances = ServiceDiscoveryUtil.getLaunchConfigObject(service, launchConfigName, sidekickType.launchConfigFieldName); if (sidekickInstances != null) { if (sidekickType.isList) { sidekickInstanceIds.addAll((List<Integer>)sidekickInstances); } else { sidekickInstanceIds.add((Integer) sidekickInstances); } } Object sidekicksLaunchConfigObj = ServiceDiscoveryUtil.getLaunchConfigObject(service, launchConfigName, sidekickType.launchConfigType); if (sidekicksLaunchConfigObj != null) { List<String> sidekicksLaunchConfigNames = new ArrayList<>(); if (sidekickType.isList) { sidekicksLaunchConfigNames.addAll((List<String>) sidekicksLaunchConfigObj); } else { sidekicksLaunchConfigNames.add(sidekicksLaunchConfigObj.toString()); } for (String sidekickLaunchConfigName : sidekicksLaunchConfigNames) { // check if the service is present in the service map (it can be referenced, but removed already) if (sidekickLaunchConfigName.toString().equalsIgnoreCase(service.getName())) { sidekickLaunchConfigName = ServiceDiscoveryConstants.PRIMARY_LAUNCH_CONFIG_NAME; } DeploymentUnitInstance sidekickUnitInstance = getDeploymentUnitInstance(service, sidekickLaunchConfigName.toString()); if (sidekickUnitInstance != null && sidekickUnitInstance instanceof InstanceUnit) { if (((InstanceUnit) sidekickUnitInstance).getInstance() == null) { // request new instance creation sidekickUnitInstance = createInstance(sidekickUnitInstance.getLaunchConfigName(), service); } // wait for start sidekickUnitInstance.createAndStart(new HashMap<String, Object>()); sidekickUnitInstance.waitForStart(); sidekickInstanceIds.add(((InstanceUnit) sidekickUnitInstance).getInstance().getId() .intValue()); } } } return sidekickInstanceIds; } public boolean isStarted() { for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { if (!instance.isStarted()) { return false; } } return true; } public boolean isHealthCheckInitializing() { for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { if 
(instance.isHealthCheckInitializing()) { return true; } } return false; } public boolean isUnhealthy() { // returns list of instances that need cleanup (having bad health) for (DeploymentUnitInstance instance : getDeploymentUnitInstances()) { if (instance.isUnhealthy()) { return true; } } if (!isHostActive()) { return true; } return false; } public boolean isComplete() { for (DeploymentUnitService duService : svc.values()) { if (!duService.isComplete()) { return false; } } return true; } protected Map<String, Object> populateDeployParams(DeploymentUnitInstance instance, List<Integer> volumesFromInstanceIds, Integer networkContainerId) { Map<String, Object> deployParams = new HashMap<>(); Map<String, String> instanceLabels = getLabels(instance); deployParams.put(InstanceConstants.FIELD_LABELS, instanceLabels); if (volumesFromInstanceIds != null && !volumesFromInstanceIds.isEmpty()) { deployParams.put(DockerInstanceConstants.FIELD_VOLUMES_FROM, volumesFromInstanceIds); } Object hostId = instanceLabels.get(ServiceDiscoveryConstants.LABEL_SERVICE_REQUESTED_HOST_ID); if (hostId != null) { deployParams.put(InstanceConstants.FIELD_REQUESTED_HOST_ID, hostId); } if (networkContainerId != null) { deployParams.put(DockerInstanceConstants.FIELD_NETWORK_CONTAINER_ID, networkContainerId); } return deployParams; } protected Map<String, String> getLabels(DeploymentUnitInstance instance) { Map<String, String> labels = new HashMap<>(); String serviceName = instance.getService().getName(); if (!ServiceDiscoveryConstants.PRIMARY_LAUNCH_CONFIG_NAME.equals(instance.getLaunchConfigName())) { serviceName = serviceName + '/' + instance.getLaunchConfigName(); } String envName = context.objectManager.loadResource(Environment.class, instance.getService().getEnvironmentId()) .getName(); labels.put(ServiceDiscoveryConstants.LABEL_STACK_NAME, envName); labels.put(ServiceDiscoveryConstants.LABEL_STACK_SERVICE_NAME, envName + "/" + serviceName); // LEGACY: keeping backwards compatibility with 'project' labels.put(ServiceDiscoveryConstants.LABEL_PROJECT_NAME, envName); labels.put(ServiceDiscoveryConstants.LABEL_PROJECT_SERVICE_NAME, envName + "/" + serviceName); /* * Put label 'io.rancher.deployment.unit=this.uuid' on each one. This way * we can reference a set of containers later. 
*/ labels.put(ServiceDiscoveryConstants.LABEL_SERVICE_DEPLOYMENT_UNIT, uuid); /* * Put label with launch config name */ labels.put(ServiceDiscoveryConstants.LABEL_SERVICE_LAUNCH_CONFIG, instance.getLaunchConfigName()); /* * Put affinity constraint on every instance to let allocator know that they should go to the same host */ // TODO: Might change labels into a Multimap or add a service function to handle merging String containerLabelSoftAffinityKey = ContainerLabelAffinityConstraint.LABEL_HEADER_AFFINITY_CONTAINER_LABEL + AffinityOps.SOFT_EQ.getLabelSymbol(); labels.put(containerLabelSoftAffinityKey, ServiceDiscoveryConstants.LABEL_SERVICE_DEPLOYMENT_UNIT + "=" + this.uuid); labels.putAll(this.unitLabels); return labels; } public Map<String, String> getLabels() { return unitLabels; } protected List<DeploymentUnitInstance> getDeploymentUnitInstances() { List<DeploymentUnitInstance> instances = new ArrayList<>(); for (Long serviceId : svc.keySet()) { DeploymentUnitService duService = svc.get(serviceId); instances.addAll(duService.getInstances()); } return instances; } protected DeploymentUnitInstance getDeploymentUnitInstance(Service service, String launchConfigName) { DeploymentUnitService duService = svc.get(service.getId()); return duService.getInstance(launchConfigName); } }
service.activate: don't wait for instance allocate unless service has sidekicks
code/iaas/service-discovery/server/src/main/java/io/cattle/platform/servicediscovery/deployment/impl/DeploymentUnit.java
service.activate: don't wait for instance allocate
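The diff captured by this record (new_contents versus old_contents above) is small but easy to lose in the flattened source: start() now notes whether any service in the unit carries more than its primary launch config, and only blocks on allocation in that case. The condensed, self-contained sketch below shows the idea with simplified stand-in types; it is not the real cattle API.

```java
import java.util.List;
import java.util.Map;

/**
 * Sketch of the change described above: only block on allocation when a
 * deployment unit actually has sidekick launch configs. All names here are
 * simplified stand-ins for the cattle types.
 */
public class AllocateWaitSketch {

    interface Unit {
        Map<Long, List<String>> launchConfigNamesByService();  // serviceId -> launch config names
        void createInstance(Long serviceId, String launchConfigName);
        void waitForAllocate();                                 // blocking call we want to skip
    }

    static void start(Unit unit) {
        boolean hasSidekicks = false;
        for (Map.Entry<Long, List<String>> e : unit.launchConfigNamesByService().entrySet()) {
            // a service with more than the primary launch config has sidekick containers
            if (e.getValue().size() > 1) {
                hasSidekicks = true;
            }
            for (String launchConfigName : e.getValue()) {
                unit.createInstance(e.getKey(), launchConfigName);
            }
        }
        // before the change this wait was unconditional; plain services now skip it
        if (hasSidekicks) {
            unit.waitForAllocate();
        }
    }
}
```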
Java
bsd-3-clause
f2a1d479ce26353c401d66d6f93d10eb233c858d
0
bennn/geogig,smesdaghi/geogig,smesdaghi/geogig,bennn/geogig,bennn/geogig,smesdaghi/geogig,msieger/geogig,mtCarto/geogig,jodygarnett/GeoGig,marcusthebrown/geogig,msieger/geogig,jodygarnett/GeoGig,msieger/geogig,tsauerwein/geogig,marcusthebrown/geogig,mtCarto/geogig,tsauerwein/geogig,tsauerwein/geogig,jdgarrett/geogig,mtCarto/geogig,marcusthebrown/geogig,jdgarrett/geogig,jdgarrett/geogig,jodygarnett/GeoGig
/* Copyright (c) 2013 Boundless and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * https://www.eclipse.org/org/documents/edl-v10.html * * Contributors: * Victor Olaya (Boundless) - initial implementation */ package org.locationtech.geogig.osm.internal.history; import static com.google.common.base.Optional.fromNullable; import static com.google.common.base.Preconditions.checkArgument; import static javax.xml.stream.XMLStreamConstants.END_DOCUMENT; import static javax.xml.stream.XMLStreamConstants.END_ELEMENT; import static javax.xml.stream.XMLStreamConstants.START_ELEMENT; import static org.locationtech.geogig.osm.internal.history.ParsingUtils.parseDateTime; import java.io.InputStream; import java.util.Iterator; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import com.google.common.collect.AbstractIterator; import com.google.common.collect.ImmutableSet; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.Point; /** * Example changeset download: * * <pre> * <code> * <?xml version="1.0" encoding="UTF-8"?> * <osmChange version="0.6" generator="Example" copyright="GeoGig contributors" attribution="http://geogig.org" license="http://geogig.org"> * <create> * <node id="1234" lat="50.3" lon="16.0" changeset="1" user="fred" uid="1" visible="true" timestamp="2014-09-04T03:30:00Z" version="1"> * <tag k="name" v="Tag"/> * <tag k="amenity" v="restaurant"/> * </node> * </create> * </osmChange> * </code> * </pre> */ class ChangesetContentsScanner { private static ImmutableSet<String> CHANGE_TAGS = ImmutableSet.of( Change.Type.create.toString(), Change.Type.modify.toString(), Change.Type.delete.toString()); private static ImmutableSet<String> PRIMITIVE_TAGS = ImmutableSet.of("node", "way", "relation"); private static final GeometryFactory GEOMFACT = new GeometryFactory(); public Iterator<Change> parse(InputStream changesetDownloadStream) throws XMLStreamException { final XMLStreamReader reader; reader = XMLInputFactory.newFactory().createXMLStreamReader(changesetDownloadStream, "UTF-8"); // position reader at first change, if any reader.nextTag(); reader.require(START_ELEMENT, null, "osmChange"); Iterator<Change> iterator = new AbstractIterator<Change>() { @Override protected Change computeNext() { Change next; try { if (findNextChange(reader)) { next = parseChange(reader); } else { return super.endOfData(); } } catch (XMLStreamException e) { System.err.println("Error parsing change, ignoring and continuing " + "with next change if possible. 
" + e.getMessage()); next = computeNext(); } return next; } }; return iterator; } private boolean findNextChange(XMLStreamReader reader) throws XMLStreamException { int eventType = reader.getEventType(); do { if (eventType == START_ELEMENT) { String tag = reader.getLocalName(); if (CHANGE_TAGS.contains(tag)) { return true; } } reader.next(); eventType = reader.getEventType(); } while (eventType != END_DOCUMENT); return false; } /** * Example changeset: * * <pre> * <code> * <?xml version="1.0" encoding="UTF-8"?> * <osmChange version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/"> * <create> * <relation id="4386" visible="true" timestamp="2009-11-21T09:02:09Z" user="jttt" uid="48" version="1" changeset="1864"> * <tag k="type" v="aoeua"/> * </relation> * </create> * <modify> * <relation id="4385" visible="true" timestamp="2009-11-21T09:05:32Z" user="jttt" uid="48" version="2" changeset="1864"> * <member type="relation" ref="4384" role=""/> * <tag k="type" v="parent"/> * </relation> * </modify> * <delete> * <way id="49391" visible="false" timestamp="2009-11-21T09:05:32Z" user="jttt" uid="48" version="3" changeset="1864"/> * </delete> * <delete> * <node id="1082496" changeset="1864" user="jttt" uid="48" visible="false" timestamp="2009-11-21T09:05:57Z" version="2"/> * </delete> * <create> * <way id="49393" visible="true" timestamp="2009-11-21T09:12:53Z" user="jttt" uid="48" version="1" changeset="1864"> * <nd ref="1082500"/> * <nd ref="1082501"/> * <nd ref="1082502"/> * <nd ref="1082503"/> * </way> * </create> * <modify> * <relation id="4394" visible="true" timestamp="2009-11-21T09:21:31Z" user="jttt" uid="48" version="2" changeset="1864"> * <member type="relation" ref="4393" role=""/> * <member type="relation" ref="4395" role=""/> * <member type="relation" ref="4396" role=""/> * <tag k="eouaoeu" v="oeueaoeu"/> * </relation> * </modify> <create> * <relation id="4390" visible="true" timestamp="2009-11-21T09:12:53Z" user="jttt" uid="48" version="1" changeset="1864"> * <tag k="type" v="aeou"/> * </relation> * </create> * <create> * <relation id="4391" visible="true" timestamp="2009-11-21T09:12:53Z" user="jttt" uid="48" version="1" changeset="1864"> * <tag k="type" v="aoeuau"/> * </relation> * </create> * <create> * <relation id="4392" visible="true" timestamp="2009-11-21T09:13:55Z" user="jttt" uid="48" version="1" changeset="1864"> * <tag k="type" v="aeou"/> * </relation> * </create> * <create> * <relation id="4393" visible="true" timestamp="2009-11-21T09:18:06Z" user="jttt" uid="48" version="1" changeset="1864"> * <member type="way" ref="49393" role=""/> * <tag k="type" v="aeua"/> * </relation> * </create> * <create> * <relation id="4394" visible="true" timestamp="2009-11-21T09:18:49Z" user="jttt" uid="48" version="1" changeset="1864"> * <member type="relation" ref="4393" role=""/> * <tag k="eouaoeu" v="oeueaoeu"/> * </relation> * </create> * </osmChange> * </code> * </pre> */ private Change parseChange(XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, null); final String changeName = reader.getLocalName(); checkArgument(CHANGE_TAGS.contains(changeName)); final Change.Type type = Change.Type.valueOf(reader.getLocalName()); reader.nextTag(); reader.require(START_ELEMENT, null, null); final String primitiveName = reader.getLocalName(); checkArgument(PRIMITIVE_TAGS.contains(primitiveName)); Primitive primitive = 
parsePrimitive(reader); reader.require(END_ELEMENT, null, primitiveName); reader.nextTag(); reader.require(END_ELEMENT, null, changeName); Change change = new Change(type, primitive); return change; } Primitive parsePrimitive(XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, null); final String primitiveName = reader.getLocalName(); checkArgument(PRIMITIVE_TAGS.contains(primitiveName)); Primitive primitive = inferrPrimitive(reader); primitive.setId(Long.valueOf(reader.getAttributeValue(null, "id"))); primitive.setVisible(Boolean.valueOf(reader.getAttributeValue(null, "visible"))); primitive.setTimestamp(parseDateTime(reader.getAttributeValue(null, "timestamp"))); primitive.setUserName(reader.getAttributeValue(null, "user")); Long uid = Long.valueOf(fromNullable(reader.getAttributeValue(null, "uid")).or("-1")); primitive.setUserId(uid); Integer version = Integer.valueOf(fromNullable(reader.getAttributeValue(null, "version")) .or("1")); primitive.setVersion(version); primitive.setChangesetId(Long.valueOf(reader.getAttributeValue(null, "changeset"))); if (primitive instanceof Node) { Node node = (Node) primitive; String lat = reader.getAttributeValue(null, "lat"); String lon = reader.getAttributeValue(null, "lon"); // may be null in case of a delete change if (lat != null && lon != null) { double x = Double.valueOf(lon); double y = Double.valueOf(lat); Point location = GEOMFACT.createPoint(new Coordinate(x, y)); node.setLocation(location); } parseNodeContents(node, reader); } else if (primitive instanceof Way) { Way way = (Way) primitive; parseWayContents(way, reader); } else { Relation relation = (Relation) primitive; parseRelationContents(relation, reader); } reader.require(END_ELEMENT, null, primitiveName); return primitive; } /** * @param node * @param reader * @throws XMLStreamException */ private void parseNodeContents(Node node, XMLStreamReader reader) throws XMLStreamException { while (true) { int tag = reader.next(); if (tag == END_ELEMENT) { String tagName = reader.getLocalName(); if (tagName.equals("node")) { break; } } else if (tag == START_ELEMENT) { String tagName = reader.getLocalName(); if ("tag".equals(tagName)) { parseTag(node, reader); reader.require(END_ELEMENT, null, "tag"); } } else if (tag == END_DOCUMENT) { throw new IllegalStateException("premature end of document"); } } reader.require(END_ELEMENT, null, "node"); } /** * @param way * @param reader * @throws XMLStreamException */ private void parseWayContents(Way way, XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, "way"); while (true) { int tag = reader.next(); if (tag == END_ELEMENT) { String tagName = reader.getLocalName(); if (tagName.equals("way")) { break; } } else if (tag == START_ELEMENT) { String tagName = reader.getLocalName(); if ("tag".equals(tagName)) { parseTag(way, reader); reader.require(END_ELEMENT, null, "tag"); } else if ("nd".equals(tagName)) { long nodeRef = Long.valueOf(reader.getAttributeValue(null, "ref")); reader.nextTag(); reader.require(END_ELEMENT, null, "nd"); way.addNode(nodeRef); } } else if (tag == END_DOCUMENT) { throw new IllegalStateException("premature end of document"); } } reader.require(END_ELEMENT, null, "way"); } /** * @param relation * @param reader * @throws XMLStreamException */ private void parseRelationContents(Relation relation, XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, "relation"); while (true) { int tag = reader.next(); if (tag == END_ELEMENT) { 
String tagName = reader.getLocalName(); if (tagName.equals("relation")) { break; } } else if (tag == START_ELEMENT) { String tagName = reader.getLocalName(); if ("tag".equals(tagName)) { parseTag(relation, reader); reader.require(END_ELEMENT, null, "tag"); } else if ("member".equals(tagName)) { String type = reader.getAttributeValue(null, "type"); long ref = Long.valueOf(reader.getAttributeValue(null, "ref")); String role = reader.getAttributeValue(null, "role"); if ("".equals(role)) { role = null; } reader.nextTag(); reader.require(END_ELEMENT, null, "member"); Relation.Member member = new Relation.Member(type, ref, role); relation.addMember(member); } } else if (tag == END_DOCUMENT) { throw new IllegalStateException("premature end of document"); } } reader.require(END_ELEMENT, null, "relation"); } private void parseTag(Primitive primitive, XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, "tag"); String key = reader.getAttributeValue(null, "k"); String value = reader.getAttributeValue(null, "v"); primitive.getTags().put(key, value); reader.nextTag(); reader.require(END_ELEMENT, null, "tag"); } /** * @param reader * @return */ private Primitive inferrPrimitive(XMLStreamReader reader) { final String primitiveName = reader.getLocalName(); if ("node".equals(primitiveName)) { return new Node(); } else if ("way".equals(primitiveName)) { return new Way(); } else if ("relation".equals(primitiveName)) { return new Relation(); } throw new IllegalArgumentException("Unknown primitive tag: " + primitiveName); } }
src/osm/src/main/java/org/locationtech/geogig/osm/internal/history/ChangesetContentsScanner.java
/* Copyright (c) 2013 Boundless and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * https://www.eclipse.org/org/documents/edl-v10.html * * Contributors: * Victor Olaya (Boundless) - initial implementation */ package org.locationtech.geogig.osm.internal.history; import static com.google.common.base.Optional.fromNullable; import static com.google.common.base.Preconditions.checkArgument; import static javax.xml.stream.XMLStreamConstants.END_DOCUMENT; import static javax.xml.stream.XMLStreamConstants.END_ELEMENT; import static javax.xml.stream.XMLStreamConstants.START_ELEMENT; import static org.locationtech.geogig.osm.internal.history.ParsingUtils.parseDateTime; import java.io.InputStream; import java.util.Iterator; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import com.google.common.collect.AbstractIterator; import com.google.common.collect.ImmutableSet; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.Point; /** * Example changeset download: * * <pre> * <code> * <?xml version="1.0" encoding="UTF-8"?> * <osmChange version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/"> * <create> * <node id="968842" lat="48.2127428" lon="16.3521806" changeset="1624" user="zsebastian" uid="46" visible="true" timestamp="2009-10-31T21:41:32Z" version="1"> * <tag k="name" v="Bar Kino"/> * <tag k="amenity" v="cinema"/> * </node> * </create> * <modify> * <node id="969082" lat="48.2127428" lon="16.351245" changeset="1624" user="zsebastian" uid="46" visible="true" timestamp="2009-10-31T21:57:04Z" version="2"> * <tag k="amenity" v="restaurant"/> * </node> * </modify> * <modify> * <node id="969085" lat="48.213269" lon="16.3512021" changeset="1624" user="zsebastian" uid="46" visible="true" timestamp="2009-10-31T22:22:40Z" version="2"> * <tag k="name" v="Blah Pizza"/> * <tag k="amenity" v="restaurant"/> * </node> * </modify> * <delete> * <node id="969084" changeset="1624" user="zsebastian" uid="46" visible="false" timestamp="2009-10-31T22:22:40Z" version="2"> * <tag k="name" v="Foo Inn"/> * <tag k="amenity" v="restaurant"/> * </node> * </delete> * </osmChange> * </code> * </pre> */ class ChangesetContentsScanner { private static ImmutableSet<String> CHANGE_TAGS = ImmutableSet.of( Change.Type.create.toString(), Change.Type.modify.toString(), Change.Type.delete.toString()); private static ImmutableSet<String> PRIMITIVE_TAGS = ImmutableSet.of("node", "way", "relation"); private static final GeometryFactory GEOMFACT = new GeometryFactory(); public Iterator<Change> parse(InputStream changesetDownloadStream) throws XMLStreamException { final XMLStreamReader reader; reader = XMLInputFactory.newFactory().createXMLStreamReader(changesetDownloadStream, "UTF-8"); // position reader at first change, if any reader.nextTag(); reader.require(START_ELEMENT, null, "osmChange"); Iterator<Change> iterator = new AbstractIterator<Change>() { @Override protected Change computeNext() { Change next; try { if (findNextChange(reader)) { next = parseChange(reader); } else { return super.endOfData(); } } catch (XMLStreamException e) { System.err.println("Error parsing change, 
ignoring and continuing " + "with next change if possible. " + e.getMessage()); next = computeNext(); } return next; } }; return iterator; } private boolean findNextChange(XMLStreamReader reader) throws XMLStreamException { int eventType = reader.getEventType(); do { if (eventType == START_ELEMENT) { String tag = reader.getLocalName(); if (CHANGE_TAGS.contains(tag)) { return true; } } reader.next(); eventType = reader.getEventType(); } while (eventType != END_DOCUMENT); return false; } /** * Example changeset: * * <pre> * <code> * <?xml version="1.0" encoding="UTF-8"?> * <osmChange version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/"> * <create> * <relation id="4386" visible="true" timestamp="2009-11-21T09:02:09Z" user="jttt" uid="48" version="1" changeset="1864"> * <tag k="type" v="aoeua"/> * </relation> * </create> * <modify> * <relation id="4385" visible="true" timestamp="2009-11-21T09:05:32Z" user="jttt" uid="48" version="2" changeset="1864"> * <member type="relation" ref="4384" role=""/> * <tag k="type" v="parent"/> * </relation> * </modify> * <delete> * <way id="49391" visible="false" timestamp="2009-11-21T09:05:32Z" user="jttt" uid="48" version="3" changeset="1864"/> * </delete> * <delete> * <node id="1082496" changeset="1864" user="jttt" uid="48" visible="false" timestamp="2009-11-21T09:05:57Z" version="2"/> * </delete> * <create> * <way id="49393" visible="true" timestamp="2009-11-21T09:12:53Z" user="jttt" uid="48" version="1" changeset="1864"> * <nd ref="1082500"/> * <nd ref="1082501"/> * <nd ref="1082502"/> * <nd ref="1082503"/> * </way> * </create> * <modify> * <relation id="4394" visible="true" timestamp="2009-11-21T09:21:31Z" user="jttt" uid="48" version="2" changeset="1864"> * <member type="relation" ref="4393" role=""/> * <member type="relation" ref="4395" role=""/> * <member type="relation" ref="4396" role=""/> * <tag k="eouaoeu" v="oeueaoeu"/> * </relation> * </modify> <create> * <relation id="4390" visible="true" timestamp="2009-11-21T09:12:53Z" user="jttt" uid="48" version="1" changeset="1864"> * <tag k="type" v="aeou"/> * </relation> * </create> * <create> * <relation id="4391" visible="true" timestamp="2009-11-21T09:12:53Z" user="jttt" uid="48" version="1" changeset="1864"> * <tag k="type" v="aoeuau"/> * </relation> * </create> * <create> * <relation id="4392" visible="true" timestamp="2009-11-21T09:13:55Z" user="jttt" uid="48" version="1" changeset="1864"> * <tag k="type" v="aeou"/> * </relation> * </create> * <create> * <relation id="4393" visible="true" timestamp="2009-11-21T09:18:06Z" user="jttt" uid="48" version="1" changeset="1864"> * <member type="way" ref="49393" role=""/> * <tag k="type" v="aeua"/> * </relation> * </create> * <create> * <relation id="4394" visible="true" timestamp="2009-11-21T09:18:49Z" user="jttt" uid="48" version="1" changeset="1864"> * <member type="relation" ref="4393" role=""/> * <tag k="eouaoeu" v="oeueaoeu"/> * </relation> * </create> * </osmChange> * </code> * </pre> */ private Change parseChange(XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, null); final String changeName = reader.getLocalName(); checkArgument(CHANGE_TAGS.contains(changeName)); final Change.Type type = Change.Type.valueOf(reader.getLocalName()); reader.nextTag(); reader.require(START_ELEMENT, null, null); final String primitiveName = reader.getLocalName(); 
checkArgument(PRIMITIVE_TAGS.contains(primitiveName)); Primitive primitive = parsePrimitive(reader); reader.require(END_ELEMENT, null, primitiveName); reader.nextTag(); reader.require(END_ELEMENT, null, changeName); Change change = new Change(type, primitive); return change; } Primitive parsePrimitive(XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, null); final String primitiveName = reader.getLocalName(); checkArgument(PRIMITIVE_TAGS.contains(primitiveName)); Primitive primitive = inferrPrimitive(reader); primitive.setId(Long.valueOf(reader.getAttributeValue(null, "id"))); primitive.setVisible(Boolean.valueOf(reader.getAttributeValue(null, "visible"))); primitive.setTimestamp(parseDateTime(reader.getAttributeValue(null, "timestamp"))); primitive.setUserName(reader.getAttributeValue(null, "user")); Long uid = Long.valueOf(fromNullable(reader.getAttributeValue(null, "uid")).or("-1")); primitive.setUserId(uid); Integer version = Integer.valueOf(fromNullable(reader.getAttributeValue(null, "version")) .or("1")); primitive.setVersion(version); primitive.setChangesetId(Long.valueOf(reader.getAttributeValue(null, "changeset"))); if (primitive instanceof Node) { Node node = (Node) primitive; String lat = reader.getAttributeValue(null, "lat"); String lon = reader.getAttributeValue(null, "lon"); // may be null in case of a delete change if (lat != null && lon != null) { double x = Double.valueOf(lon); double y = Double.valueOf(lat); Point location = GEOMFACT.createPoint(new Coordinate(x, y)); node.setLocation(location); } parseNodeContents(node, reader); } else if (primitive instanceof Way) { Way way = (Way) primitive; parseWayContents(way, reader); } else { Relation relation = (Relation) primitive; parseRelationContents(relation, reader); } reader.require(END_ELEMENT, null, primitiveName); return primitive; } /** * @param node * @param reader * @throws XMLStreamException */ private void parseNodeContents(Node node, XMLStreamReader reader) throws XMLStreamException { while (true) { int tag = reader.next(); if (tag == END_ELEMENT) { String tagName = reader.getLocalName(); if (tagName.equals("node")) { break; } } else if (tag == START_ELEMENT) { String tagName = reader.getLocalName(); if ("tag".equals(tagName)) { parseTag(node, reader); reader.require(END_ELEMENT, null, "tag"); } } else if (tag == END_DOCUMENT) { throw new IllegalStateException("premature end of document"); } } reader.require(END_ELEMENT, null, "node"); } /** * @param way * @param reader * @throws XMLStreamException */ private void parseWayContents(Way way, XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, "way"); while (true) { int tag = reader.next(); if (tag == END_ELEMENT) { String tagName = reader.getLocalName(); if (tagName.equals("way")) { break; } } else if (tag == START_ELEMENT) { String tagName = reader.getLocalName(); if ("tag".equals(tagName)) { parseTag(way, reader); reader.require(END_ELEMENT, null, "tag"); } else if ("nd".equals(tagName)) { long nodeRef = Long.valueOf(reader.getAttributeValue(null, "ref")); reader.nextTag(); reader.require(END_ELEMENT, null, "nd"); way.addNode(nodeRef); } } else if (tag == END_DOCUMENT) { throw new IllegalStateException("premature end of document"); } } reader.require(END_ELEMENT, null, "way"); } /** * @param relation * @param reader * @throws XMLStreamException */ private void parseRelationContents(Relation relation, XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, 
"relation"); while (true) { int tag = reader.next(); if (tag == END_ELEMENT) { String tagName = reader.getLocalName(); if (tagName.equals("relation")) { break; } } else if (tag == START_ELEMENT) { String tagName = reader.getLocalName(); if ("tag".equals(tagName)) { parseTag(relation, reader); reader.require(END_ELEMENT, null, "tag"); } else if ("member".equals(tagName)) { String type = reader.getAttributeValue(null, "type"); long ref = Long.valueOf(reader.getAttributeValue(null, "ref")); String role = reader.getAttributeValue(null, "role"); if ("".equals(role)) { role = null; } reader.nextTag(); reader.require(END_ELEMENT, null, "member"); Relation.Member member = new Relation.Member(type, ref, role); relation.addMember(member); } } else if (tag == END_DOCUMENT) { throw new IllegalStateException("premature end of document"); } } reader.require(END_ELEMENT, null, "relation"); } private void parseTag(Primitive primitive, XMLStreamReader reader) throws XMLStreamException { reader.require(START_ELEMENT, null, "tag"); String key = reader.getAttributeValue(null, "k"); String value = reader.getAttributeValue(null, "v"); primitive.getTags().put(key, value); reader.nextTag(); reader.require(END_ELEMENT, null, "tag"); } /** * @param reader * @return */ private Primitive inferrPrimitive(XMLStreamReader reader) { final String primitiveName = reader.getLocalName(); if ("node".equals(primitiveName)) { return new Node(); } else if ("way".equals(primitiveName)) { return new Way(); } else if ("relation".equals(primitiveName)) { return new Relation(); } throw new IllegalArgumentException("Unknown primitive tag: " + primitiveName); } }
Remove odbl data example from javadoc
src/osm/src/main/java/org/locationtech/geogig/osm/internal/history/ChangesetContentsScanner.java
Remove odbl data example from javadoc
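The scanner in this record is a cursor-based StAX parser: it advances the XMLStreamReader until one of the change elements (create, modify, delete) sits under the cursor, then hands off to parseChange(). The standalone toy below exercises that same findNextChange() loop against a tiny hand-written osmChange document; everything outside the loop is illustrative scaffolding, not geogig code. It assumes Java 9+ for Set.of.

```java
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Set;

import static javax.xml.stream.XMLStreamConstants.END_DOCUMENT;
import static javax.xml.stream.XMLStreamConstants.START_ELEMENT;

/** Minimal StAX cursor scan in the style of findNextChange() above: advance the
 *  reader until one of the interesting elements is under the cursor. */
public class OsmChangeScanDemo {
    private static final Set<String> CHANGE_TAGS = Set.of("create", "modify", "delete");

    public static void main(String[] args) throws XMLStreamException {
        String xml = "<osmChange><create><node id=\"1\"/></create><delete><way id=\"2\"/></delete></osmChange>";
        XMLStreamReader reader = XMLInputFactory.newFactory()
                .createXMLStreamReader(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
        while (findNextChange(reader)) {
            System.out.println("found change element: " + reader.getLocalName());
            reader.next();                      // step past the element before scanning again
        }
    }

    static boolean findNextChange(XMLStreamReader reader) throws XMLStreamException {
        int eventType = reader.getEventType();
        do {
            if (eventType == START_ELEMENT && CHANGE_TAGS.contains(reader.getLocalName())) {
                return true;                    // cursor now rests on create/modify/delete
            }
            reader.next();
            eventType = reader.getEventType();
        } while (eventType != END_DOCUMENT);
        return false;
    }
}
```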
Java
mit
5368038d0c467547f46ce859be2f177c8c9df081
0
flocke/andOTP
/* * Copyright (C) 2017 Jakob Nixdorf * Copyright (C) 2015 Bruno Bierbaumer * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package org.shadowice.flocke.andotp.Activities; import android.animation.ObjectAnimator; import android.app.AlertDialog; import android.app.KeyguardManager; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.os.Handler; import android.preference.PreferenceManager; import android.support.constraint.ConstraintLayout; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.SearchView; import android.support.v7.widget.Toolbar; import android.support.v7.widget.helper.ItemTouchHelper; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.view.animation.LinearInterpolator; import android.widget.ArrayAdapter; import android.widget.EditText; import android.widget.ProgressBar; import android.widget.Spinner; import android.widget.Toast; import com.google.zxing.integration.android.IntentIntegrator; import com.google.zxing.integration.android.IntentResult; import org.shadowice.flocke.andotp.Utilities.Settings; import org.shadowice.flocke.andotp.View.EntriesCardAdapter; import org.shadowice.flocke.andotp.Database.Entry; import org.shadowice.flocke.andotp.View.FloatingActionMenu; import org.shadowice.flocke.andotp.View.ItemTouchHelper.SimpleItemTouchHelperCallback; import org.shadowice.flocke.andotp.R; import org.shadowice.flocke.andotp.Utilities.TokenCalculator; import java.util.Locale; import static org.shadowice.flocke.andotp.Utilities.Settings.SortMode; public class MainActivity extends BaseActivity implements SharedPreferences.OnSharedPreferenceChangeListener { private static final int INTENT_INTERNAL_AUTHENTICATE = 100; private static final int INTENT_INTERNAL_SETTINGS = 101; private static final int INTENT_INTERNAL_BACKUP = 102; private EntriesCardAdapter adapter; private FloatingActionMenu floatingActionMenu; private SearchView searchView; private MenuItem sortMenu; private SimpleItemTouchHelperCallback touchHelperCallback; private boolean requireAuthentication = false; private Handler handler; private Runnable handlerTask; // QR code scanning private void scanQRCode(){ new IntentIntegrator(MainActivity.this) .setOrientationLocked(false) .setBeepEnabled(false) .initiateScan(); } // Manual data 
entry private void enterDetails() { ViewGroup container = findViewById(R.id.main_content); View inputView = getLayoutInflater().inflate(R.layout.dialog_manual_entry, container, false); final Spinner typeInput = inputView.findViewById(R.id.manual_type); final EditText labelInput = inputView.findViewById(R.id.manual_label); final EditText secretInput = inputView.findViewById(R.id.manual_secret); final EditText periodInput = inputView.findViewById(R.id.manual_period); final EditText digitsInput = inputView.findViewById(R.id.manual_digits); final Spinner algorithmInput = inputView.findViewById(R.id.manual_algorithm); typeInput.setAdapter(new ArrayAdapter<>(this, android.R.layout.simple_expandable_list_item_1, Entry.OTPType.values())); algorithmInput.setAdapter(new ArrayAdapter<>(this, android.R.layout.simple_expandable_list_item_1, TokenCalculator.HashAlgorithm.values())); periodInput.setText(String.format(Locale.US, "%d", TokenCalculator.TOTP_DEFAULT_PERIOD)); digitsInput.setText(String.format(Locale.US, "%d", TokenCalculator.TOTP_DEFAULT_DIGITS)); AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle(R.string.dialog_title_manual_entry) .setView(inputView) .setPositiveButton(R.string.button_save, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { Entry.OTPType type = (Entry.OTPType) typeInput.getSelectedItem(); TokenCalculator.HashAlgorithm algorithm = (TokenCalculator.HashAlgorithm) algorithmInput.getSelectedItem(); if (type == Entry.OTPType.TOTP) { String label = labelInput.getText().toString(); String secret = secretInput.getText().toString(); int period = Integer.parseInt(periodInput.getText().toString()); int digits = Integer.parseInt(digitsInput.getText().toString()); Entry e = new Entry(type, secret, period, digits, label, algorithm); e.updateOTP(); adapter.addEntry(e); adapter.saveEntries(); } } }) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) {} }) .create() .show(); } private void showFirstTimeWarning() { ViewGroup container = findViewById(R.id.main_content); View msgView = getLayoutInflater().inflate(R.layout.dialog_security_backup, container, false); AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle(R.string.dialog_title_security_backup) .setView(msgView) .setPositiveButton(R.string.button_warned, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { settings.setFirstTimeWarningShown(true); } }) .create() .show(); } public void authenticate() { Settings.AuthMethod authMethod = settings.getAuthMethod(); if (authMethod == Settings.AuthMethod.DEVICE) { KeyguardManager km = (KeyguardManager) getSystemService(KEYGUARD_SERVICE); if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP && km.isKeyguardSecure()) { Intent authIntent = km.createConfirmDeviceCredentialIntent(getString(R.string.dialog_title_auth), getString(R.string.dialog_msg_auth)); startActivityForResult(authIntent, INTENT_INTERNAL_AUTHENTICATE); } } else if (authMethod == Settings.AuthMethod.PASSWORD || authMethod == Settings.AuthMethod.PIN) { Intent authIntent = new Intent(this, AuthenticateActivity.class); startActivityForResult(authIntent, INTENT_INTERNAL_AUTHENTICATE); } } private void restoreSortMode() { if (settings != null && adapter != null && touchHelperCallback != null) { SortMode mode = settings.getSortMode(); 
adapter.setSortMode(mode); if (mode == SortMode.LABEL) touchHelperCallback.setDragEnabled(false); else touchHelperCallback.setDragEnabled(true); } } private void saveSortMode(SortMode mode) { if (settings != null) settings.setSortMode(mode); } // Initialize the main application @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setTitle(R.string.app_name); getWindow().setFlags(WindowManager.LayoutParams.FLAG_SECURE, WindowManager.LayoutParams.FLAG_SECURE); setContentView(R.layout.activity_main); Toolbar toolbar = findViewById(R.id.toolbar); setSupportActionBar(toolbar); PreferenceManager.setDefaultValues(this, R.xml.preferences, false); settings.registerPreferenceChangeListener(this); if (savedInstanceState == null) requireAuthentication = true; setBroadcastCallback(new BroadcastReceivedCallback() { @Override public void onReceivedScreenOff() { requireAuthentication = true; } }); if (! settings.getFirstTimeWarningShown()) { showFirstTimeWarning(); } floatingActionMenu = new FloatingActionMenu(this, (ConstraintLayout) findViewById(R.id.fab_main_layout)); floatingActionMenu.setFABHandler(new FloatingActionMenu.FABHandler() { @Override public void onQRFabClick() { scanQRCode(); } @Override public void onManualFabClick() { enterDetails(); } }); final ProgressBar progressBar = findViewById(R.id.progressBar); RecyclerView recList = findViewById(R.id.cardList); recList.setHasFixedSize(true); LinearLayoutManager llm = new LinearLayoutManager(this); llm.setOrientation(LinearLayoutManager.VERTICAL); recList.setLayoutManager(llm); adapter = new EntriesCardAdapter(this); recList.setAdapter(adapter); recList.addOnScrollListener(new RecyclerView.OnScrollListener() { @Override public void onScrolled(RecyclerView recyclerView, int dx, int dy) { super.onScrolled(recyclerView, dx, dy); if (dy > 0) { floatingActionMenu.hide(); } else { if (searchView == null || searchView.isIconified()) floatingActionMenu.show(); } } }); touchHelperCallback = new SimpleItemTouchHelperCallback(adapter); ItemTouchHelper touchHelper = new ItemTouchHelper(touchHelperCallback); touchHelper.attachToRecyclerView(recList); restoreSortMode(); float durationScale = android.provider.Settings.Global.getFloat(this.getContentResolver(), android.provider.Settings.Global.ANIMATOR_DURATION_SCALE, 0); if (durationScale == 0) durationScale = 1; final long animatorDuration = (long) (1000 / durationScale); adapter.setCallback(new EntriesCardAdapter.Callback() { @Override public void onMoveEventStart() { stopUpdater(); } @Override public void onMoveEventStop() { startUpdater(); } }); handler = new Handler(); handlerTask = new Runnable() { @Override public void run() { int progress = (int) (TokenCalculator.TOTP_DEFAULT_PERIOD - (System.currentTimeMillis() / 1000) % TokenCalculator.TOTP_DEFAULT_PERIOD) ; progressBar.setProgress(progress*100); ObjectAnimator animation = ObjectAnimator.ofInt(progressBar, "progress", (progress-1)*100); animation.setDuration(animatorDuration); animation.setInterpolator(new LinearInterpolator()); animation.start(); adapter.updateTokens(); handler.postDelayed(this, 1000); } }; } // Controls for the updater background task public void stopUpdater() { handler.removeCallbacks(handlerTask); } public void startUpdater() { handler.post(handlerTask); } @Override public void onResume() { super.onResume(); if (requireAuthentication) { requireAuthentication = false; authenticate(); } startUpdater(); } @Override public void onPause() { super.onPause(); stopUpdater(); } public void 
onSharedPreferenceChanged(SharedPreferences prefs, String key) { if (key.equals(getString(R.string.settings_key_label_size)) || key.equals(getString(R.string.settings_key_tap_to_reveal))) { adapter.notifyDataSetChanged(); } else if (key.equals(getString(R.string.settings_key_theme))) { recreate(); } } // Activity results @Override protected void onActivityResult(int requestCode, int resultCode, Intent intent) { super.onActivityResult(requestCode, resultCode, intent); IntentResult result = IntentIntegrator.parseActivityResult(requestCode, resultCode, intent); if(result != null) { if(result.getContents() != null) { try { Entry e = new Entry(result.getContents()); e.updateOTP(); adapter.addEntry(e); adapter.saveEntries(); } catch (Exception e) { Toast.makeText(this, R.string.toast_invalid_qr_code, Toast.LENGTH_LONG).show(); } } } else if (requestCode == INTENT_INTERNAL_BACKUP && resultCode == RESULT_OK) { if (intent.getBooleanExtra("reload", false)) adapter.loadEntries(); } else if (requestCode == INTENT_INTERNAL_AUTHENTICATE) { if (resultCode != RESULT_OK) { Toast.makeText(getBaseContext(), R.string.toast_auth_failed, Toast.LENGTH_LONG).show(); if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) { finishAndRemoveTask(); } else { finish(); } } else { requireAuthentication = false; } } } // Options menu @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_main, menu); sortMenu = menu.findItem(R.id.menu_sort); if (adapter != null) { SortMode mode = adapter.getSortMode(); if (mode == SortMode.UNSORTED) { sortMenu.setIcon(R.drawable.ic_sort_inverted_white); menu.findItem(R.id.menu_sort_none).setChecked(true); } else if (mode == SortMode.LABEL) { sortMenu.setIcon(R.drawable.ic_sort_inverted_label_white); menu.findItem(R.id.menu_sort_label).setChecked(true); } } MenuItem searchItem = menu.findItem(R.id.menu_search); searchView = (SearchView) searchItem.getActionView(); searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String query) { return false; } @Override public boolean onQueryTextChange(String newText) { adapter.getFilter().filter(newText); return false; } }); searchItem.setOnActionExpandListener(new MenuItem.OnActionExpandListener() { @Override public boolean onMenuItemActionExpand(MenuItem menuItem) { floatingActionMenu.hide(); touchHelperCallback.setDragEnabled(false); if (sortMenu != null) sortMenu.setVisible(false); return true; } @Override public boolean onMenuItemActionCollapse(MenuItem menuItem) { floatingActionMenu.show(); if (adapter == null || adapter.getSortMode() == SortMode.UNSORTED) touchHelperCallback.setDragEnabled(true); if (sortMenu != null) sortMenu.setVisible(true); return true; } }); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.action_backup) { Intent backupIntent = new Intent(this, BackupActivity.class); startActivityForResult(backupIntent, INTENT_INTERNAL_BACKUP); } else if (id == R.id.action_settings) { Intent settingsIntent = new Intent(this, SettingsActivity.class); startActivityForResult(settingsIntent, INTENT_INTERNAL_SETTINGS); } else if (id == R.id.action_about){ Intent aboutIntent = new Intent(this, AboutActivity.class); startActivity(aboutIntent); return true; } else if (id == R.id.menu_sort_none) { item.setChecked(true); sortMenu.setIcon(R.drawable.ic_sort_inverted_white); saveSortMode(SortMode.UNSORTED); if (adapter != null) { 
adapter.setSortMode(SortMode.UNSORTED); touchHelperCallback.setDragEnabled(true); } } else if (id == R.id.menu_sort_label) { item.setChecked(true); sortMenu.setIcon(R.drawable.ic_sort_inverted_label_white); saveSortMode(SortMode.LABEL); if (adapter != null) { adapter.setSortMode(SortMode.LABEL); touchHelperCallback.setDragEnabled(false); } } return super.onOptionsItemSelected(item); } }
app/src/main/java/org/shadowice/flocke/andotp/Activities/MainActivity.java
/* * Copyright (C) 2017 Jakob Nixdorf * Copyright (C) 2015 Bruno Bierbaumer * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package org.shadowice.flocke.andotp.Activities; import android.animation.ObjectAnimator; import android.app.AlertDialog; import android.app.KeyguardManager; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.os.Handler; import android.preference.PreferenceManager; import android.support.constraint.ConstraintLayout; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.SearchView; import android.support.v7.widget.Toolbar; import android.support.v7.widget.helper.ItemTouchHelper; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.view.animation.LinearInterpolator; import android.widget.ArrayAdapter; import android.widget.EditText; import android.widget.ProgressBar; import android.widget.Spinner; import android.widget.Toast; import com.google.zxing.integration.android.IntentIntegrator; import com.google.zxing.integration.android.IntentResult; import org.shadowice.flocke.andotp.Utilities.Settings; import org.shadowice.flocke.andotp.View.EntriesCardAdapter; import org.shadowice.flocke.andotp.Database.Entry; import org.shadowice.flocke.andotp.View.FloatingActionMenu; import org.shadowice.flocke.andotp.View.ItemTouchHelper.SimpleItemTouchHelperCallback; import org.shadowice.flocke.andotp.R; import org.shadowice.flocke.andotp.Utilities.TokenCalculator; import java.util.Locale; import static org.shadowice.flocke.andotp.Utilities.Settings.SortMode; public class MainActivity extends BaseActivity implements SharedPreferences.OnSharedPreferenceChangeListener { private static final int INTENT_INTERNAL_AUTHENTICATE = 100; private static final int INTENT_INTERNAL_SETTINGS = 101; private static final int INTENT_INTERNAL_BACKUP = 102; private EntriesCardAdapter adapter; private FloatingActionMenu floatingActionMenu; private SearchView searchView; private MenuItem sortMenu; private SimpleItemTouchHelperCallback touchHelperCallback; private boolean requireAuthentication = false; private Handler handler; private Runnable handlerTask; // QR code scanning private void scanQRCode(){ new IntentIntegrator(MainActivity.this) .setOrientationLocked(false) .setBeepEnabled(false) .initiateScan(); } // Manual data 
entry private void enterDetails() { ViewGroup container = findViewById(R.id.main_content); View inputView = getLayoutInflater().inflate(R.layout.dialog_manual_entry, container, false); final Spinner typeInput = inputView.findViewById(R.id.manual_type); final EditText labelInput = inputView.findViewById(R.id.manual_label); final EditText secretInput = inputView.findViewById(R.id.manual_secret); final EditText periodInput = inputView.findViewById(R.id.manual_period); final EditText digitsInput = inputView.findViewById(R.id.manual_digits); final Spinner algorithmInput = inputView.findViewById(R.id.manual_algorithm); typeInput.setAdapter(new ArrayAdapter<>(this, android.R.layout.simple_expandable_list_item_1, Entry.OTPType.values())); algorithmInput.setAdapter(new ArrayAdapter<>(this, android.R.layout.simple_expandable_list_item_1, TokenCalculator.HashAlgorithm.values())); periodInput.setText(String.format(Locale.US, "%d", TokenCalculator.TOTP_DEFAULT_PERIOD)); digitsInput.setText(String.format(Locale.US, "%d", TokenCalculator.TOTP_DEFAULT_DIGITS)); AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle(R.string.dialog_title_manual_entry) .setView(inputView) .setPositiveButton(R.string.button_save, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { Entry.OTPType type = (Entry.OTPType) typeInput.getSelectedItem(); TokenCalculator.HashAlgorithm algorithm = (TokenCalculator.HashAlgorithm) algorithmInput.getSelectedItem(); if (type == Entry.OTPType.TOTP) { String label = labelInput.getText().toString(); String secret = secretInput.getText().toString(); int period = Integer.parseInt(periodInput.getText().toString()); int digits = Integer.parseInt(digitsInput.getText().toString()); Entry e = new Entry(type, secret, period, digits, label, algorithm); e.updateOTP(); adapter.addEntry(e); adapter.saveEntries(); } } }) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) {} }) .create() .show(); } private void showFirstTimeWarning() { ViewGroup container = findViewById(R.id.main_content); View msgView = getLayoutInflater().inflate(R.layout.dialog_security_backup, container, false); AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle(R.string.dialog_title_security_backup) .setView(msgView) .setPositiveButton(R.string.button_warned, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { settings.setFirstTimeWarningShown(true); } }) .create() .show(); } public void authenticate() { Settings.AuthMethod authMethod = settings.getAuthMethod(); if (authMethod == Settings.AuthMethod.DEVICE) { KeyguardManager km = (KeyguardManager) getSystemService(KEYGUARD_SERVICE); if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP && km.isKeyguardSecure()) { Intent authIntent = km.createConfirmDeviceCredentialIntent(getString(R.string.dialog_title_auth), getString(R.string.dialog_msg_auth)); startActivityForResult(authIntent, INTENT_INTERNAL_AUTHENTICATE); } } else if (authMethod == Settings.AuthMethod.PASSWORD || authMethod == Settings.AuthMethod.PIN) { Intent authIntent = new Intent(this, AuthenticateActivity.class); startActivityForResult(authIntent, INTENT_INTERNAL_AUTHENTICATE); } } private void restoreSortMode() { if (settings != null && adapter != null && touchHelperCallback != null) { SortMode mode = settings.getSortMode(); 
adapter.setSortMode(mode); if (mode == SortMode.LABEL) touchHelperCallback.setDragEnabled(false); else touchHelperCallback.setDragEnabled(true); } } private void saveSortMode(SortMode mode) { if (settings != null) settings.setSortMode(mode); } // Initialize the main application @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setTitle(R.string.app_name); getWindow().setFlags(WindowManager.LayoutParams.FLAG_SECURE, WindowManager.LayoutParams.FLAG_SECURE); setContentView(R.layout.activity_main); Toolbar toolbar = findViewById(R.id.toolbar); setSupportActionBar(toolbar); PreferenceManager.setDefaultValues(this, R.xml.preferences, false); settings.registerPreferenceChangeListener(this); if (savedInstanceState == null) requireAuthentication = true; setBroadcastCallback(new BroadcastReceivedCallback() { @Override public void onReceivedScreenOff() { requireAuthentication = true; } }); if (! settings.getFirstTimeWarningShown()) { showFirstTimeWarning(); } floatingActionMenu = new FloatingActionMenu(this, (ConstraintLayout) findViewById(R.id.fab_main_layout)); floatingActionMenu.setFABHandler(new FloatingActionMenu.FABHandler() { @Override public void onQRFabClick() { scanQRCode(); } @Override public void onManualFabClick() { enterDetails(); } }); final ProgressBar progressBar = findViewById(R.id.progressBar); RecyclerView recList = findViewById(R.id.cardList); recList.setHasFixedSize(true); LinearLayoutManager llm = new LinearLayoutManager(this); llm.setOrientation(LinearLayoutManager.VERTICAL); recList.setLayoutManager(llm); adapter = new EntriesCardAdapter(this); recList.setAdapter(adapter); recList.addOnScrollListener(new RecyclerView.OnScrollListener() { @Override public void onScrolled(RecyclerView recyclerView, int dx, int dy) { super.onScrolled(recyclerView, dx, dy); if (dy > 0) { floatingActionMenu.hide(); } else { if (searchView == null || searchView.isIconified()) floatingActionMenu.show(); } } }); touchHelperCallback = new SimpleItemTouchHelperCallback(adapter); ItemTouchHelper touchHelper = new ItemTouchHelper(touchHelperCallback); touchHelper.attachToRecyclerView(recList); restoreSortMode(); float durationScale = android.provider.Settings.Global.getFloat(this.getContentResolver(), android.provider.Settings.Global.ANIMATOR_DURATION_SCALE, 0); final long animatorDuration = (long) (1000 / durationScale); adapter.setCallback(new EntriesCardAdapter.Callback() { @Override public void onMoveEventStart() { stopUpdater(); } @Override public void onMoveEventStop() { startUpdater(); } }); handler = new Handler(); handlerTask = new Runnable() { @Override public void run() { int progress = (int) (TokenCalculator.TOTP_DEFAULT_PERIOD - (System.currentTimeMillis() / 1000) % TokenCalculator.TOTP_DEFAULT_PERIOD) ; progressBar.setProgress(progress*100); ObjectAnimator animation = ObjectAnimator.ofInt(progressBar, "progress", (progress-1)*100); animation.setDuration(animatorDuration); animation.setInterpolator(new LinearInterpolator()); animation.start(); adapter.updateTokens(); handler.postDelayed(this, 1000); } }; } // Controls for the updater background task public void stopUpdater() { handler.removeCallbacks(handlerTask); } public void startUpdater() { handler.post(handlerTask); } @Override public void onResume() { super.onResume(); if (requireAuthentication) { requireAuthentication = false; authenticate(); } startUpdater(); } @Override public void onPause() { super.onPause(); stopUpdater(); } public void onSharedPreferenceChanged(SharedPreferences 
prefs, String key) { if (key.equals(getString(R.string.settings_key_label_size)) || key.equals(getString(R.string.settings_key_tap_to_reveal))) { adapter.notifyDataSetChanged(); } else if (key.equals(getString(R.string.settings_key_theme))) { recreate(); } } // Activity results @Override protected void onActivityResult(int requestCode, int resultCode, Intent intent) { super.onActivityResult(requestCode, resultCode, intent); IntentResult result = IntentIntegrator.parseActivityResult(requestCode, resultCode, intent); if(result != null) { if(result.getContents() != null) { try { Entry e = new Entry(result.getContents()); e.updateOTP(); adapter.addEntry(e); adapter.saveEntries(); } catch (Exception e) { Toast.makeText(this, R.string.toast_invalid_qr_code, Toast.LENGTH_LONG).show(); } } } else if (requestCode == INTENT_INTERNAL_BACKUP && resultCode == RESULT_OK) { if (intent.getBooleanExtra("reload", false)) adapter.loadEntries(); } else if (requestCode == INTENT_INTERNAL_AUTHENTICATE) { if (resultCode != RESULT_OK) { Toast.makeText(getBaseContext(), R.string.toast_auth_failed, Toast.LENGTH_LONG).show(); if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) { finishAndRemoveTask(); } else { finish(); } } else { requireAuthentication = false; } } } // Options menu @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_main, menu); sortMenu = menu.findItem(R.id.menu_sort); if (adapter != null) { SortMode mode = adapter.getSortMode(); if (mode == SortMode.UNSORTED) { sortMenu.setIcon(R.drawable.ic_sort_inverted_white); menu.findItem(R.id.menu_sort_none).setChecked(true); } else if (mode == SortMode.LABEL) { sortMenu.setIcon(R.drawable.ic_sort_inverted_label_white); menu.findItem(R.id.menu_sort_label).setChecked(true); } } MenuItem searchItem = menu.findItem(R.id.menu_search); searchView = (SearchView) searchItem.getActionView(); searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String query) { return false; } @Override public boolean onQueryTextChange(String newText) { adapter.getFilter().filter(newText); return false; } }); searchItem.setOnActionExpandListener(new MenuItem.OnActionExpandListener() { @Override public boolean onMenuItemActionExpand(MenuItem menuItem) { floatingActionMenu.hide(); touchHelperCallback.setDragEnabled(false); if (sortMenu != null) sortMenu.setVisible(false); return true; } @Override public boolean onMenuItemActionCollapse(MenuItem menuItem) { floatingActionMenu.show(); if (adapter == null || adapter.getSortMode() == SortMode.UNSORTED) touchHelperCallback.setDragEnabled(true); if (sortMenu != null) sortMenu.setVisible(true); return true; } }); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.action_backup) { Intent backupIntent = new Intent(this, BackupActivity.class); startActivityForResult(backupIntent, INTENT_INTERNAL_BACKUP); } else if (id == R.id.action_settings) { Intent settingsIntent = new Intent(this, SettingsActivity.class); startActivityForResult(settingsIntent, INTENT_INTERNAL_SETTINGS); } else if (id == R.id.action_about){ Intent aboutIntent = new Intent(this, AboutActivity.class); startActivity(aboutIntent); return true; } else if (id == R.id.menu_sort_none) { item.setChecked(true); sortMenu.setIcon(R.drawable.ic_sort_inverted_white); saveSortMode(SortMode.UNSORTED); if (adapter != null) { adapter.setSortMode(SortMode.UNSORTED); 
touchHelperCallback.setDragEnabled(true); } } else if (id == R.id.menu_sort_label) { item.setChecked(true); sortMenu.setIcon(R.drawable.ic_sort_inverted_label_white); saveSortMode(SortMode.LABEL); if (adapter != null) { adapter.setSortMode(SortMode.LABEL); touchHelperCallback.setDragEnabled(false); } } return super.onOptionsItemSelected(item); } }
Fix a small bug with the progress bar animation
app/src/main/java/org/shadowice/flocke/andotp/Activities/MainActivity.java
Fix a small bug with the progress bar animation
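The visible difference between the new_contents and old_contents of the record above ("Fix a small bug with the progress bar animation") is a single guard: both versions compute the animation duration as 1000 / durationScale, where the scale comes from Settings.Global.ANIMATOR_DURATION_SCALE and reads as 0 when system animations are disabled, so only the new version resets a zero scale to 1 before dividing. A minimal sketch of that guard, using only the standard android.provider.Settings API (the helper class and method names below are illustrative and not part of andOTP):

import android.content.ContentResolver;
import android.provider.Settings;

final class AnimatorDurationHelper {

    private AnimatorDurationHelper() {}

    // Duration (ms) for a nominal one-second animation, scaled by the user's
    // global animator duration scale. The scale reads as 0 when animations are
    // disabled, which would make 1000 / durationScale infinite and the cast to
    // long yield Long.MAX_VALUE, so fall back to a scale of 1 first.
    static long scaledAnimatorDuration(ContentResolver resolver) {
        float durationScale = Settings.Global.getFloat(
                resolver, Settings.Global.ANIMATOR_DURATION_SCALE, 0f);
        if (durationScale == 0f) {
            durationScale = 1f;
        }
        return (long) (1000 / durationScale);
    }
}

Calling scaledAnimatorDuration(getContentResolver()) from an Activity's onCreate reproduces the guarded computation used by the new_contents above.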
Java
mit
41b3009658eb2071674f7b8c40ed7a1c733ad007
0
kkkon/java-network-connection-checker
/* * The MIT License * * Copyright (C) 2014 Kiyofumi Kondoh * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package jp.ne.sakura.kkkon.java.net.inetaddress.testapp.android; import android.app.Activity; import android.content.ActivityNotFoundException; import android.content.Context; import android.content.Intent; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.LinearLayout; import android.widget.TextView; import android.widget.Toast; import java.io.IOException; import java.net.HttpURLConnection; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.Proxy; import java.net.ProxySelector; import java.net.SocketAddress; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; import jp.ne.sakura.kkkon.java.net.inetaddress.NetworkConnectionChecker; import org.apache.http.HttpResponse; import org.apache.http.NameValuePair; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.message.BasicNameValuePair; import org.apache.http.params.CoreConnectionPNames; import org.apache.http.protocol.HTTP; /** * * @author Kiyofumi Kondoh */ public class NetworkConnectionCheckerTestApp extends Activity { public static final String TAG = "appKK"; private InetAddress destHost = null; private boolean isReachable = false; private TextView textView = null; private Handler handler = null; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { final Context context = this.getApplicationContext(); { NetworkConnectionChecker.initialize(); } super.onCreate(savedInstanceState); /* Create a TextView and set its content. * the text is retrieved by calling a native * function. 
*/ LinearLayout layout = new LinearLayout( this ); layout.setOrientation( LinearLayout.VERTICAL ); TextView tv = new TextView(this); tv.setText( "reachable=" ); layout.addView( tv ); this.textView = tv; Button btn1 = new Button( this ); btn1.setText( "invoke Exception" ); btn1.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { final int count = 2; int[] array = new int[count]; int value = array[count]; // invoke IndexOutOfBOundsException } } ); layout.addView( btn1 ); { Button btn = new Button( this ); btn.setText( "disp isReachable" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { final boolean isReachable = NetworkConnectionChecker.isReachable(); Toast toast = Toast.makeText( context, "IsReachable=" + isReachable , Toast.LENGTH_LONG ); toast.show(); } } ); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "upload http AsyncTask" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { AsyncTask<String,Void,Boolean> asyncTask = new AsyncTask<String,Void,Boolean>() { @Override protected Boolean doInBackground(String... paramss) { Boolean result = true; Log.d( TAG, "upload AsyncTask tid=" + android.os.Process.myTid() ); try { //$(BRAND)/$(PRODUCT)/$(DEVICE)/$(BOARD):$(VERSION.RELEASE)/$(ID)/$(VERSION.INCREMENTAL):$(TYPE)/$(TAGS) Log.d( TAG, "fng=" + Build.FINGERPRINT ); final List<NameValuePair> list = new ArrayList<NameValuePair>(16); list.add( new BasicNameValuePair( "fng", Build.FINGERPRINT ) ); HttpPost httpPost = new HttpPost( paramss[0] ); //httpPost.getParams().setParameter( CoreConnectionPNames.SO_TIMEOUT, new Integer(5*1000) ); httpPost.setEntity( new UrlEncodedFormEntity( list, HTTP.UTF_8 ) ); DefaultHttpClient httpClient = new DefaultHttpClient(); Log.d( TAG, "socket.timeout=" + httpClient.getParams().getIntParameter( CoreConnectionPNames.SO_TIMEOUT, -1) ); Log.d( TAG, "connection.timeout=" + httpClient.getParams().getIntParameter( CoreConnectionPNames.CONNECTION_TIMEOUT, -1) ); httpClient.getParams().setParameter( CoreConnectionPNames.SO_TIMEOUT, new Integer(5*1000) ); httpClient.getParams().setParameter( CoreConnectionPNames.CONNECTION_TIMEOUT, new Integer(5*1000) ); Log.d( TAG, "socket.timeout=" + httpClient.getParams().getIntParameter( CoreConnectionPNames.SO_TIMEOUT, -1) ); Log.d( TAG, "connection.timeout=" + httpClient.getParams().getIntParameter( CoreConnectionPNames.CONNECTION_TIMEOUT, -1) ); // <uses-permission android:name="android.permission.INTERNET"/> // got android.os.NetworkOnMainThreadException, run at UI Main Thread HttpResponse response = httpClient.execute( httpPost ); Log.d( TAG, "response=" + response.getStatusLine().getStatusCode() ); } catch ( Exception e ) { Log.d( TAG, "got Exception. 
msg=" + e.getMessage(), e ); result = false; } Log.d( TAG, "upload finish" ); return result; } }; asyncTask.execute("http://kkkon.sakura.ne.jp/android/bug"); asyncTask.isCancelled(); } } ); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "pre DNS query(0.0.0.0)" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { isReachable = false; Thread thread = new Thread( new Runnable() { public void run() { try { destHost = InetAddress.getByName("0.0.0.0"); if ( null != destHost ) { try { if ( destHost.isReachable( 5*1000 ) ) { Log.d( TAG, "destHost=" + destHost.toString() + " reachable" ); } else { Log.d( TAG, "destHost=" + destHost.toString() + " not reachable" ); } } catch ( IOException e ) { } } } catch ( UnknownHostException e ) { } Log.d( TAG, "destHost=" + destHost ); } }); thread.start(); try { thread.join( 1000 ); } catch ( InterruptedException e ) { } } }); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "pre DNS query(www.google.com)" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { isReachable = false; Thread thread = new Thread( new Runnable() { public void run() { Log.d( TAG, "start" ); try { InetAddress dest = InetAddress.getByName("www.google.com"); if ( null == dest ) { dest = destHost; } if ( null != dest ) { final String[] uris = new String[] { "http://www.google.com/", "https://www.google.com/" }; for ( final String destURI : uris ) { URI uri = null; try { uri = new URI( destURI ); } catch ( URISyntaxException e ) { //Log.d( TAG, e.toString() ); } if ( null != uri ) { URL url = null; try { url = uri.toURL(); } catch (MalformedURLException ex) { Log.d( TAG, "got exception:" + ex.toString(), ex ); } URLConnection conn = null; if ( null != url ) { Log.d( TAG, "openConnection before" ); try { conn = url.openConnection(); if ( null != conn ) { conn.setConnectTimeout( 3*1000 ); conn.setReadTimeout( 3*1000 ); } } catch ( IOException e ) { //Log.d( TAG, "got Exception" + e.toString(), e ); } Log.d( TAG, "openConnection after" ); if ( conn instanceof HttpURLConnection ) { HttpURLConnection httpConn = (HttpURLConnection)conn; int responceCode = -1; try { Log.d( TAG, "getResponceCode before" ); responceCode = httpConn.getResponseCode(); Log.d( TAG, "getResponceCode after" ); } catch (IOException ex) { Log.d( TAG, "got exception:" + ex.toString(), ex ); } Log.d( TAG, "responceCode=" + responceCode ); if ( 0 < responceCode ) { isReachable = true; destHost = dest; } Log.d( TAG, " HTTP ContentLength=" + httpConn.getContentLength() ); httpConn.disconnect(); Log.d( TAG, " HTTP ContentLength=" + httpConn.getContentLength() ); } } } // if uri if ( isReachable ) { //break; } } // for uris } else { } } catch ( UnknownHostException e ) { Log.d( TAG, "dns error" + e.toString() ); destHost = null; } { if ( null != destHost ) { Log.d( TAG, "destHost=" + destHost ); } } Log.d( TAG, "end" ); } }); thread.start(); try { thread.join(); { final String addr = (null==destHost)?(""):(destHost.toString()); final String reachable = (isReachable)?("reachable"):("not reachable"); Toast toast = Toast.makeText( context, "DNS result=\n" + addr + "\n " + reachable, Toast.LENGTH_LONG ); toast.show(); } } catch ( InterruptedException e ) { } } }); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "pre DNS query(kkkon.sakura.ne.jp)" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { isReachable = false; 
Thread thread = new Thread( new Runnable() { public void run() { Log.d( TAG, "start" ); try { InetAddress dest = InetAddress.getByName("kkkon.sakura.ne.jp"); if ( null == dest ) { dest = destHost; } if ( null != dest ) { try { if ( dest.isReachable( 5*1000 ) ) { Log.d( TAG, "destHost=" + dest.toString() + " reachable" ); isReachable = true; } else { Log.d( TAG, "destHost=" + dest.toString() + " not reachable" ); } destHost = dest; } catch ( IOException e ) { } } else { } } catch ( UnknownHostException e ) { Log.d( TAG, "dns error" + e.toString() ); destHost = null; } { if ( null != destHost ) { Log.d( TAG, "destHost=" + destHost ); } } Log.d( TAG, "end" ); } }); thread.start(); try { thread.join(); { final String addr = (null==destHost)?(""):(destHost.toString()); final String reachable = (isReachable)?("reachable"):("not reachable"); Toast toast = Toast.makeText( context, "DNS result=\n" + addr + "\n " + reachable, Toast.LENGTH_LONG ); toast.show(); } } catch ( InterruptedException e ) { } } }); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "pre DNS query(kkkon.sakura.ne.jp) support proxy" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { isReachable = false; Thread thread = new Thread( new Runnable() { public void run() { try { String target = null; { ProxySelector proxySelector = ProxySelector.getDefault(); Log.d( TAG, "proxySelector=" + proxySelector ); if ( null != proxySelector ) { URI uri = null; try { uri = new URI("http://www.google.com/"); } catch ( URISyntaxException e ) { Log.d( TAG, e.toString() ); } List<Proxy> proxies = proxySelector.select( uri ); if ( null != proxies ) { for ( final Proxy proxy : proxies ) { Log.d( TAG, " proxy=" + proxy ); if ( null != proxy ) { if ( Proxy.Type.HTTP == proxy.type() ) { final SocketAddress sa = proxy.address(); if ( sa instanceof InetSocketAddress ) { final InetSocketAddress isa = (InetSocketAddress)sa; target = isa.getHostName(); break; } } } } } } } if ( null == target ) { target = "kkkon.sakura.ne.jp"; } InetAddress dest = InetAddress.getByName(target); if ( null == dest ) { dest = destHost; } if ( null != dest ) { try { if ( dest.isReachable( 5*1000 ) ) { Log.d( TAG, "destHost=" + dest.toString() + " reachable" ); isReachable = true; } else { Log.d( TAG, "destHost=" + dest.toString() + " not reachable" ); { ProxySelector proxySelector = ProxySelector.getDefault(); //Log.d( TAG, "proxySelector=" + proxySelector ); if ( null != proxySelector ) { URI uri = null; try { uri = new URI("http://www.google.com/"); } catch ( URISyntaxException e ) { //Log.d( TAG, e.toString() ); } if ( null != uri ) { List<Proxy> proxies = proxySelector.select( uri ); if ( null != proxies ) { for ( final Proxy proxy : proxies ) { //Log.d( TAG, " proxy=" + proxy ); if ( null != proxy ) { if ( Proxy.Type.HTTP == proxy.type() ) { URL url = uri.toURL(); URLConnection conn = null; if ( null != url ) { try { conn = url.openConnection( proxy ); if ( null != conn ) { conn.setConnectTimeout( 3*1000 ); conn.setReadTimeout( 3*1000 ); } } catch ( IOException e ) { Log.d( TAG, "got Exception" + e.toString(), e ); } if ( conn instanceof HttpURLConnection ) { HttpURLConnection httpConn = (HttpURLConnection)conn; if ( 0 < httpConn.getResponseCode() ) { isReachable = true; } Log.d( TAG, " HTTP ContentLength=" + httpConn.getContentLength() ); Log.d( TAG, " HTTP res=" + httpConn.getResponseCode() ); //httpConn.setInstanceFollowRedirects( false ); //httpConn.setRequestMethod( "HEAD" ); //conn.connect(); 
httpConn.disconnect(); Log.d( TAG, " HTTP ContentLength=" + httpConn.getContentLength() ); Log.d( TAG, " HTTP res=" + httpConn.getResponseCode() ); } } } } } } } } } } destHost = dest; } catch ( IOException e ) { Log.d( TAG, "got Excpetion " + e.toString() ); } } else { } } catch ( UnknownHostException e ) { Log.d( TAG, "dns error" + e.toString() ); destHost = null; } { if ( null != destHost ) { Log.d( TAG, "destHost=" + destHost ); } } } }); thread.start(); try { thread.join(); { final String addr = (null==destHost)?(""):(destHost.toString()); final String reachable = (isReachable)?("reachable"):("not reachable"); Toast toast = Toast.makeText( context, "DNS result=\n" + addr + "\n " + reachable, Toast.LENGTH_LONG ); toast.show(); } } catch ( InterruptedException e ) { } } }); layout.addView( btn ); } setContentView( layout ); } @Override protected void onStart() { NetworkConnectionChecker.start(); this.handler = new Handler() { @Override public void handleMessage(Message msg) { //Log.d( TAG, "hnadleMessage" ); final boolean isReachable = NetworkConnectionChecker.isReachable(); textView.setText( "reachable=" + isReachable ); //textView.invalidate(); removeMessages(0); sendMessageDelayed( obtainMessage(0), 1*1000 ); } }; this.handler.sendMessage( this.handler.obtainMessage(0) ); super.onStart(); //To change body of generated methods, choose Tools | Templates. } @Override protected void onStop() { NetworkConnectionChecker.stop(); this.handler = null; super.onStop(); //To change body of generated methods, choose Tools | Templates. } }
java-network-connection-checker-testapp-android/src/jp/ne/sakura/kkkon/java/net/inetaddress/testapp/android/NetworkConnectionCheckerTestApp.java
/* * The MIT License * * Copyright (C) 2014 Kiyofumi Kondoh * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package jp.ne.sakura.kkkon.java.net.inetaddress.testapp.android; import android.app.Activity; import android.content.ActivityNotFoundException; import android.content.Context; import android.content.Intent; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.LinearLayout; import android.widget.TextView; import android.widget.Toast; import java.io.IOException; import java.net.HttpURLConnection; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.Proxy; import java.net.ProxySelector; import java.net.SocketAddress; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; import jp.ne.sakura.kkkon.java.net.inetaddress.NetworkConnectionChecker; import org.apache.http.HttpResponse; import org.apache.http.NameValuePair; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.message.BasicNameValuePair; import org.apache.http.params.CoreConnectionPNames; import org.apache.http.protocol.HTTP; /** * * @author Kiyofumi Kondoh */ public class NetworkConnectionCheckerTestApp extends Activity { public static final String TAG = "appKK"; private InetAddress destHost = null; private boolean isReachable = false; private TextView textView = null; private Handler handler = null; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { final Context context = this.getApplicationContext(); { NetworkConnectionChecker.initialize(); } super.onCreate(savedInstanceState); /* Create a TextView and set its content. * the text is retrieved by calling a native * function. 
*/ LinearLayout layout = new LinearLayout( this ); layout.setOrientation( LinearLayout.VERTICAL ); TextView tv = new TextView(this); tv.setText( "reachable=" ); layout.addView( tv ); this.textView = tv; Button btn1 = new Button( this ); btn1.setText( "invoke Exception" ); btn1.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { final int count = 2; int[] array = new int[count]; int value = array[count]; // invoke IndexOutOfBOundsException } } ); layout.addView( btn1 ); { Button btn = new Button( this ); btn.setText( "disp isReachable" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { final boolean isReachable = NetworkConnectionChecker.isReachable(); Toast toast = Toast.makeText( context, "IsReachable=" + isReachable , Toast.LENGTH_LONG ); toast.show(); } } ); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "upload http AsyncTask" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { AsyncTask<String,Void,Boolean> asyncTask = new AsyncTask<String,Void,Boolean>() { @Override protected Boolean doInBackground(String... paramss) { Boolean result = true; Log.d( TAG, "upload AsyncTask tid=" + android.os.Process.myTid() ); try { //$(BRAND)/$(PRODUCT)/$(DEVICE)/$(BOARD):$(VERSION.RELEASE)/$(ID)/$(VERSION.INCREMENTAL):$(TYPE)/$(TAGS) Log.d( TAG, "fng=" + Build.FINGERPRINT ); final List<NameValuePair> list = new ArrayList<NameValuePair>(16); list.add( new BasicNameValuePair( "fng", Build.FINGERPRINT ) ); HttpPost httpPost = new HttpPost( paramss[0] ); //httpPost.getParams().setParameter( CoreConnectionPNames.SO_TIMEOUT, new Integer(5*1000) ); httpPost.setEntity( new UrlEncodedFormEntity( list, HTTP.UTF_8 ) ); DefaultHttpClient httpClient = new DefaultHttpClient(); Log.d( TAG, "socket.timeout=" + httpClient.getParams().getIntParameter( CoreConnectionPNames.SO_TIMEOUT, -1) ); Log.d( TAG, "connection.timeout=" + httpClient.getParams().getIntParameter( CoreConnectionPNames.CONNECTION_TIMEOUT, -1) ); httpClient.getParams().setParameter( CoreConnectionPNames.SO_TIMEOUT, new Integer(5*1000) ); httpClient.getParams().setParameter( CoreConnectionPNames.CONNECTION_TIMEOUT, new Integer(5*1000) ); Log.d( TAG, "socket.timeout=" + httpClient.getParams().getIntParameter( CoreConnectionPNames.SO_TIMEOUT, -1) ); Log.d( TAG, "connection.timeout=" + httpClient.getParams().getIntParameter( CoreConnectionPNames.CONNECTION_TIMEOUT, -1) ); // <uses-permission android:name="android.permission.INTERNET"/> // got android.os.NetworkOnMainThreadException, run at UI Main Thread HttpResponse response = httpClient.execute( httpPost ); Log.d( TAG, "response=" + response.getStatusLine().getStatusCode() ); } catch ( Exception e ) { Log.d( TAG, "got Exception. 
msg=" + e.getMessage(), e ); result = false; } Log.d( TAG, "upload finish" ); return result; } }; asyncTask.execute("http://kkkon.sakura.ne.jp/android/bug"); asyncTask.isCancelled(); } } ); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "pre DNS query(0.0.0.0)" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { isReachable = false; Thread thread = new Thread( new Runnable() { public void run() { try { destHost = InetAddress.getByName("0.0.0.0"); if ( null != destHost ) { try { if ( destHost.isReachable( 5*1000 ) ) { Log.d( TAG, "destHost=" + destHost.toString() + " reachable" ); } else { Log.d( TAG, "destHost=" + destHost.toString() + " not reachable" ); } } catch ( IOException e ) { } } } catch ( UnknownHostException e ) { } Log.d( TAG, "destHost=" + destHost ); } }); thread.start(); try { thread.join( 1000 ); } catch ( InterruptedException e ) { } } }); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "pre DNS query(www.google.com)" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { isReachable = false; Thread thread = new Thread( new Runnable() { public void run() { Log.d( TAG, "start" ); try { InetAddress dest = InetAddress.getByName("www.google.com"); if ( null == dest ) { dest = destHost; } if ( null != dest ) { URI uri = null; try { uri = new URI("http://www.google.com/"); } catch ( URISyntaxException e ) { //Log.d( TAG, e.toString() ); } if ( null != uri ) { URL url = null; try { url = uri.toURL(); } catch (MalformedURLException ex) { Log.d( TAG, "got exception:" + ex.toString(), ex ); } URLConnection conn = null; if ( null != url ) { try { conn = url.openConnection(); if ( null != conn ) { conn.setConnectTimeout( 3*1000 ); conn.setReadTimeout( 3*1000 ); } } catch ( IOException e ) { //Log.d( TAG, "got Exception" + e.toString(), e ); } if ( conn instanceof HttpURLConnection ) { HttpURLConnection httpConn = (HttpURLConnection)conn; int responceCode = -1; try { responceCode = httpConn.getResponseCode(); } catch (IOException ex) { Log.d( TAG, "got exception:" + ex.toString(), ex ); } if ( 0 < responceCode ) { isReachable = true; destHost = dest; } //Log.d( TAG, " HTTP ContentLength=" + httpConn.getContentLength() ); //Log.d( TAG, " HTTP res=" + httpConn.getResponseCode() ); httpConn.disconnect(); //Log.d( TAG, " HTTP ContentLength=" + httpConn.getContentLength() ); //Log.d( TAG, " HTTP res=" + httpConn.getResponseCode() ); } } } } else { } } catch ( UnknownHostException e ) { Log.d( TAG, "dns error" + e.toString() ); destHost = null; } { if ( null != destHost ) { Log.d( TAG, "destHost=" + destHost ); } } Log.d( TAG, "end" ); } }); thread.start(); try { thread.join(); { final String addr = (null==destHost)?(""):(destHost.toString()); final String reachable = (isReachable)?("reachable"):("not reachable"); Toast toast = Toast.makeText( context, "DNS result=\n" + addr + "\n " + reachable, Toast.LENGTH_LONG ); toast.show(); } } catch ( InterruptedException e ) { } } }); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "pre DNS query(kkkon.sakura.ne.jp)" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { isReachable = false; Thread thread = new Thread( new Runnable() { public void run() { Log.d( TAG, "start" ); try { InetAddress dest = InetAddress.getByName("kkkon.sakura.ne.jp"); if ( null == dest ) { dest = destHost; } if ( null != dest ) { try { if ( dest.isReachable( 5*1000 
) ) { Log.d( TAG, "destHost=" + dest.toString() + " reachable" ); isReachable = true; } else { Log.d( TAG, "destHost=" + dest.toString() + " not reachable" ); } destHost = dest; } catch ( IOException e ) { } } else { } } catch ( UnknownHostException e ) { Log.d( TAG, "dns error" + e.toString() ); destHost = null; } { if ( null != destHost ) { Log.d( TAG, "destHost=" + destHost ); } } Log.d( TAG, "end" ); } }); thread.start(); try { thread.join(); { final String addr = (null==destHost)?(""):(destHost.toString()); final String reachable = (isReachable)?("reachable"):("not reachable"); Toast toast = Toast.makeText( context, "DNS result=\n" + addr + "\n " + reachable, Toast.LENGTH_LONG ); toast.show(); } } catch ( InterruptedException e ) { } } }); layout.addView( btn ); } { Button btn = new Button( this ); btn.setText( "pre DNS query(kkkon.sakura.ne.jp) support proxy" ); btn.setOnClickListener( new View.OnClickListener() { @Override public void onClick(View view) { isReachable = false; Thread thread = new Thread( new Runnable() { public void run() { try { String target = null; { ProxySelector proxySelector = ProxySelector.getDefault(); Log.d( TAG, "proxySelector=" + proxySelector ); if ( null != proxySelector ) { URI uri = null; try { uri = new URI("http://www.google.com/"); } catch ( URISyntaxException e ) { Log.d( TAG, e.toString() ); } List<Proxy> proxies = proxySelector.select( uri ); if ( null != proxies ) { for ( final Proxy proxy : proxies ) { Log.d( TAG, " proxy=" + proxy ); if ( null != proxy ) { if ( Proxy.Type.HTTP == proxy.type() ) { final SocketAddress sa = proxy.address(); if ( sa instanceof InetSocketAddress ) { final InetSocketAddress isa = (InetSocketAddress)sa; target = isa.getHostName(); break; } } } } } } } if ( null == target ) { target = "kkkon.sakura.ne.jp"; } InetAddress dest = InetAddress.getByName(target); if ( null == dest ) { dest = destHost; } if ( null != dest ) { try { if ( dest.isReachable( 5*1000 ) ) { Log.d( TAG, "destHost=" + dest.toString() + " reachable" ); isReachable = true; } else { Log.d( TAG, "destHost=" + dest.toString() + " not reachable" ); { ProxySelector proxySelector = ProxySelector.getDefault(); //Log.d( TAG, "proxySelector=" + proxySelector ); if ( null != proxySelector ) { URI uri = null; try { uri = new URI("http://www.google.com/"); } catch ( URISyntaxException e ) { //Log.d( TAG, e.toString() ); } if ( null != uri ) { List<Proxy> proxies = proxySelector.select( uri ); if ( null != proxies ) { for ( final Proxy proxy : proxies ) { //Log.d( TAG, " proxy=" + proxy ); if ( null != proxy ) { if ( Proxy.Type.HTTP == proxy.type() ) { URL url = uri.toURL(); URLConnection conn = null; if ( null != url ) { try { conn = url.openConnection( proxy ); if ( null != conn ) { conn.setConnectTimeout( 3*1000 ); conn.setReadTimeout( 3*1000 ); } } catch ( IOException e ) { Log.d( TAG, "got Exception" + e.toString(), e ); } if ( conn instanceof HttpURLConnection ) { HttpURLConnection httpConn = (HttpURLConnection)conn; if ( 0 < httpConn.getResponseCode() ) { isReachable = true; } Log.d( TAG, " HTTP ContentLength=" + httpConn.getContentLength() ); Log.d( TAG, " HTTP res=" + httpConn.getResponseCode() ); //httpConn.setInstanceFollowRedirects( false ); //httpConn.setRequestMethod( "HEAD" ); //conn.connect(); httpConn.disconnect(); Log.d( TAG, " HTTP ContentLength=" + httpConn.getContentLength() ); Log.d( TAG, " HTTP res=" + httpConn.getResponseCode() ); } } } } } } } } } } destHost = dest; } catch ( IOException e ) { Log.d( TAG, "got Excpetion " + e.toString() ); 
} } else { } } catch ( UnknownHostException e ) { Log.d( TAG, "dns error" + e.toString() ); destHost = null; } { if ( null != destHost ) { Log.d( TAG, "destHost=" + destHost ); } } } }); thread.start(); try { thread.join(); { final String addr = (null==destHost)?(""):(destHost.toString()); final String reachable = (isReachable)?("reachable"):("not reachable"); Toast toast = Toast.makeText( context, "DNS result=\n" + addr + "\n " + reachable, Toast.LENGTH_LONG ); toast.show(); } } catch ( InterruptedException e ) { } } }); layout.addView( btn ); } setContentView( layout ); } @Override protected void onStart() { NetworkConnectionChecker.start(); this.handler = new Handler() { @Override public void handleMessage(Message msg) { //Log.d( TAG, "hnadleMessage" ); final boolean isReachable = NetworkConnectionChecker.isReachable(); textView.setText( "reachable=" + isReachable ); //textView.invalidate(); removeMessages(0); sendMessageDelayed( obtainMessage(0), 1*1000 ); } }; this.handler.sendMessage( this.handler.obtainMessage(0) ); super.onStart(); //To change body of generated methods, choose Tools | Templates. } @Override protected void onStop() { NetworkConnectionChecker.stop(); this.handler = null; super.onStop(); //To change body of generated methods, choose Tools | Templates. } }
add https host
java-network-connection-checker-testapp-android/src/jp/ne/sakura/kkkon/java/net/inetaddress/testapp/android/NetworkConnectionCheckerTestApp.java
add https host
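The "add https host" record above extends the www.google.com probe from a single http URI to an http/https pair: each URI is opened through URL.openConnection() with short connect and read timeouts, and any HTTP response code greater than zero marks the host as reachable. A condensed, self-contained sketch of that probe loop follows (ReachabilityProbe and anyReachable are illustrative names, not part of the test app):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

final class ReachabilityProbe {

    private ReachabilityProbe() {}

    // Probes each URL in order; any HTTP response code > 0 counts as reachable,
    // matching the criterion used by the test app above.
    static boolean anyReachable(String... urls) {
        for (String spec : urls) {
            HttpURLConnection conn = null;
            try {
                conn = (HttpURLConnection) new URL(spec).openConnection();
                conn.setConnectTimeout(3 * 1000);
                conn.setReadTimeout(3 * 1000);
                if (conn.getResponseCode() > 0) {
                    return true;
                }
            } catch (IOException ignored) {
                // DNS failure, timeout, and similar errors; try the next URL
            } finally {
                if (conn != null) {
                    conn.disconnect();
                }
            }
        }
        return false;
    }
}

anyReachable("http://www.google.com/", "https://www.google.com/") mirrors the pair of URIs introduced in the new_contents of this record.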
Java
mit
d47810af7131725e47728e549bd5a882ea0d9b50
0
sormuras/bach,sormuras/bach
/* * Bach - Java Shell Builder * Copyright (C) 2019 Christian Stein * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.sormuras.bach; import java.io.PrintWriter; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.net.URL; import java.net.URLClassLoader; import java.nio.file.Files; import java.nio.file.Path; import java.util.List; import java.util.spi.ToolProvider; /*BODY*/ public interface Configuration { default Path getHomeDirectory() { return Path.of(""); } default Path getWorkspaceDirectory() { return Path.of("bin"); } default Path getLibraryDirectory() { return getLibraryPaths().get(0); } default List<Path> getLibraryPaths() { return List.of(Path.of("lib")); } default List<Path> getSourceDirectories() { return List.of(Path.of("src")); } default Path resolve(Path path, String name) { return Configuration.resolve(getHomeDirectory(), path, name); } default List<Path> resolve(List<Path> paths, String name) { return Configuration.resolve(getHomeDirectory(), paths, name); } static Configuration of() { return of(Path.of("")); } static Configuration of(Path home) { validateDirectory(Util.requireNonNull(home, "home directory")); var ccc = compileCustomConfiguration(home); return new DefaultConfiguration( home, resolve(home, ccc.getWorkspaceDirectory(), "workspace directory"), resolve(home, ccc.getLibraryPaths(), "library paths"), resolve(home, ccc.getSourceDirectories(), "source directories")); } static Path resolve(Path home, Path path, String name) { return Util.requireNonNull(path, name).isAbsolute() ? 
path : home.resolve(path); } static List<Path> resolve(Path home, List<Path> paths, String name) { return List.of( Util.requireNonNull(paths, name).stream() .map(path -> resolve(home, path, "element of " + name)) .toArray(Path[]::new)); } private static Configuration compileCustomConfiguration(Path home) { class ConfigurationInvocationHandler implements Configuration, InvocationHandler { private final Object that; private ConfigurationInvocationHandler(Object that) { this.that = that; } @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { try { return that.getClass().getMethod(method.getName()).invoke(that); } catch (NoSuchMethodException e) { return this.getClass().getMethod(method.getName()).invoke(this); } } } var dot = home.resolve(".bach"); if (Files.isDirectory(dot)) { var bin = dot.resolve("bin"); var name = "Configuration"; var configurationJava = dot.resolve(name + ".java"); if (Files.exists(configurationJava)) { var javac = ToolProvider.findFirst("javac").orElseThrow(); var out = new PrintWriter(System.out, true); var err = new PrintWriter(System.err, true); javac.run(out, err, "-d", bin.toString(), configurationJava.toString()); } try { var parent = Configuration.class.getClassLoader(); var loader = URLClassLoader.newInstance(new URL[] {bin.toUri().toURL()}, parent); var configuration = loader.loadClass(name).getConstructor().newInstance(); if (configuration instanceof Configuration) { return (Configuration) configuration; } var interfaces = new Class[] {Configuration.class}; var handler = new ConfigurationInvocationHandler(configuration); return (Configuration) Proxy.newProxyInstance(loader, interfaces, handler); } catch (ClassNotFoundException e) { // ignore "missing" custom configuration class } catch (Exception e) { throw new Error("Loading custom configuration failed: " + configurationJava.toUri(), e); } } return new Configuration() {}; } class DefaultConfiguration implements Configuration { private final Path homeDirectory; private final Path workspaceDirectory; private final List<Path> libraryPaths; private final List<Path> sourceDirectories; private DefaultConfiguration( Path homeDirectory, Path workspaceDirectory, List<Path> libraryPaths, List<Path> sourceDirectories) { this.homeDirectory = homeDirectory; this.workspaceDirectory = workspaceDirectory; this.libraryPaths = Util.requireNonEmpty(libraryPaths, "library paths"); this.sourceDirectories = Util.requireNonEmpty(sourceDirectories, "source directories"); } @Override public Path getHomeDirectory() { return homeDirectory; } @Override public Path getWorkspaceDirectory() { return workspaceDirectory; } @Override public List<Path> getLibraryPaths() { return libraryPaths; } @Override public List<Path> getSourceDirectories() { return sourceDirectories; } @Override public String toString() { return "Configuration [" + String.join(", ", toStrings(this)) + "]"; } } class ValidationError extends AssertionError { private ValidationError(String expected, Object hint) { super(String.format("expected that %s: %s", expected, hint)); } } static List<String> toStrings(Configuration configuration) { var home = configuration.getHomeDirectory(); return List.of( String.format("home = '%s' -> %s", home, home.toUri()), String.format("workspace = '%s'", configuration.getWorkspaceDirectory()), String.format("library paths = %s", configuration.getLibraryPaths()), String.format("source directories = %s", configuration.getSourceDirectories())); } static void validate(Configuration configuration) { var 
home = configuration.getHomeDirectory(); validateDirectory(home); if (Util.list(home, Files::isDirectory).size() == 0) throw new ValidationError("home contains a directory", home.toUri()); var work = configuration.getWorkspaceDirectory(); if (Files.exists(work)) { validateDirectory(work); if (!work.toFile().canWrite()) throw new ValidationError("bin is writable: %s", work.toUri()); } else { var parentOfBin = work.toAbsolutePath().getParent(); if (parentOfBin != null && !parentOfBin.toFile().canWrite()) throw new ValidationError("parent of work is writable", parentOfBin.toUri()); } validateDirectoryIfExists(configuration.getLibraryDirectory()); configuration.getSourceDirectories().forEach(Configuration::validateDirectory); } static void validateDirectoryIfExists(Path path) { if (Files.exists(path)) validateDirectory(path); } static void validateDirectory(Path path) { if (!Files.isDirectory(path)) throw new ValidationError("path is a directory", path.toUri()); } }
src/modules/de.sormuras.bach/main/java/de/sormuras/bach/Configuration.java
/* * Bach - Java Shell Builder * Copyright (C) 2019 Christian Stein * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.sormuras.bach; import java.io.PrintWriter; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.net.URL; import java.net.URLClassLoader; import java.nio.file.Files; import java.nio.file.Path; import java.util.List; import java.util.spi.ToolProvider; /*BODY*/ public interface Configuration { default Path getHomeDirectory() { return Path.of(""); } default Path getWorkspaceDirectory() { return Path.of("bin"); } default Path getLibraryDirectory() { return getLibraryPaths().get(0); } default List<Path> getLibraryPaths() { return List.of(Path.of("lib")); } default List<Path> getSourceDirectories() { return List.of(Path.of("src")); } default Path resolve(Path path, String name) { return Configuration.resolve(getHomeDirectory(), path, name); } default List<Path> resolve(List<Path> paths, String name) { return Configuration.resolve(getHomeDirectory(), paths, name); } static Configuration of() { return of(Path.of("")); } static Configuration of(Path home) { validateDirectory(Util.requireNonNull(home, "home directory")); var ccc = compileCustomConfiguration(home); return new DefaultConfiguration( home, resolve(home, ccc.getWorkspaceDirectory(), "workspace directory"), resolve(home, ccc.getLibraryPaths(), "library paths"), resolve(home, ccc.getSourceDirectories(), "source directories")); } static Path resolve(Path home, Path path, String name) { return Util.requireNonNull(path, name).isAbsolute() ? 
path : home.resolve(path); } static List<Path> resolve(Path home, List<Path> paths, String name) { return List.of( Util.requireNonNull(paths, name).stream() .map(path -> resolve(home, path, "element of " + name)) .toArray(Path[]::new)); } private static Configuration compileCustomConfiguration(Path home) { class ConfigurationInvocationHandler implements Configuration, InvocationHandler { private final Object that; private ConfigurationInvocationHandler(Object that) { this.that = that; } @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { try { return that.getClass().getMethod(method.getName()).invoke(that); } catch (NoSuchMethodException e) { return this.getClass().getMethod(method.getName()).invoke(this); } } } var dot = home.resolve(".bach"); if (Files.isDirectory(dot)) { var out = new PrintWriter(System.out, true); var err = new PrintWriter(System.err, true); var javac = ToolProvider.findFirst("javac").orElseThrow(); var bin = dot.resolve("bin"); var name = "Configuration"; var configurationJava = dot.resolve(name + ".java"); if (Files.exists(configurationJava)) { javac.run(out, err, "-d", bin.toString(), configurationJava.toString()); } try { var parent = Configuration.class.getClassLoader(); var loader = URLClassLoader.newInstance(new URL[] {bin.toUri().toURL()}, parent); var configuration = loader.loadClass(name).getConstructor().newInstance(); var interfaces = new Class[] {Configuration.class}; var handler = new ConfigurationInvocationHandler(configuration); return (Configuration) Proxy.newProxyInstance(loader, interfaces, handler); } catch (ClassNotFoundException e) { // ignore "missing" custom configuration class } catch (Exception e) { throw new Error("Loading custom configuration failed: " + configurationJava.toUri(), e); } } return new Configuration() {}; } class DefaultConfiguration implements Configuration { private final Path homeDirectory; private final Path workspaceDirectory; private final List<Path> libraryPaths; private final List<Path> sourceDirectories; private DefaultConfiguration( Path homeDirectory, Path workspaceDirectory, List<Path> libraryPaths, List<Path> sourceDirectories) { this.homeDirectory = homeDirectory; this.workspaceDirectory = workspaceDirectory; this.libraryPaths = Util.requireNonEmpty(libraryPaths, "library paths"); this.sourceDirectories = Util.requireNonEmpty(sourceDirectories, "source directories"); } @Override public Path getHomeDirectory() { return homeDirectory; } @Override public Path getWorkspaceDirectory() { return workspaceDirectory; } @Override public List<Path> getLibraryPaths() { return libraryPaths; } @Override public List<Path> getSourceDirectories() { return sourceDirectories; } @Override public String toString() { return "Configuration [" + String.join(", ", toStrings(this)) + "]"; } } class ValidationError extends AssertionError { private ValidationError(String expected, Object hint) { super(String.format("expected that %s: %s", expected, hint)); } } static List<String> toStrings(Configuration configuration) { var home = configuration.getHomeDirectory(); return List.of( String.format("home = '%s' -> %s", home, home.toUri()), String.format("workspace = '%s'", configuration.getWorkspaceDirectory()), String.format("library paths = %s", configuration.getLibraryPaths()), String.format("source directories = %s", configuration.getSourceDirectories())); } static void validate(Configuration configuration) { var home = configuration.getHomeDirectory(); validateDirectory(home); if (Util.list(home, 
Files::isDirectory).size() == 0) throw new ValidationError("home contains a directory", home.toUri()); var work = configuration.getWorkspaceDirectory(); if (Files.exists(work)) { validateDirectory(work); if (!work.toFile().canWrite()) throw new ValidationError("bin is writable: %s", work.toUri()); } else { var parentOfBin = work.toAbsolutePath().getParent(); if (parentOfBin != null && !parentOfBin.toFile().canWrite()) throw new ValidationError("parent of work is writable", parentOfBin.toUri()); } validateDirectoryIfExists(configuration.getLibraryDirectory()); configuration.getSourceDirectories().forEach(Configuration::validateDirectory); } static void validateDirectoryIfExists(Path path) { if (Files.exists(path)) validateDirectory(path); } static void validateDirectory(Path path) { if (!Files.isDirectory(path)) throw new ValidationError("path is a directory", path.toUri()); } }
Use compiled custom configuration directly, if possible
src/modules/de.sormuras.bach/main/java/de/sormuras/bach/Configuration.java
Use compiled custom configuration directly, if possible
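The commit captured in the record above ("Use compiled custom configuration directly, if possible") changes compileCustomConfiguration so that a compiled .bach/Configuration class which already implements the Configuration interface is returned as-is, and only otherwise is it wrapped in a java.lang.reflect.Proxy whose handler falls back to the interface defaults. The standalone sketch below illustrates that instanceof-then-proxy pattern; it is not part of Bach, and ProxyFallbackDemo, Settings, LooseSettings, FallbackHandler and adapt are hypothetical names used only for illustration.

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

// Sketch of the "use the compiled object directly, else adapt it via a proxy" pattern
// from compileCustomConfiguration above. All types here are hypothetical stand-ins.
public class ProxyFallbackDemo {

  // Plays the role of Configuration: an interface whose defaults supply fallback values.
  public interface Settings {
    default String workspace() { return "bin"; }
    default String library()   { return "lib"; }
  }

  // Plays the role of a user-supplied class that does NOT implement the interface,
  // but declares a method with a matching name.
  public static class LooseSettings {
    public String workspace() { return "out"; }
  }

  // Same idea as ConfigurationInvocationHandler: try the user object first,
  // then fall back to the interface default implemented by this handler.
  public static class FallbackHandler implements Settings, InvocationHandler {
    private final Object that;
    public FallbackHandler(Object that) { this.that = that; }

    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
      try {
        // Prefer the method of the user-supplied object, matched by name.
        return that.getClass().getMethod(method.getName()).invoke(that);
      } catch (NoSuchMethodException e) {
        // Fall back to the default method this handler inherits from Settings.
        return getClass().getMethod(method.getName()).invoke(this);
      }
    }
  }

  public static Settings adapt(Object candidate) {
    // The behavior added by the commit above: use the compiled object directly
    // when it already implements the target interface ...
    if (candidate instanceof Settings) {
      return (Settings) candidate;
    }
    // ... and only otherwise adapt it through a dynamic proxy.
    Class<?>[] interfaces = {Settings.class};
    return (Settings) Proxy.newProxyInstance(
        Settings.class.getClassLoader(), interfaces, new FallbackHandler(candidate));
  }

  public static void main(String[] args) {
    Settings adapted = adapt(new LooseSettings());
    System.out.println(adapted.workspace()); // "out" -> taken from LooseSettings
    System.out.println(adapted.library());   // "lib" -> interface default via the handler
  }
}

The instanceof check in adapt mirrors the one the commit adds to compileCustomConfiguration, so a custom configuration that already implements the interface is no longer forced through reflective dispatch on every call.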
Java
mit
5e36e333d645ee6e54b6111ccc15258982916f11
0
eisop/annotation-tools,typetools/annotation-tools,eisop/annotation-tools,eisop/annotation-tools,typetools/annotation-tools,typetools/annotation-tools
package scenelib.annotations.el; import com.google.common.collect.ImmutableMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import scenelib.annotations.Annotation; import scenelib.annotations.io.IndexFileParser; import scenelib.annotations.util.coll.VivifyingMap; /** * An <code>AScene</code> (annotated scene) represents the annotations on a * set of Java classes and packages along with the definitions of some or all of * the annotation types used. * * <p> * Each client of the annotation library may wish to use its own representation * for certain kinds of annotations instead of a simple name-value map; thus, a * layer of abstraction in the storage of annotations was introduced. * * <p> * <code>AScene</code>s and many {@link AElement}s can contain other * {@link AElement}s. When these objects are created, their collections of * subelements are empty. In order to associate an annotation with a particular * Java element in an <code>AScene</code>, one must first ensure that an * appropriate {@link AElement} exists in the <code>AScene</code>. To this * end, the maps of subelements have a <code>vivify</code> method. Calling * <code>vivify</code> to access a particular subelement will return the * subelement if it already exists; otherwise it will create and then return the * subelement. (Compare to vivification in Perl.) For example, the following * code will obtain an {@link AMethod} representing <code>Foo.bar</code> in * the <code>AScene</code> <code>s</code>, creating it if it did not * already exist: * * <pre> * AMethod&lt;A&gt; m = s.classes.getVivify("Foo").methods.getVivify("bar"); * </pre> * * <p> * Then one can add an annotation to the method: * * <pre> * m.annotationsHere.add(new Annotation( * new AnnotationDef(taintedDef, RetentionPolicy.RUNTIME, true), * new Annotation(taintedDef, Collections.emptyMap()) * )); * </pre> */ public class AScene implements Cloneable { /** If true, check that the copy constructor works correctly. */ private static boolean checkClones = true; /** This scene's annotated packages; map key is package name */ public final VivifyingMap<String, AElement> packages = AElement.<String>newVivifyingLHMap_AE(); /** * Contains for each annotation type a set of imports to be added to * the source if the annotation is inserted with the "abbreviate" * option on.<br> * <strong>Key</strong>: fully-qualified name of an annotation. e.g. for <code>@com.foo.Bar(x)</code>, * the fully-qualified name is <code>com.foo.Bar</code> <br> * <strong>Value</strong>: names of packages this annotation needs */ public final Map<String, Set<String>> imports = new LinkedHashMap<>(); /** This scene's annotated classes; map key is class name */ public final VivifyingMap</*@BinaryName*/ String, AClass> classes = new VivifyingMap<String, AClass>( new LinkedHashMap<>()) { @Override public AClass createValueFor( String k) { return new AClass(k); } @Override public boolean isEmptyValue(AClass v) { return v.isEmpty(); } }; /** * Creates a new {@link AScene} with no classes or packages. */ public AScene() { } /** * Copy constructor for {@link AScene}. 
*/ public AScene(AScene scene) { for (String key : scene.packages.keySet()) { AElement val = scene.packages.get(key); packages.put(key, val.clone()); } for (String key : scene.imports.keySet()) { // copy could in principle have different Set implementation Set<String> value = scene.imports.get(key); Set<String> copy = new LinkedHashSet<>(); copy.addAll(value); imports.put(key, copy); } for (String key : scene.classes.keySet()) { AClass clazz = scene.classes.get(key); classes.put(key, clazz.clone()); } if (checkClones) { checkClone(this, scene); } } @Override public AScene clone() { return new AScene(this); } /** * Returns whether this {@link AScene} equals <code>o</code>; the * commentary and the cautionary remarks on {@link AElement#equals(Object)} * also apply to {@link AScene#equals(Object)}. */ @Override public boolean equals(Object o) { return o instanceof AScene && ((AScene) o).equals(this); } /** * Returns whether this {@link AScene} equals <code>o</code>; a * slightly faster variant of {@link #equals(Object)} for when the argument * is statically known to be another nonnull {@link AScene}. */ public boolean equals(AScene o) { return o.classes.equals(classes) && o.packages.equals(packages); } @Override public int hashCode() { return classes.hashCode() + packages.hashCode(); } /** * Fetch the classes in this scene, represented as AClass objects. * * @return an immutable map from binary names to AClass objects */ public Map</*@BinaryName*/ String, AClass> getClasses() { return ImmutableMap.copyOf(classes); } /** * Returns whether this {@link AScene} is empty. */ public boolean isEmpty() { return classes.isEmpty() && packages.isEmpty(); } /** * Removes empty subelements of this {@link AScene} depth-first. */ public void prune() { classes.prune(); packages.prune(); } /** Returns a string representation. */ public String unparse() { StringBuilder sb = new StringBuilder(); sb.append("packages:\n"); for (Map.Entry<String, AElement> entry : packages.entrySet()) { sb.append(" " + entry.getKey() + " => " + entry.getValue() + "\n"); } sb.append("classes:\n"); for (Map.Entry<String, AClass> entry : classes.entrySet()) { sb.append(" " + entry.getKey() + " => " + "\n"); sb.append(entry.getValue().unparse(" ")); } return sb.toString(); } @Override public String toString() { return unparse(); } /** * Checks that the arguments are clones of one another. * * Throws exception if the arguments 1) are the same reference; * 2) are not equal() in both directions; or 3) contain * corresponding elements that meet either of the preceding two * conditions. * * @param s0 the first AScene to compare * @param s1 the second Ascene to compare */ public static void checkClone(AScene s0, AScene s1) { if (s0 == null) { if (s1 != null) { cloneCheckFail(); } } else { if (s1 == null) { cloneCheckFail(); } s0.prune(); s1.prune(); if (s0 == s1) { cloneCheckFail(); } checkCloneElems(s0.packages, s1.packages); checkCloneElems(s0.classes, s1.classes); } } /** * Throw exception if m0 == m1 or !m0.equals(m1). * (See {@link #checkClone(AScene, AScene)} for explanation.) 
* * @param <K> the type of map keys * @param <V> the type of map values * @param m0 the first map to compare * @param m1 the second map to compare */ public static <K, V extends AElement> void checkCloneElems(VivifyingMap<K, V> m0, VivifyingMap<K, V> m1) { if (m0 == null) { if (m1 != null) { cloneCheckFail(); } } else if (m1 == null) { cloneCheckFail(); } else { for (K k : m0.keySet()) { checkCloneElem(m0.get(k), m1.get(k)); } } } /** * Throw exception if e0 == e1 or !e0.equals(e1). * (See {@link #checkClone(AScene, AScene)} for explanation.) * * @param e0 the first element to compare * @param e1 the second element to compare */ public static void checkCloneElem(AElement e0, AElement e1) { checkCloneObject(e0, e1); if (e0 != null) { if (e0 == e1) { cloneCheckFail(); } e0.accept(checkVisitor, e1); } } /** * Throw exception on visit if !el.equals(arg) or !arg.equals(el). * (See {@link #checkClone(AScene, AScene)} for explanation.) * * @param o0 the first object to compare * @param o1 the second object to compare */ public static void checkCloneObject(Object o0, Object o1) { if (o0 == null ? o1 != null : !(o0.equals(o1) && o1.equals(o0))) { // ok if == throw new RuntimeException( String.format("clone check failed for %s [%s] %s [%s]", o0, o0.getClass(), o1, o1.getClass())); } } /** * Throw exception on visit if el == arg or !el.equals(arg). * (See {@link #checkClone(AScene, AScene)} for explanation.) */ private static ElementVisitor<Void, AElement> checkVisitor = new ElementVisitor<Void, AElement>() { @Override public Void visitAnnotationDef(AnnotationDef el, AElement arg) { return null; } @Override public Void visitBlock(ABlock el, AElement arg) { ABlock b = (ABlock) arg; checkCloneElems(el.locals, b.locals); return null; } @Override public Void visitClass(AClass el, AElement arg) { AClass c = (AClass) arg; checkCloneElems(el.bounds, c.bounds); checkCloneElems(el.extendsImplements, c.extendsImplements); checkCloneElems(el.fieldInits, c.fieldInits); checkCloneElems(el.fields, c.fields); checkCloneElems(el.instanceInits, c.instanceInits); checkCloneElems(el.methods, c.methods); checkCloneElems(el.staticInits, c.staticInits); return visitDeclaration(el, arg); } @Override public Void visitDeclaration(ADeclaration el, AElement arg) { ADeclaration d = (ADeclaration) arg; checkCloneElems(el.insertAnnotations, d.insertAnnotations); checkCloneElems(el.insertTypecasts, d.insertTypecasts); return visitElement(el, arg); } @Override public Void visitExpression(AExpression el, AElement arg) { AExpression e = (AExpression) arg; checkCloneObject(el.id, e.id); checkCloneElems(el.calls, e.calls); checkCloneElems(el.funs, e.funs); checkCloneElems(el.instanceofs, e.instanceofs); checkCloneElems(el.news, e.news); checkCloneElems(el.refs, e.refs); checkCloneElems(el.typecasts, e.typecasts); return visitElement(el, arg); } @Override public Void visitField(AField el, AElement arg) { AField f = (AField) arg; checkCloneElem(el.init, f.init); return visitDeclaration(el, arg); } @Override public Void visitMethod(AMethod el, AElement arg) { AMethod m = (AMethod) arg; checkCloneObject(el.methodSignature, m.methodSignature); checkCloneElems(el.bounds, m.bounds); checkCloneElem(el.returnType, m.returnType); checkCloneElem(el.receiver, m.receiver); checkCloneElems(el.parameters, m.parameters); checkCloneElems(el.throwsException, m.throwsException); checkCloneElem(el.body, m.body); return null; } @Override public Void visitTypeElement(ATypeElement el, AElement arg) { ATypeElement t = (ATypeElement) arg; 
checkCloneObject(el.description, t.description); checkCloneElems(el.innerTypes, t.innerTypes); return null; } @Override public Void visitTypeElementWithType(ATypeElementWithType el, AElement arg) { ATypeElementWithType t = (ATypeElementWithType) arg; checkCloneObject(el.getType(), t.getType()); return visitTypeElement(el, arg); } @Override public Void visitElement(AElement el, AElement arg) { checkCloneObject(el.description, arg.description); if (el.tlAnnotationsHere.size() != arg.tlAnnotationsHere.size()) { cloneCheckFail(); } for (Annotation a : el.tlAnnotationsHere) { if (!arg.tlAnnotationsHere.contains(a)) { cloneCheckFail(); } } checkCloneElem(el.type, arg.type); return null; } }; private static void cloneCheckFail() { throw new RuntimeException("clone check failed"); } // temporary main for easy testing on JAIFs public static void main(String[] args) { int status = 0; checkClones = true; for (int i = 0; i < args.length; i++) { AScene s0 = new AScene(); System.out.print(args[i] + ": "); try { IndexFileParser.parseFile(args[i], s0); s0.clone(); System.out.println("ok"); } catch (Throwable e) { status = 1; System.out.println("failed"); e.printStackTrace(); } } System.exit(status); } }
scene-lib/src/scenelib/annotations/el/AScene.java
package scenelib.annotations.el; import com.google.common.collect.ImmutableMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import scenelib.annotations.Annotation; import scenelib.annotations.io.IndexFileParser; import scenelib.annotations.util.coll.VivifyingMap; /** * An <code>AScene</code> (annotated scene) represents the annotations on a * set of Java classes and packages along with the definitions of some or all of * the annotation types used. * * <p> * Each client of the annotation library may wish to use its own representation * for certain kinds of annotations instead of a simple name-value map; thus, a * layer of abstraction in the storage of annotations was introduced. * * <p> * <code>AScene</code>s and many {@link AElement}s can contain other * {@link AElement}s. When these objects are created, their collections of * subelements are empty. In order to associate an annotation with a particular * Java element in an <code>AScene</code>, one must first ensure that an * appropriate {@link AElement} exists in the <code>AScene</code>. To this * end, the maps of subelements have a <code>vivify</code> method. Calling * <code>vivify</code> to access a particular subelement will return the * subelement if it already exists; otherwise it will create and then return the * subelement. (Compare to vivification in Perl.) For example, the following * code will obtain an {@link AMethod} representing <code>Foo.bar</code> in * the <code>AScene</code> <code>s</code>, creating it if it did not * already exist: * * <pre> * AMethod&lt;A&gt; m = s.classes.getVivify("Foo").methods.getVivify("bar"); * </pre> * * <p> * Then one can add an annotation to the method: * * <pre> * m.annotationsHere.add(new Annotation( * new AnnotationDef(taintedDef, RetentionPolicy.RUNTIME, true), * new Annotation(taintedDef, Collections.emptyMap()) * )); * </pre> */ public class AScene implements Cloneable { /** If true, check that the copy constructor works correctly. */ private static boolean checkClones = true; /** This scene's annotated packages; map key is package name */ public final VivifyingMap<String, AElement> packages = AElement.<String>newVivifyingLHMap_AE(); /** * Contains for each annotation type a set of imports to be added to * the source if the annotation is inserted with the "abbreviate" * option on.<br> * <strong>Key</strong>: fully-qualified name of an annotation. e.g. for <code>@com.foo.Bar(x)</code>, * the fully-qualified name is <code>com.foo.Bar</code> <br> * <strong>Value</strong>: names of packages this annotation needs */ public final Map<String, Set<String>> imports = new LinkedHashMap<>(); /** This scene's annotated classes; map key is class name */ public final VivifyingMap</*@BinaryName*/ String, AClass> classes = new VivifyingMap<String, AClass>( new LinkedHashMap<>()) { @Override public AClass createValueFor( String k) { return new AClass(k); } @Override public boolean isEmptyValue(AClass v) { return v.isEmpty(); } }; /** * Creates a new {@link AScene} with no classes or packages. */ public AScene() { } /** * Copy constructor for {@link AScene}. 
*/ public AScene(AScene scene) { for (String key : scene.packages.keySet()) { AElement val = scene.packages.get(key); packages.put(key, val.clone()); } for (String key : scene.imports.keySet()) { // copy could in principle have different Set implementation Set<String> value = scene.imports.get(key); Set<String> copy = new LinkedHashSet<>(); copy.addAll(value); imports.put(key, copy); } for (String key : scene.classes.keySet()) { AClass clazz = scene.classes.get(key); classes.put(key, clazz.clone()); } if (checkClones) { checkClone(this, scene); } } @Override public AScene clone() { return new AScene(this); } /** * Returns whether this {@link AScene} equals <code>o</code>; the * commentary and the cautionary remarks on {@link AElement#equals(Object)} * also apply to {@link AScene#equals(Object)}. */ @Override public boolean equals(Object o) { return o instanceof AScene && ((AScene) o).equals(this); } /** * Returns whether this {@link AScene} equals <code>o</code>; a * slightly faster variant of {@link #equals(Object)} for when the argument * is statically known to be another nonnull {@link AScene}. */ public boolean equals(AScene o) { return o.classes.equals(classes) && o.packages.equals(packages); } @Override public int hashCode() { return classes.hashCode() + packages.hashCode(); } /** * Fetch the classes in this scene, represented as AClass objects. * * @return an immutable map from binary names to AClass objects */ public Map</*@BinaryName*/ String, AClass> getClasses() { return ImmutableMap.copyOf(classes); } /** * Returns whether this {@link AScene} is empty. */ public boolean isEmpty() { return classes.isEmpty() && packages.isEmpty(); } /** * Removes empty subelements of this {@link AScene} depth-first. */ public void prune() { classes.prune(); packages.prune(); } /** Returns a string representation. */ public String unparse() { StringBuilder sb = new StringBuilder(); sb.append("packages:\n"); for (Map.Entry<String, AElement> entry : packages.entrySet()) { sb.append(" " + entry.getKey() + " => " + entry.getValue() + "\n"); } sb.append("classes:\n"); for (Map.Entry<String, AClass> entry : classes.entrySet()) { sb.append(" " + entry.getKey() + " => " + "\n"); sb.append(entry.getValue().unparse(" ")); } return sb.toString(); } @Override public String toString() { return unparse(); } /** * Checks that the arguments are clones of one another. * * Throws exception if the arguments 1) are the same reference; * 2) are not equal() in both directions; or 3) contain * corresponding elements that meet either of the preceding two * conditions. */ public static void checkClone(AScene s0, AScene s1) { if (s0 == null) { if (s1 != null) { cloneCheckFail(); } } else { if (s1 == null) { cloneCheckFail(); } s0.prune(); s1.prune(); if (s0 == s1) { cloneCheckFail(); } checkElems(s0.packages, s1.packages); checkElems(s0.classes, s1.classes); } } public static <K, V extends AElement> void checkElems(VivifyingMap<K, V> m0, VivifyingMap<K, V> m1) { if (m0 == null) { if (m1 != null) { cloneCheckFail(); } } else if (m1 == null) { cloneCheckFail(); } else { for (K k : m0.keySet()) { checkElem(m0.get(k), m1.get(k)); } } } /** * Throw exception on visit if e0 == e1 or !e0.equals(e1). * (See {@link #checkClone(AScene, AScene)} for explanation.) */ public static void checkElem(AElement e0, AElement e1) { checkObject(e0, e1); if (e0 != null) { if (e0 == e1) { cloneCheckFail(); } e0.accept(checkVisitor, e1); } } /** * Throw exception on visit if !el.equals(arg) or !arg.equals(el). 
* (See {@link #checkClone(AScene, AScene)} for explanation.) */ public static void checkObject(Object o0, Object o1) { if (o0 == null ? o1 != null : !(o0.equals(o1) && o1.equals(o0))) { // ok if == throw new RuntimeException( String.format("clone check failed for %s [%s] %s [%s]", o0, o0.getClass(), o1, o1.getClass())); } } /** * Throw exception on visit if el == arg or !el.equals(arg). * (See {@link #checkClone(AScene, AScene)} for explanation.) */ private static ElementVisitor<Void, AElement> checkVisitor = new ElementVisitor<Void, AElement>() { @Override public Void visitAnnotationDef(AnnotationDef el, AElement arg) { return null; } @Override public Void visitBlock(ABlock el, AElement arg) { ABlock b = (ABlock) arg; checkElems(el.locals, b.locals); return null; } @Override public Void visitClass(AClass el, AElement arg) { AClass c = (AClass) arg; checkElems(el.bounds, c.bounds); checkElems(el.extendsImplements, c.extendsImplements); checkElems(el.fieldInits, c.fieldInits); checkElems(el.fields, c.fields); checkElems(el.instanceInits, c.instanceInits); checkElems(el.methods, c.methods); checkElems(el.staticInits, c.staticInits); return visitDeclaration(el, arg); } @Override public Void visitDeclaration(ADeclaration el, AElement arg) { ADeclaration d = (ADeclaration) arg; checkElems(el.insertAnnotations, d.insertAnnotations); checkElems(el.insertTypecasts, d.insertTypecasts); return visitElement(el, arg); } @Override public Void visitExpression(AExpression el, AElement arg) { AExpression e = (AExpression) arg; checkObject(el.id, e.id); checkElems(el.calls, e.calls); checkElems(el.funs, e.funs); checkElems(el.instanceofs, e.instanceofs); checkElems(el.news, e.news); checkElems(el.refs, e.refs); checkElems(el.typecasts, e.typecasts); return visitElement(el, arg); } @Override public Void visitField(AField el, AElement arg) { AField f = (AField) arg; checkElem(el.init, f.init); return visitDeclaration(el, arg); } @Override public Void visitMethod(AMethod el, AElement arg) { AMethod m = (AMethod) arg; checkObject(el.methodSignature, m.methodSignature); checkElem(el.body, m.body); checkElem(el.returnType, m.returnType); checkElems(el.bounds, m.bounds); checkElems(el.parameters, m.parameters); checkElems(el.throwsException, m.throwsException); return null; } @Override public Void visitTypeElement(ATypeElement el, AElement arg) { ATypeElement t = (ATypeElement) arg; checkObject(el.description, t.description); checkElems(el.innerTypes, t.innerTypes); return null; } @Override public Void visitTypeElementWithType(ATypeElementWithType el, AElement arg) { ATypeElementWithType t = (ATypeElementWithType) arg; checkObject(el.getType(), t.getType()); return visitTypeElement(el, arg); } @Override public Void visitElement(AElement el, AElement arg) { checkObject(el.description, arg.description); if (el.tlAnnotationsHere.size() != arg.tlAnnotationsHere.size()) { cloneCheckFail(); } for (Annotation a : el.tlAnnotationsHere) { if (!arg.tlAnnotationsHere.contains(a)) { cloneCheckFail(); } } checkElem(el.type, arg.type); return null; } }; private static void cloneCheckFail() { throw new RuntimeException("clone check failed"); } // temporary main for easy testing on JAIFs public static void main(String[] args) { int status = 0; checkClones = true; for (int i = 0; i < args.length; i++) { AScene s0 = new AScene(); System.out.print(args[i] + ": "); try { IndexFileParser.parseFile(args[i], s0); s0.clone(); System.out.println("ok"); } catch (Throwable e) { status = 1; System.out.println("failed"); e.printStackTrace(); 
} } System.exit(status); } }
Rename checkClone methods for consistency
scene-lib/src/scenelib/annotations/el/AScene.java
Rename checkClone methods for consistency
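The record above covers scenelib's AScene: the copy constructor deep-copies packages, imports and classes, and the renamed checkClone/checkCloneElems/checkCloneElem/checkCloneObject helpers verify that an original and its clone are equal in both directions without being the same reference. A minimal usage sketch follows, assuming only the API that appears in the record itself (AScene, clone(), checkClone(), equals(), and IndexFileParser.parseFile); the file name "example.jaif" is a hypothetical placeholder.

import scenelib.annotations.el.AScene;
import scenelib.annotations.io.IndexFileParser;

// Minimal sketch: populate a scene from an index file, clone it, and run the
// clone-consistency check shown in the record above.
public class CloneCheckDemo {
  public static void main(String[] args) throws Exception {
    AScene original = new AScene();
    IndexFileParser.parseFile("example.jaif", original); // fill the scene from a JAIF

    // clone() delegates to the copy constructor, which (while checkClones is true)
    // already runs checkClone(this, scene) internally; the explicit call below just
    // makes the round-trip check visible.
    AScene copy = original.clone();
    AScene.checkClone(original, copy); // throws RuntimeException on any mismatch

    System.out.println("clone is structurally equal: " + original.equals(copy));
  }
}

Parsing a file and then cloning the scene exercises the same path as the temporary main method in the record, which prints "ok" or "failed" per input JAIF.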
Java
mit
958dfe6dac87437150a733a75650a29d69452261
0
fvasquezjatar/fermat-unused,fvasquezjatar/fermat-unused
package com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1; /** * Created by ciencias on 2/14/15. */ import com.bitdubai.fermat_api.CantStartPluginException; import com.bitdubai.fermat_api.FermatException; import com.bitdubai.fermat_api.Plugin; import com.bitdubai.fermat_api.Service; import com.bitdubai.fermat_api.layer.all_definition.enums.ServiceStatus; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.Activity; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.Fragment; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.SearchView; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.SideMenu; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.Tab; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.MenuItem; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.TabStrip; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.TitleBar; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.App; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.AppRuntimeManager; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.MainMenu; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.SubApp; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.enums.Activities; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.enums.Apps; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.enums.Fragments; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.enums.SubApps; import com.bitdubai.fermat_api.layer.osa_android.file_system.DealsWithPluginFileSystem; import com.bitdubai.fermat_api.layer.osa_android.file_system.PluginFileSystem; import com.bitdubai.fermat_api.layer.pip_platform_service.error_manager.DealsWithErrors; import com.bitdubai.fermat_api.layer.pip_platform_service.error_manager.ErrorManager; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.DealsWithEvents; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.EventHandler; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.EventListener; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.EventManager; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.EventType; import com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1.event_handlers.WalletResourcesInstalledEventHandler; import com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1.exceptions.CantFactoryReset; import com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1.structure.RuntimeApp; import com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1.structure.RuntimeSubApp; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.UUID; /** * The App Runtime is the module in charge of the UI navigation structure. A user is always at a certain point in this * structure. */ /** * A Navigation stack is maintained by this plugin to allow the user to go back all the stack down to the root if necessary. */ public class AppRuntimeMiddlewarePluginRoot implements Service, AppRuntimeManager, DealsWithEvents, DealsWithErrors, DealsWithPluginFileSystem, Plugin { /** * Service Interface member variables. 
*/ ServiceStatus serviceStatus = ServiceStatus.CREATED; /** * AppRuntimeManager Interface member variables. */ List<EventListener> listenersAdded = new ArrayList<>(); Map<Apps, App> listApps = new HashMap<Apps, App>(); Map<SubApps, SubApp> listSubApp = new HashMap<SubApps, SubApp>(); Map<Activities, Activity> listActivities = new HashMap<Activities, Activity>(); Map<Fragments, Fragment> listFragments = new HashMap<Fragments, Fragment>(); Apps lastApp; SubApps lastSubapp; Activities lastActivity; Fragments lastFragment; /** * UsesFileSystem Interface member variables. */ PluginFileSystem pluginFileSystem; /** * DealsWithErrors Interface member variables. */ ErrorManager errorManager; /** * DealWithEvents Interface member variables. */ EventManager eventManager; /** * Plugin Interface member variables. */ UUID pluginId; public void addToNavigationStructure(/*String NavigationStructure, NicheWalletType*/) { /* PlatformEvent platformEvent = eventManager.getNewEvent(EventType.NAVIGATION_STRUCTURE_UPDATED); ((NavigationStructureUpdatedEvent) platformEvent).----------(this.-----); eventManager.raiseEvent(platformEvent); */ } /* PlatformEvent platformEvent = eventManager.getNewEvent(EventType.NAVIGATION_STRUCTURE_UPDATED); ((NavigationStructureUpdatedEvent) platformEvent).--------(this.-------); eventManager.raiseEvent(platformEvent); */ @Override public void start() throws CantStartPluginException{ /** * I will initialize the handling of com.bitdubai.platform events. */ EventListener eventListener; EventHandler eventHandler; eventListener = eventManager.getNewListener(EventType.WALLET_RESOURCES_INSTALLED); eventHandler = new WalletResourcesInstalledEventHandler(); ((WalletResourcesInstalledEventHandler) eventHandler).setAppRuntimeManager(this); eventListener.setEventHandler(eventHandler); eventManager.addListener(eventListener); listenersAdded.add(eventListener); /** * At this time the only thing I can do is a factory reset. Once there should be a possibility to add * functionality based on wallets downloaded by users this wont be an option. * * * */ try { factoryReset(); } catch(CantFactoryReset ex) { String message = CantStartPluginException.DEFAULT_MESSAGE; FermatException cause = ex; String context = "App Runtime Start"; String possibleReason = "Some null definition"; throw new CantStartPluginException(message, cause, context, possibleReason); } this.serviceStatus = ServiceStatus.STARTED; } @Override public void pause(){ this.serviceStatus = ServiceStatus.PAUSED; } @Override public void resume(){ this.serviceStatus = ServiceStatus.STARTED; } @Override public void stop(){ /** * I will remove all the listeners registered with the event manager. */ for (EventListener eventListener : listenersAdded){ eventManager.removeListener(eventListener); } listenersAdded.clear(); this.serviceStatus = ServiceStatus.STOPPED; } @Override public ServiceStatus getStatus() { return this.serviceStatus; } /** * AppRuntime Interface implementation. 
*/ @Override public App getApp(Apps app) { return null; } @Override public App getLastApp() { Iterator<Map.Entry<Apps, App>> eapp = this.listApps.entrySet().iterator(); while (eapp.hasNext()) { Map.Entry<Apps, App> appEntry = eapp.next(); RuntimeApp app = (RuntimeApp) appEntry.getValue(); if(app.getType().name().equals(lastApp.name())){ return app; } } return null; } @Override public SubApp getLastSubApp() { Iterator<Map.Entry<SubApps, SubApp>> esubapp = this.listSubApp.entrySet().iterator(); while (esubapp.hasNext()) { Map.Entry<SubApps, SubApp> subappEntry = esubapp.next(); RuntimeSubApp subapp = (RuntimeSubApp) subappEntry.getValue(); if(subapp.getType().name().equals(this.lastSubapp.name())){ return subapp; } } return null; } @Override public Activity getLasActivity() { Iterator<Map.Entry<Activities, Activity>> eactivity = this.listActivities.entrySet().iterator(); while (eactivity.hasNext()) { Map.Entry<Activities, Activity> activityEntry = eactivity.next(); Activity activity = (Activity) activityEntry.getValue(); if(activity.getType().name().equals(this.lastActivity.name())){ return activity; } } return null; } @Override public Fragment getLastFragment() { Iterator<Map.Entry<Fragments, Fragment>> efragment = this.listFragments.entrySet().iterator(); while (efragment.hasNext()) { Map.Entry<Fragments, Fragment> fragmentEntry = efragment.next(); Fragment fragment = (Fragment) fragmentEntry.getValue(); if(fragment.getType().name().equals(this.lastFragment.name())){ return fragment; } } return null; } @Override public Activity getActivity(Activities app) { Iterator<Map.Entry<Activities, Activity>> eactivity = this.listActivities.entrySet().iterator(); while (eactivity.hasNext()) { Map.Entry<Activities, Activity> activityEntry = eactivity.next(); Activity activity = (Activity) activityEntry.getValue(); if(activity.getType().name().equals(app.name())){ lastActivity = activity.getType(); return activity; } } return null; } @Override public Fragment getFragment(Fragments frag) { Iterator<Map.Entry<Fragments, Fragment>> efragment = this.listFragments.entrySet().iterator(); while (efragment.hasNext()) { Map.Entry<Fragments, Fragment> fragmentEntry = efragment.next(); Fragment fragment = (Fragment) fragmentEntry.getValue(); if(fragment.getType().name().equals(frag.name())){ lastFragment = fragment.getType(); return fragment; } } return null; } /** * UsesFileSystem Interface implementation. */ @Override public void setPluginFileSystem(PluginFileSystem pluginFileSystem) { this.pluginFileSystem = pluginFileSystem; } /** * DealWithEvents Interface implementation. */ @Override public void setEventManager(EventManager eventManager) { this.eventManager = eventManager; } /** * DealWithErrors Interface implementation. */ @Override public void setErrorManager(ErrorManager errorManager) { this.errorManager = errorManager; } /** * DealsWithPluginIdentity methods implementation. */ @Override public void setId(UUID pluginId) { this.pluginId = pluginId; } /** * The first time this plugins runs, it will setup the initial structure for the App, subApp and so on through the local * interfaces of the classes involved, */ private void firstRunCheck() { /** * First I check weather this a structure already created, if not I create the "Factory" structure. */ } /** * Here is where I actually generate the factory structure of the APP. This method is also useful to reset to the * factory structure. 
*/ private void factoryReset() throws CantFactoryReset { try { RuntimeApp runtimeApp; RuntimeSubApp runtimeSubApp; Activity runtimeActivity; Fragment runtimeFragment; TitleBar runtimeTitleBar; SideMenu runtimeSideMenu; MainMenu runtimeMainMenu; MenuItem runtimeMenuItem; TabStrip runtimeTabStrip; Tab runtimeTab; runtimeApp = new RuntimeApp(); runtimeApp.setType(Apps.CRYPTO_WALLET_PLATFORM); runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_SHELL); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_SHELL, runtimeSubApp); runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_SHELL_LOGIN); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_SHELL_LOGIN, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHELL_LOGIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHELL_LOGIN, runtimeFragment); /** * Definition of Developer Manager App */ runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_DEVELOPER_APP); listSubApp.put(SubApps.CWP_DEVELOPER_APP, runtimeSubApp); //acá estoy seteando los colores y toda la vaina esa runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_SUP_APP_ALL_DEVELOPER); runtimeActivity.setColor("#b46a54"); //runtimeActivity.setStatusBarColor("#d07b62"); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_SUP_APP_ALL_DEVELOPER, runtimeActivity); runtimeTitleBar = new TitleBar(); //Navigation runtimeSideMenu = new SideMenu(); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Personal Wallets"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Shops"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Commercial wallets"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Factory Projects"); runtimeMenuItem.setLinkToActivity(Activities.CWP_WALLET_FACTORY_MAIN); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Published Wallets"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Wallet Store"); runtimeMenuItem.setLinkToActivity(Activities.CWP_WALLET_STORE_MAIN); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Exit"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeActivity.setSideMenu(runtimeSideMenu); //fin navigation runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Developer"); //runtimeTitleBar.setColor("#d07b62"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTabStrip.setTabsColor("#d07b62"); runtimeTabStrip.setTabsTextColor("#FFFFFF"); runtimeTabStrip.setTabsIndicateColor("#b46a54"); runtimeTab = new Tab(); runtimeTab.setLabel("DataBase Tools"); runtimeTab.setFragment(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Log Tools"); runtimeTab.setFragment(Fragments.CWP_SUB_APP_DEVELOPER_LOG_TOOLS); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS); runtimeActivity.addFragment(runtimeFragment); 
listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_DATABASES); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_DATABASES, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_TABLES); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_DATABASES); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_TABLES, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_RECORDS); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_TABLES); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_RECORDS, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_LOG_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_LOG_TOOLS, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_1_TOOLS); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_LOG_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_1_TOOLS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_2_TOOLS); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_1_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_2_TOOLS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_3_TOOLS); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_1_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_3_TOOLS,runtimeFragment); /** * End of Developer tabs. 
*/ //wallet factory app runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_FACTORY); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_WALLET_FACTORY, runtimeSubApp); runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_FACTORY_MAIN); runtimeActivity.setColor("#b46a54"); //runtimeActivity.setStatusBarColor("#b46a54"); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Wallet Factory"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTabStrip.setTabsColor("#d07b62"); runtimeTabStrip.setTabsTextColor("#FFFFFF"); runtimeTabStrip.setTabsIndicateColor("#b46a54"); runtimeTab = new Tab(); runtimeTab.setLabel("Wallet Factory"); runtimeTab.setFragment(Fragments.CWP_WALLET_FACTORY_MAIN); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_FACTORY_MAIN, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_FACTORY_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_FACTORY_MAIN,runtimeFragment); /**End Wallet Publisher*/ //wallet Publisher app runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_PUBLISHER); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_WALLET_PUBLISHER, runtimeSubApp); runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_PUBLISHER_MAIN); runtimeActivity.setColor("#b46a54"); //runtimeActivity.setStatusBarColor("#b46a54"); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Wallet Publisher"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTabStrip.setTabsColor("#d07b62"); runtimeTabStrip.setTabsTextColor("#FFFFFF"); runtimeTabStrip.setTabsIndicateColor("#b46a54"); runtimeTab = new Tab(); runtimeTab.setLabel("Wallet Publisher"); runtimeTab.setFragment(Fragments.CWP_WALLET_PUBLISHER_MAIN); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_PUBLISHER_MAIN, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_PUBLISHER_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_PUBLISHER_MAIN,runtimeFragment); /**End Wallet Publisher*/ runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_MANAGER); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_WALLET_MANAGER,runtimeSubApp); lastSubapp = SubApps.CWP_WALLET_MANAGER; runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_MANAGER_MAIN); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_MANAGER_MAIN, runtimeActivity); lastActivity = Activities.CWP_WALLET_MANAGER_MAIN; runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_MANAGER_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_MANAGER_MAIN, runtimeFragment); //Desktop page Developer sub App runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER,runtimeFragment); runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_RUNTIME); runtimeApp.addSubApp(runtimeSubApp); 
listSubApp.put(SubApps.CWP_WALLET_RUNTIME,runtimeSubApp); runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_STORE); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_WALLET_STORE,runtimeSubApp); runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_STORE_MAIN); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_STORE_MAIN, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_STORE_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_STORE_MAIN,runtimeFragment); /** * Definition of Shop Manager */ runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_ADULTS_ALL_SHOPS); runtimeActivity.setColor("#76dc4a"); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_ADULTS_ALL_SHOPS, runtimeActivity); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("My Shop"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTab = new Tab(); runtimeTab.setLabel("Shop"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_SHOP); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Products"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_PRODUCTS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Reviews"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_REVIEWS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Chat"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_CHAT); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("History"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_HISTORY); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Map"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_MAP); runtimeTabStrip.addTab(runtimeTab); runtimeTabStrip.setDividerColor(0xFFFFFFFF); runtimeTabStrip.setIndicatorColor(0xFFFFFFFF); runtimeTabStrip.setIndicatorHeight(9); runtimeTabStrip.setBackgroundColor(0xFF76dc4a); runtimeTabStrip.setTextColor(0xFFFFFFFF); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_SHOP); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_SHOP,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_PRODUCTS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_PRODUCTS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_REVIEWS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_REVIEWS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_CHAT); runtimeActivity.addFragment(runtimeFragment); 
listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_CHAT,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_HISTORY); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_HISTORY,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_MAP); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_MAP,runtimeFragment); /** * End of SHOPS tabs. */ /*-- wallet store --*/ runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_RUNTIME_STORE_MAIN); runtimeActivity.setColor("#b46a54"); //runtimeActivity.setStatusBarColor("#b46a54"); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Wallet Store"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTabStrip.setTabsColor("#d07b62"); runtimeTabStrip.setTabsTextColor("#FFFFFF"); runtimeTabStrip.setTabsIndicateColor("#b46a54"); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_RUNTIME_STORE_MAIN, runtimeActivity); //mati SearchView runtimeSearchView= new SearchView(); runtimeSearchView.setLabel("Search"); runtimeTitleBar.setRuntimeSearchView(runtimeSearchView); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTab = new Tab(); runtimeTab.setLabel("All"); runtimeTab.setFragment(Fragments.CWP_SHOP_MANAGER_MAIN); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Free"); runtimeTab.setFragment(Fragments.CWP_SHOP_MANAGER_FREE); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Paid"); runtimeTab.setFragment(Fragments.CWP_SHOP_MANAGER_PAID); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Accepted nearby"); runtimeTab.setFragment(Fragments.CWP_SHOP_MANAGER_ACCEPTED_NEARBY); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHOP_MANAGER_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHOP_MANAGER_MAIN,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHOP_MANAGER_FREE); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHOP_MANAGER_FREE,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHOP_MANAGER_PAID); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHOP_MANAGER_PAID,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHOP_MANAGER_ACCEPTED_NEARBY); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHOP_MANAGER_ACCEPTED_NEARBY,runtimeFragment); /** * End of Wallet Store */ //Account Details runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_RUNTIME_ADULTS_ALL_ACCOUNT_DETAIL); runtimeActivity.setColor("#F0E173"); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_RUNTIME_ADULTS_ALL_ACCOUNT_DETAIL, runtimeActivity); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Account details"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTab = new Tab(); 
runtimeTab.setLabel("Debits"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Credits"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("All"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL,runtimeFragment); /*------------------------------*/ runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_ADULTS_ALL_REFFILS); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_ADULTS_ALL_REFFILS, runtimeActivity); //----------------------------------------------------------------------------------- runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_ADULTS_ALL_REQUESTS_RECEIVED); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_ADULTS_ALL_REQUESTS_RECEIVED, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_ADULTS_ALL_REQUESTS_RECEIVED); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_ADULTS_ALL_REQUESTS_RECEIVED,runtimeFragment); //------------------------------------------------------------------------------------ runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_ADULTS_ALL_REQUEST_SEND); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_ADULTS_ALL_REQUEST_SEND, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_ADULTS_ALL_REQUEST_SEND); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_ADULTS_ALL_REQUEST_SEND,runtimeFragment); //----------------------------------------------------------------------------------- runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_RUNTIME_ADULTS_ALL_ACCOUNTS); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_RUNTIME_ADULTS_ALL_ACCOUNTS, runtimeActivity); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Account details"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTab = new Tab(); runtimeTab.setLabel("Debits"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Credits"); 
runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("All"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL,runtimeFragment); listApps.put(Apps.CRYPTO_WALLET_PLATFORM,runtimeApp); lastApp = Apps.CRYPTO_WALLET_PLATFORM; /** * End of Wallet Accounts tabs. */ } catch(Exception e) { String message = CantFactoryReset.DEFAULT_MESSAGE; FermatException cause = FermatException.wrapException(e); String context = "Error on method Factory Reset, setting the structure of the apps"; String possibleReason = "some null definition"; throw new CantFactoryReset(message, cause, context, possibleReason); } } }
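The factoryReset() body above registers every tab by repeating the same four lines (new Fragment, setType, addFragment, put into listFragments). Below is a minimal sketch of a helper that could express one such registration, written as if it were a private method of this plugin class so it can reuse the Activity, Fragment, Fragments and Map types already imported there; the method name and its use are assumptions, not part of the actual plugin:

// Hypothetical helper mirroring the pattern repeated throughout factoryReset():
// create a Fragment of the given type, attach it to the Activity and index it.
private Fragment registerFragment(Activity activity, Fragments type) {
    Fragment fragment = new Fragment();
    fragment.setType(type);
    activity.addFragment(fragment);
    listFragments.put(type, fragment);
    return fragment;
}
// Assumed usage: registerFragment(runtimeActivity, Fragments.CWP_SHOP_MANAGER_MAIN);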
DMP/plugin/engine/fermat-dmp-plugin-engine-sub-app-runtime-bitdubai/src/main/java/com/bitdubai/fermat_dmp_plugin/layer/engine/app_runtime/developer/bitdubai/version_1/AppRuntimeMiddlewarePluginRoot.java
package com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1; /** * Created by ciencias on 2/14/15. */ import com.bitdubai.fermat_api.CantStartPluginException; import com.bitdubai.fermat_api.FermatException; import com.bitdubai.fermat_api.Plugin; import com.bitdubai.fermat_api.Service; import com.bitdubai.fermat_api.layer.all_definition.enums.ServiceStatus; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.Activity; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.Fragment; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.SearchView; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.SideMenu; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.Tab; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.MenuItem; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.TabStrip; import com.bitdubai.fermat_api.layer.all_definition.navigation_structure.TitleBar; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.App; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.AppRuntimeManager; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.MainMenu; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.SubApp; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.enums.Activities; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.enums.Apps; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.enums.Fragments; import com.bitdubai.fermat_api.layer.dmp_engine.sub_app_runtime.enums.SubApps; import com.bitdubai.fermat_api.layer.osa_android.file_system.DealsWithPluginFileSystem; import com.bitdubai.fermat_api.layer.osa_android.file_system.PluginFileSystem; import com.bitdubai.fermat_api.layer.pip_platform_service.error_manager.DealsWithErrors; import com.bitdubai.fermat_api.layer.pip_platform_service.error_manager.ErrorManager; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.DealsWithEvents; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.EventHandler; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.EventListener; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.EventManager; import com.bitdubai.fermat_api.layer.pip_platform_service.event_manager.EventType; import com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1.event_handlers.WalletResourcesInstalledEventHandler; import com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1.exceptions.CantFactoryReset; import com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1.structure.RuntimeApp; import com.bitdubai.fermat_dmp_plugin.layer.engine.app_runtime.developer.bitdubai.version_1.structure.RuntimeSubApp; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.UUID; /** * The App Runtime is the module in charge of the UI navigation structure. A user is always at a certain point in this * structure. */ /** * A Navigation stack is maintained by this plugin to allow the user to go back all the stack down to the root if necessary. */ public class AppRuntimeMiddlewarePluginRoot implements Service, AppRuntimeManager, DealsWithEvents, DealsWithErrors, DealsWithPluginFileSystem, Plugin { /** * Service Interface member variables. 
*/ ServiceStatus serviceStatus = ServiceStatus.CREATED; /** * AppRuntimeManager Interface member variables. */ List<EventListener> listenersAdded = new ArrayList<>(); Map<Apps, App> listApps = new HashMap<Apps, App>(); Map<SubApps, SubApp> listSubApp = new HashMap<SubApps, SubApp>(); Map<Activities, Activity> listActivities = new HashMap<Activities, Activity>(); Map<Fragments, Fragment> listFragments = new HashMap<Fragments, Fragment>(); Apps lastApp; SubApps lastSubapp; Activities lastActivity; Fragments lastFragment; /** * UsesFileSystem Interface member variables. */ PluginFileSystem pluginFileSystem; /** * DealsWithErrors Interface member variables. */ ErrorManager errorManager; /** * DealWithEvents Interface member variables. */ EventManager eventManager; /** * Plugin Interface member variables. */ UUID pluginId; public void addToNavigationStructure(/*String NavigationStructure, NicheWalletType*/) { /* PlatformEvent platformEvent = eventManager.getNewEvent(EventType.NAVIGATION_STRUCTURE_UPDATED); ((NavigationStructureUpdatedEvent) platformEvent).----------(this.-----); eventManager.raiseEvent(platformEvent); */ } /* PlatformEvent platformEvent = eventManager.getNewEvent(EventType.NAVIGATION_STRUCTURE_UPDATED); ((NavigationStructureUpdatedEvent) platformEvent).--------(this.-------); eventManager.raiseEvent(platformEvent); */ @Override public void start() throws CantStartPluginException{ /** * I will initialize the handling of com.bitdubai.platform events. */ EventListener eventListener; EventHandler eventHandler; eventListener = eventManager.getNewListener(EventType.WALLET_RESOURCES_INSTALLED); eventHandler = new WalletResourcesInstalledEventHandler(); ((WalletResourcesInstalledEventHandler) eventHandler).setAppRuntimeManager(this); eventListener.setEventHandler(eventHandler); eventManager.addListener(eventListener); listenersAdded.add(eventListener); /** * At this time the only thing I can do is a factory reset. Once there should be a possibility to add * functionality based on wallets downloaded by users this wont be an option. * * * */ try { factoryReset(); } catch(CantFactoryReset ex) { String message = CantStartPluginException.DEFAULT_MESSAGE; FermatException cause = ex; String context = "App Runtime Start"; String possibleReason = "Some null definition"; throw new CantStartPluginException(message, cause, context, possibleReason); } this.serviceStatus = ServiceStatus.STARTED; } @Override public void pause(){ this.serviceStatus = ServiceStatus.PAUSED; } @Override public void resume(){ this.serviceStatus = ServiceStatus.STARTED; } @Override public void stop(){ /** * I will remove all the listeners registered with the event manager. */ for (EventListener eventListener : listenersAdded){ eventManager.removeListener(eventListener); } listenersAdded.clear(); this.serviceStatus = ServiceStatus.STOPPED; } @Override public ServiceStatus getStatus() { return this.serviceStatus; } /** * AppRuntime Interface implementation. 
*/ @Override public App getApp(Apps app) { return null; } @Override public App getLastApp() { Iterator<Map.Entry<Apps, App>> eapp = this.listApps.entrySet().iterator(); while (eapp.hasNext()) { Map.Entry<Apps, App> appEntry = eapp.next(); RuntimeApp app = (RuntimeApp) appEntry.getValue(); if(app.getType().name().equals(lastApp.name())){ return app; } } return null; } @Override public SubApp getLastSubApp() { Iterator<Map.Entry<SubApps, SubApp>> esubapp = this.listSubApp.entrySet().iterator(); while (esubapp.hasNext()) { Map.Entry<SubApps, SubApp> subappEntry = esubapp.next(); RuntimeSubApp subapp = (RuntimeSubApp) subappEntry.getValue(); if(subapp.getType().name().equals(this.lastSubapp.name())){ return subapp; } } return null; } @Override public Activity getLasActivity() { Iterator<Map.Entry<Activities, Activity>> eactivity = this.listActivities.entrySet().iterator(); while (eactivity.hasNext()) { Map.Entry<Activities, Activity> activityEntry = eactivity.next(); Activity activity = (Activity) activityEntry.getValue(); if(activity.getType().name().equals(this.lastActivity.name())){ return activity; } } return null; } @Override public Fragment getLastFragment() { Iterator<Map.Entry<Fragments, Fragment>> efragment = this.listFragments.entrySet().iterator(); while (efragment.hasNext()) { Map.Entry<Fragments, Fragment> fragmentEntry = efragment.next(); Fragment fragment = (Fragment) fragmentEntry.getValue(); if(fragment.getType().name().equals(this.lastFragment.name())){ return fragment; } } return null; } @Override public Activity getActivity(Activities app) { Iterator<Map.Entry<Activities, Activity>> eactivity = this.listActivities.entrySet().iterator(); while (eactivity.hasNext()) { Map.Entry<Activities, Activity> activityEntry = eactivity.next(); Activity activity = (Activity) activityEntry.getValue(); if(activity.getType().name().equals(app.name())){ lastActivity = activity.getType(); return activity; } } return null; } @Override public Fragment getFragment(Fragments frag) { Iterator<Map.Entry<Fragments, Fragment>> efragment = this.listFragments.entrySet().iterator(); while (efragment.hasNext()) { Map.Entry<Fragments, Fragment> fragmentEntry = efragment.next(); Fragment fragment = (Fragment) fragmentEntry.getValue(); if(fragment.getType().name().equals(frag.name())){ lastFragment = fragment.getType(); return fragment; } } return null; } /** * UsesFileSystem Interface implementation. */ @Override public void setPluginFileSystem(PluginFileSystem pluginFileSystem) { this.pluginFileSystem = pluginFileSystem; } /** * DealWithEvents Interface implementation. */ @Override public void setEventManager(EventManager eventManager) { this.eventManager = eventManager; } /** * DealWithErrors Interface implementation. */ @Override public void setErrorManager(ErrorManager errorManager) { this.errorManager = errorManager; } /** * DealsWithPluginIdentity methods implementation. */ @Override public void setId(UUID pluginId) { this.pluginId = pluginId; } /** * The first time this plugins runs, it will setup the initial structure for the App, subApp and so on through the local * interfaces of the classes involved, */ private void firstRunCheck() { /** * First I check weather this a structure already created, if not I create the "Factory" structure. */ } /** * Here is where I actually generate the factory structure of the APP. This method is also useful to reset to the * factory structure. 
*/ private void factoryReset() throws CantFactoryReset { try { RuntimeApp runtimeApp; RuntimeSubApp runtimeSubApp; Activity runtimeActivity; Fragment runtimeFragment; TitleBar runtimeTitleBar; SideMenu runtimeSideMenu; MainMenu runtimeMainMenu; MenuItem runtimeMenuItem; TabStrip runtimeTabStrip; Tab runtimeTab; runtimeApp = new RuntimeApp(); runtimeApp.setType(Apps.CRYPTO_WALLET_PLATFORM); runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_SHELL); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_SHELL, runtimeSubApp); runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_SHELL_LOGIN); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_SHELL_LOGIN, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHELL_LOGIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHELL_LOGIN, runtimeFragment); /** * Definition of Developer Manager App */ runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_DEVELOPER_APP); listSubApp.put(SubApps.CWP_DEVELOPER_APP, runtimeSubApp); //acá estoy seteando los colores y toda la vaina esa runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_SUP_APP_ALL_DEVELOPER); runtimeActivity.setColor("#b46a54"); runtimeActivity.setStatusBarColor("#d07b62"); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_SUP_APP_ALL_DEVELOPER, runtimeActivity); runtimeTitleBar = new TitleBar(); //Navigation runtimeSideMenu = new SideMenu(); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Personal Wallets"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Shops"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Commercial wallets"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Factory Projects"); runtimeMenuItem.setLinkToActivity(Activities.CWP_WALLET_FACTORY_MAIN); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Published Wallets"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Wallet Store"); runtimeMenuItem.setLinkToActivity(Activities.CWP_WALLET_STORE_MAIN); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeMenuItem = new MenuItem(); runtimeMenuItem.setLabel("Exit"); runtimeSideMenu.addMenuItem(runtimeMenuItem); runtimeActivity.setSideMenu(runtimeSideMenu); //fin navigation runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Developer"); //runtimeTitleBar.setColor("#d07b62"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTabStrip.setTabsColor("#d07b62"); runtimeTabStrip.setTabsTextColor("#FFFFFF"); runtimeTabStrip.setTabsIndicateColor("#b46a54"); runtimeTab = new Tab(); runtimeTab.setLabel("DataBase Tools"); runtimeTab.setFragment(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Log Tools"); runtimeTab.setFragment(Fragments.CWP_SUB_APP_DEVELOPER_LOG_TOOLS); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS); runtimeActivity.addFragment(runtimeFragment); 
listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_DATABASES); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_DATABASES, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_TABLES); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_DATABASES); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_TABLES, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_RECORDS); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_TABLES); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_DATABASE_TOOLS_RECORDS, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_LOG_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_LOG_TOOLS, runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_1_TOOLS); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_LOG_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_1_TOOLS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_2_TOOLS); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_1_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_2_TOOLS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_3_TOOLS); runtimeFragment.setBack(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_1_TOOLS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER_LOG_LEVEL_3_TOOLS,runtimeFragment); /** * End of Developer tabs. 
*/ //wallet factory app runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_FACTORY); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_WALLET_FACTORY, runtimeSubApp); runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_FACTORY_MAIN); runtimeActivity.setColor("#b46a54"); runtimeActivity.setStatusBarColor("#b46a54"); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Wallet Factory"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTabStrip.setTabsColor("#d07b62"); runtimeTabStrip.setTabsTextColor("#FFFFFF"); runtimeTabStrip.setTabsIndicateColor("#b46a54"); runtimeTab = new Tab(); runtimeTab.setLabel("Wallet Factory"); runtimeTab.setFragment(Fragments.CWP_WALLET_FACTORY_MAIN); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_FACTORY_MAIN, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_FACTORY_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_FACTORY_MAIN,runtimeFragment); /**End Wallet Publisher*/ //wallet Publisher app runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_PUBLISHER); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_WALLET_PUBLISHER, runtimeSubApp); runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_PUBLISHER_MAIN); runtimeActivity.setColor("#b46a54"); runtimeActivity.setStatusBarColor("#b46a54"); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Wallet Publisher"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTabStrip.setTabsColor("#d07b62"); runtimeTabStrip.setTabsTextColor("#FFFFFF"); runtimeTabStrip.setTabsIndicateColor("#b46a54"); runtimeTab = new Tab(); runtimeTab.setLabel("Wallet Publisher"); runtimeTab.setFragment(Fragments.CWP_WALLET_PUBLISHER_MAIN); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_PUBLISHER_MAIN, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_PUBLISHER_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_PUBLISHER_MAIN,runtimeFragment); /**End Wallet Publisher*/ runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_MANAGER); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_WALLET_MANAGER,runtimeSubApp); lastSubapp = SubApps.CWP_WALLET_MANAGER; runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_MANAGER_MAIN); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_MANAGER_MAIN, runtimeActivity); lastActivity = Activities.CWP_WALLET_MANAGER_MAIN; runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_MANAGER_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_MANAGER_MAIN, runtimeFragment); //Desktop page Developer sub App runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SUB_APP_DEVELOPER); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SUB_APP_DEVELOPER,runtimeFragment); runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_RUNTIME); runtimeApp.addSubApp(runtimeSubApp); 
listSubApp.put(SubApps.CWP_WALLET_RUNTIME,runtimeSubApp); runtimeSubApp = new RuntimeSubApp(); runtimeSubApp.setType(SubApps.CWP_WALLET_STORE); runtimeApp.addSubApp(runtimeSubApp); listSubApp.put(SubApps.CWP_WALLET_STORE,runtimeSubApp); runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_STORE_MAIN); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_STORE_MAIN, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_STORE_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_STORE_MAIN,runtimeFragment); /** * Definition of Shop Manager */ runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_ADULTS_ALL_SHOPS); runtimeActivity.setColor("#76dc4a"); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_ADULTS_ALL_SHOPS, runtimeActivity); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("My Shop"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTab = new Tab(); runtimeTab.setLabel("Shop"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_SHOP); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Products"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_PRODUCTS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Reviews"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_REVIEWS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Chat"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_CHAT); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("History"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_HISTORY); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Map"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_MAP); runtimeTabStrip.addTab(runtimeTab); runtimeTabStrip.setDividerColor(0xFFFFFFFF); runtimeTabStrip.setIndicatorColor(0xFFFFFFFF); runtimeTabStrip.setIndicatorHeight(9); runtimeTabStrip.setBackgroundColor(0xFF76dc4a); runtimeTabStrip.setTextColor(0xFFFFFFFF); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_SHOP); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_SHOP,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_PRODUCTS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_PRODUCTS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_REVIEWS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_REVIEWS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_CHAT); runtimeActivity.addFragment(runtimeFragment); 
listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_CHAT,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_HISTORY); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_HISTORY,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_MAP); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_MAP,runtimeFragment); /** * End of SHOPS tabs. */ /*-- wallet store --*/ runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_RUNTIME_STORE_MAIN); runtimeActivity.setColor("#b46a54"); runtimeActivity.setStatusBarColor("#b46a54"); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Wallet Store"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTabStrip.setTabsColor("#d07b62"); runtimeTabStrip.setTabsTextColor("#FFFFFF"); runtimeTabStrip.setTabsIndicateColor("#b46a54"); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_RUNTIME_STORE_MAIN, runtimeActivity); //mati SearchView runtimeSearchView= new SearchView(); runtimeSearchView.setLabel("Search"); runtimeTitleBar.setRuntimeSearchView(runtimeSearchView); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTab = new Tab(); runtimeTab.setLabel("All"); runtimeTab.setFragment(Fragments.CWP_SHOP_MANAGER_MAIN); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Free"); runtimeTab.setFragment(Fragments.CWP_SHOP_MANAGER_FREE); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Paid"); runtimeTab.setFragment(Fragments.CWP_SHOP_MANAGER_PAID); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Accepted nearby"); runtimeTab.setFragment(Fragments.CWP_SHOP_MANAGER_ACCEPTED_NEARBY); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHOP_MANAGER_MAIN); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHOP_MANAGER_MAIN,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHOP_MANAGER_FREE); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHOP_MANAGER_FREE,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHOP_MANAGER_PAID); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHOP_MANAGER_PAID,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_SHOP_MANAGER_ACCEPTED_NEARBY); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_SHOP_MANAGER_ACCEPTED_NEARBY,runtimeFragment); /** * End of Wallet Store */ //Account Details runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_RUNTIME_ADULTS_ALL_ACCOUNT_DETAIL); runtimeActivity.setColor("#F0E173"); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_RUNTIME_ADULTS_ALL_ACCOUNT_DETAIL, runtimeActivity); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Account details"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTab = new Tab(); 
runtimeTab.setLabel("Debits"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Credits"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("All"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL,runtimeFragment); /*------------------------------*/ runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_ADULTS_ALL_REFFILS); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_ADULTS_ALL_REFFILS, runtimeActivity); //----------------------------------------------------------------------------------- runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_ADULTS_ALL_REQUESTS_RECEIVED); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_ADULTS_ALL_REQUESTS_RECEIVED, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_ADULTS_ALL_REQUESTS_RECEIVED); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_ADULTS_ALL_REQUESTS_RECEIVED,runtimeFragment); //------------------------------------------------------------------------------------ runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_ADULTS_ALL_REQUEST_SEND); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_ADULTS_ALL_REQUEST_SEND, runtimeActivity); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_ADULTS_ALL_REQUEST_SEND); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_ADULTS_ALL_REQUEST_SEND,runtimeFragment); //----------------------------------------------------------------------------------- runtimeActivity= new Activity(); runtimeActivity.setType(Activities.CWP_WALLET_RUNTIME_ADULTS_ALL_ACCOUNTS); runtimeSubApp.addActivity(runtimeActivity); listActivities.put(Activities.CWP_WALLET_RUNTIME_ADULTS_ALL_ACCOUNTS, runtimeActivity); runtimeTitleBar = new TitleBar(); runtimeTitleBar.setLabel("Account details"); runtimeActivity.setTitleBar(runtimeTitleBar); runtimeTabStrip = new TabStrip(); runtimeTab = new Tab(); runtimeTab.setLabel("Debits"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("Credits"); 
runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS); runtimeTabStrip.addTab(runtimeTab); runtimeTab = new Tab(); runtimeTab.setLabel("All"); runtimeTab.setFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL); runtimeTabStrip.addTab(runtimeTab); runtimeActivity.setTabStrip(runtimeTabStrip); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_DEBITS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNT_CREDITS,runtimeFragment); runtimeFragment = new Fragment(); runtimeFragment.setType(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL); runtimeActivity.addFragment(runtimeFragment); listFragments.put(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_ACCOUNTS_ALL,runtimeFragment); listApps.put(Apps.CRYPTO_WALLET_PLATFORM,runtimeApp); lastApp = Apps.CRYPTO_WALLET_PLATFORM; /** * End of Wallet Accounts tabs. */ } catch(Exception e) { String message = CantFactoryReset.DEFAULT_MESSAGE; FermatException cause = FermatException.wrapException(e); String context = "Error on method Factory Reset, setting the structure of the apps"; String possibleReason = "some null definition"; throw new CantFactoryReset(message, cause, context, possibleReason); } } }
Compiling & Running
DMP/plugin/engine/fermat-dmp-plugin-engine-sub-app-runtime-bitdubai/src/main/java/com/bitdubai/fermat_dmp_plugin/layer/engine/app_runtime/developer/bitdubai/version_1/AppRuntimeMiddlewarePluginRoot.java
Compiling & Running
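The AppRuntimeManager in this record resolves Activities and Fragments by scanning its maps and remembers the last type requested. A hedged caller-side sketch of how a UI layer might consume it follows; the plugin-instance variable and the surrounding context are assumptions, only the getActivity/getFragment/getLasActivity/getLastFragment calls come from the class itself:

// Assumed caller-side usage of the runtime manager populated by factoryReset().
AppRuntimeManager runtime = appRuntimeMiddlewarePluginRoot;   // the plugin instance (assumed reference)
Activity shops = runtime.getActivity(Activities.CWP_WALLET_ADULTS_ALL_SHOPS);
Fragment shopTab = runtime.getFragment(Fragments.CWP_WALLET_RUNTIME_WALLET_ADULTS_ALL_BITDUBAI_SHOP_SHOP);
// Both lookups update lastActivity/lastFragment, so getLasActivity() and
// getLastFragment() will return these same objects until another lookup runs.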
Java
mit
766e61a485e7fb2cafb82f7746c006781b53f7ff
0
tlear/pegasus
package com.tlear.pegasus; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.BitmapFont; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.graphics.glutils.ShapeRenderer; import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType; import com.badlogic.gdx.math.Rectangle; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.math.Vector3; import java.util.HashMap; public class Ship { private Rectangle hitBox; private float x; private float y; // Ship textures private HashMap<ShipDirection, Texture> shipImages; private HashMap<ShipDirection, TextureRegion> shipTextures; // Other textures private BitmapFont font; // Ship movement model private float shipAngle; private float shipSpeed; private ShipDirection shipDirection; public float rotationalVelocity; // Constraints private int maxSpeed; private int maxRotationalVelocity; // Ship texture size private int shipTexWidth; private int shipTexHeight; // Ship model size private int shipWidth; private int shipHeight; // Stage size private int windowWidth; private int windowHeight; // Hitbox offsets private float offX; private float offY; // Laser variables private boolean firingLaser; private Vector2 laserTarget; // Parts private ShipLaser laserTurret; // Debug private boolean debugMode; private String debugString; public Ship(int windowWidth, int windowHeight) { /* Load textures */ // Set up maps of directions shipImages = new HashMap<ShipDirection, Texture>(); shipImages.put(ShipDirection.NONE, new Texture(Gdx.files.internal("ship.png"))); shipImages.put(ShipDirection.FORWARD, new Texture(Gdx.files.internal("shipForward.png"))); shipImages.put(ShipDirection.BACKWARD, new Texture(Gdx.files.internal("shipBackward.png"))); shipImages.put(ShipDirection.LEFT, new Texture(Gdx.files.internal("shipLeft.png"))); shipImages.put(ShipDirection.RIGHT, new Texture(Gdx.files.internal("shipRight.png"))); shipTextures = new HashMap<ShipDirection, TextureRegion>(); shipTextures.put(ShipDirection.NONE, new TextureRegion(shipImages.get(ShipDirection.NONE))); shipTextures.put(ShipDirection.FORWARD, new TextureRegion(shipImages.get(ShipDirection.FORWARD))); shipTextures.put(ShipDirection.BACKWARD, new TextureRegion(shipImages.get(ShipDirection.BACKWARD))); shipTextures.put(ShipDirection.LEFT, new TextureRegion(shipImages.get(ShipDirection.LEFT))); shipTextures.put(ShipDirection.RIGHT, new TextureRegion(shipImages.get(ShipDirection.RIGHT))); // Load other textures font = new BitmapFont(); font.setColor(Color.GREEN); // Initialise speed and rotation shipSpeed = 0f; shipAngle = 0f; rotationalVelocity = 0f; shipDirection = ShipDirection.NONE; // Initialise position x = 50; y = 50; // Initialise ship texture size shipTexWidth = 95; shipTexHeight = 108; // Initalise the ship model size shipWidth = 60; shipHeight = 100; // Initialise the hitbox to always be contained inside the ship's texture // regardless of the rotation hitBox = new Rectangle(); hitBox.width = hitBox.height = (float) Math.sqrt((Math.pow((double) shipWidth, 2.0) / 2.0)); offX = (shipTexWidth / 2) - (hitBox.width / 2); offY = (shipTexHeight / 2) - (hitBox.height / 2); hitBox.x = x + offX; hitBox.y = y + offY; // Initialise parts laserTurret = new ShipLaser(new Vector2(shipTexWidth / 2, shipTexHeight / 2)); laserTurret.addX(-laserTurret.getTexWidth() / 2); laserTurret.addY(-laserTurret.getTexHeight() / 2); // Initialise 
window this.windowWidth = windowWidth; this.windowHeight = windowHeight; // Initialise constraints maxSpeed = 200; maxRotationalVelocity = 2; // Initialise laser firingLaser = false; laserTarget = new Vector2(); // Set debug mode debugMode = true; debugString = ""; } public void draw(SpriteBatch batch, ShapeRenderer shapeRenderer) { batch.begin(); // Draw ship batch.draw(shipTextures.get(shipDirection), x, y, shipTexWidth / 2, shipTexHeight / 2, shipTexWidth, shipTexHeight, 1.0f, 1.0f, shipAngle-90); batch.end(); shapeRenderer.begin(ShapeType.Line); //Draw laser if (firingLaser) { shapeRenderer.setColor(1, 0, 0, 1); shapeRenderer.line(new Vector2(x + shipTexWidth / 2, y + shipTexHeight / 2), laserTarget); } shapeRenderer.end(); // Draw laser turret after laser batch.begin(); batch.draw(laserTurret.getTextureRegion(), x + laserTurret.getX(), y + laserTurret.getY(), laserTurret.getTexWidth() / 2, laserTurret.getTexHeight() / 2, laserTurret.getTexWidth(), laserTurret.getTexHeight(), 1.0f, 1.0f, shipAngle-90); batch.end(); // Draw debug info last always if (debugMode) { //Draw debug info shapeRenderer.begin(ShapeType.Line); shapeRenderer.setColor(0, 1, 0, 1); shapeRenderer.rect(hitBox.x, hitBox.y, hitBox.width, hitBox.height); shapeRenderer.end(); batch.begin(); font.drawMultiLine(batch, debugString, 10, windowHeight-10); batch.end(); } // Move the ship double dx = shipSpeed * Math.cos(degreesToRadians(shipAngle)) * Gdx.graphics.getDeltaTime(); double dy = shipSpeed * Math.sin(degreesToRadians(shipAngle)) * Gdx.graphics.getDeltaTime(); x += dx; y += dy; shipAngle += rotationalVelocity; checkOutOfBounds(); hitBox.x = x + offX; hitBox.y = y + offY; if (debugMode) { debugString = "Speed: " + shipSpeed + "\nAngle: " + (int) shipAngle + "\nx: " + (int) x + "\ny: " + (int) y + "\nRotVel: " + (double) ((int) (rotationalVelocity*100)) / 100 + "º"; } } public void addAngle(float a) { if (Math.abs(rotationalVelocity + a) <= maxRotationalVelocity) { rotationalVelocity += a; shipDirection = a > 0 ? ShipDirection.LEFT : ShipDirection.RIGHT; } } public void addSpeed(int s) { if (shipSpeed + s <= maxSpeed && shipSpeed + s >= -maxSpeed / 2) { shipSpeed += s; shipDirection = s > 0 ? ShipDirection.FORWARD : ShipDirection.BACKWARD; } else { shipDirection = shipDirection != ShipDirection.NONE ? shipDirection : ShipDirection.NONE; } } public void fire(Vector3 pos) { // Fires the ship's laser at the position firingLaser = true; laserTarget = new Vector2(pos.x, pos.y); } public void reset() { // Set no direction and not firing shipDirection = ShipDirection.NONE; firingLaser = false; } public void stopMoving() { shipSpeed = 0; rotationalVelocity = 0; shipDirection = ShipDirection.NONE; } public void dispose() { font.dispose(); } public void setDirection(ShipDirection d) { shipDirection = d; } private double degreesToRadians(float deg) { return deg * Math.PI / 180; } private void checkOutOfBounds() { if (x > windowWidth - shipWidth/2) { x = -shipWidth/2; } else if (x < -shipWidth/2) { x = windowWidth - shipWidth/2; } if (y > windowHeight - shipHeight/2) { y = -shipHeight/2; } else if (y < -shipHeight/2) { y = windowHeight - shipHeight/2; } } }
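The Ship constructor above sizes the hitbox so it stays inside the texture at any rotation: the largest circle that fits the 60-unit ship width has an inscribed square of side shipWidth/√2, which is exactly what Math.sqrt(shipWidth²/2) evaluates to. A standalone check with the same value (illustrative only, not part of the game code):

// Standalone check of the inscribed-square formula used for the Ship hitbox.
public class HitboxMathCheck {
    public static void main(String[] args) {
        double shipWidth = 60.0; // same model width the Ship constructor uses
        double side = Math.sqrt(Math.pow(shipWidth, 2.0) / 2.0);
        // side == shipWidth / sqrt(2) ~= 42.43, so the square hitbox fits inside a
        // circle of diameter 60 and therefore inside the ship at any rotation angle.
        System.out.printf("side = %.2f, shipWidth/sqrt(2) = %.2f%n", side, shipWidth / Math.sqrt(2.0));
    }
}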
pegasus-game/core/src/com/tlear/pegasus/Ship.java
package com.tlear.pegasus; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.BitmapFont; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.graphics.glutils.ShapeRenderer; import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType; import com.badlogic.gdx.math.Rectangle; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.math.Vector3; import java.util.HashMap; public class Ship { private Rectangle hitBox; private float x; private float y; // Ship textures private HashMap<ShipDirection, Texture> shipImages; private HashMap<ShipDirection, TextureRegion> shipTextures; // Other textures private BitmapFont font; // Ship movement model private float shipAngle; private float shipSpeed; private ShipDirection shipDirection; public float rotationalVelocity; // Constraints private int maxSpeed; private int maxRotationalVelocity; // Ship texture size private int shipTexWidth; private int shipTexHeight; // Ship model size private int shipWidth; private int shipHeight; // Stage size private int windowWidth; private int windowHeight; // Hitbox offsets private float offX; private float offY; // Laser variables private boolean firingLaser; private Vector2 laserTarget; // Parts private ShipLaser laserTurret; // Debug private boolean debugMode; private String debugString; public Ship(int windowWidth, int windowHeight) { /* Load textures */ // Set up maps of directions shipImages = new HashMap<ShipDirection, Texture>(); shipImages.put(ShipDirection.NONE, new Texture(Gdx.files.internal("ship.png"))); shipImages.put(ShipDirection.FORWARD, new Texture(Gdx.files.internal("shipForward.png"))); shipImages.put(ShipDirection.BACKWARD, new Texture(Gdx.files.internal("shipBackward.png"))); shipImages.put(ShipDirection.LEFT, new Texture(Gdx.files.internal("shipLeft.png"))); shipImages.put(ShipDirection.RIGHT, new Texture(Gdx.files.internal("shipRight.png"))); shipTextures = new HashMap<ShipDirection, TextureRegion>(); shipTextures.put(ShipDirection.NONE, new TextureRegion(shipImages.get(ShipDirection.NONE))); shipTextures.put(ShipDirection.FORWARD, new TextureRegion(shipImages.get(ShipDirection.FORWARD))); shipTextures.put(ShipDirection.BACKWARD, new TextureRegion(shipImages.get(ShipDirection.BACKWARD))); shipTextures.put(ShipDirection.LEFT, new TextureRegion(shipImages.get(ShipDirection.LEFT))); shipTextures.put(ShipDirection.RIGHT, new TextureRegion(shipImages.get(ShipDirection.RIGHT))); // Load other textures font = new BitmapFont(); font.setColor(Color.GREEN); // Initialise speed and rotation shipSpeed = 0f; shipAngle = 0f; rotationalVelocity = 0f; shipDirection = ShipDirection.NONE; // Initialise position x = 50; y = 50; // Initialise ship texture size shipTexWidth = 95; shipTexHeight = 108; // Initalise the ship model size shipWidth = 60; shipHeight = 100; // Initialise the hitbox to always be contained inside the ship's texture // regardless of the rotation hitBox = new Rectangle(); hitBox.width = hitBox.height = (float) Math.sqrt((Math.pow((double) shipWidth, 2.0) / 2.0)); offX = (shipTexWidth / 2) - (hitBox.width / 2); offY = (shipTexHeight / 2) - (hitBox.height / 2); hitBox.x = x + offX; hitBox.y = y + offY; // Initialise parts laserTurret = new ShipLaser(new Vector2(shipTexWidth / 2, shipTexHeight / 2)); // Initialise window this.windowWidth = windowWidth; this.windowHeight = windowHeight; // Initialise constraints 
maxSpeed = 200; maxRotationalVelocity = 2; // Initialise laser firingLaser = false; laserTarget = new Vector2(); // Set debug mode debugMode = true; debugString = ""; } public void draw(SpriteBatch batch, ShapeRenderer shapeRenderer) { batch.begin(); // Draw ship batch.draw(shipTextures.get(shipDirection), x, y, shipTexWidth / 2, shipTexHeight / 2, shipTexWidth, shipTexHeight, 1.0f, 1.0f, shipAngle-90); // Draw laser turret batch.draw(laserTurret.getTextureRegion(), x + laserTurret.getX(), y + laserTurret.getY(), x + laserTurret.getX(), y + laserTurret.getY(), laserTurret.getTexWidth(), laserTurret.getTexHeight(), 1.0f, 1.0f, 0); // Draw debug details if (debugMode) { font.drawMultiLine(batch, debugString, 10, windowHeight-10); } batch.end(); shapeRenderer.begin(ShapeType.Line); //Draw laser if (firingLaser) { shapeRenderer.setColor(1, 0, 0, 1); shapeRenderer.line(new Vector2(x + shipTexWidth / 2, y + shipTexHeight / 2), laserTarget); } //Draw debug info if (debugMode) { shapeRenderer.setColor(0, 1, 0, 1); shapeRenderer.rect(hitBox.x, hitBox.y, hitBox.width, hitBox.height); } shapeRenderer.end(); // Move the ship double dx = shipSpeed * Math.cos(degreesToRadians(shipAngle)) * Gdx.graphics.getDeltaTime(); double dy = shipSpeed * Math.sin(degreesToRadians(shipAngle)) * Gdx.graphics.getDeltaTime(); x += dx; y += dy; shipAngle += rotationalVelocity; checkOutOfBounds(); hitBox.x = x + offX; hitBox.y = y + offY; laserTurret.setX(x + laserTurret.getX()); laserTurret.setY(y + laserTurret.getY()); if (debugMode) { debugString = "Speed: " + shipSpeed + "\nAngle: " + (int) shipAngle + "\nx: " + (int) x + "\ny: " + (int) y + "\nRotVel: " + (double) ((int) (rotationalVelocity*100)) / 100 + "º"; } } public void addAngle(float a) { if (Math.abs(rotationalVelocity + a) <= maxRotationalVelocity) { rotationalVelocity += a; shipDirection = a > 0 ? ShipDirection.LEFT : ShipDirection.RIGHT; } } public void addSpeed(int s) { if (shipSpeed + s <= maxSpeed && shipSpeed + s >= -maxSpeed / 2) { shipSpeed += s; shipDirection = s > 0 ? ShipDirection.FORWARD : ShipDirection.BACKWARD; } else { shipDirection = shipDirection != ShipDirection.NONE ? shipDirection : ShipDirection.NONE; } } public void fire(Vector3 pos) { // Fires the ship's laser at the position firingLaser = true; laserTarget = new Vector2(pos.x, pos.y); } public void reset() { // Set no direction and not firing shipDirection = ShipDirection.NONE; firingLaser = false; } public void stopMoving() { shipSpeed = 0; rotationalVelocity = 0; shipDirection = ShipDirection.NONE; } public void dispose() { font.dispose(); } public void setDirection(ShipDirection d) { shipDirection = d; } private double degreesToRadians(float deg) { return deg * Math.PI / 180; } private void checkOutOfBounds() { if (x > windowWidth - shipWidth/2) { x = -shipWidth/2; } else if (x < -shipWidth/2) { x = windowWidth - shipWidth/2; } if (y > windowHeight - shipHeight/2) { y = -shipHeight/2; } else if (y < -shipHeight/2) { y = windowHeight - shipHeight/2; } } }
Laser now displays above ship. Laser displays above ship and moves and rotates with it.
pegasus-game/core/src/com/tlear/pegasus/Ship.java
Laser now displays above ship.
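The new draw() in this commit renders the turret after the hull and rotates it by the same shipAngle-90 about the ship's centre, which is why the laser now appears above the ship and tracks its rotation. Below is a standalone sketch of the underlying idea, rotating a ship-local offset into world space with plain trigonometry; the values are illustrative and not taken from the game:

// Standalone illustration: a part mounted at a ship-local offset follows the hull
// by rotating that offset by the hull angle before translating to world space.
public class TurretOffsetDemo {
    public static void main(String[] args) {
        double cx = 200.0, cy = 150.0;   // ship centre in world coordinates
        double offX = 0.0, offY = 30.0;  // turret offset from the centre, in ship-local space
        double shipAngleDeg = 45.0;      // current hull rotation
        double rad = Math.toRadians(shipAngleDeg);
        double worldX = cx + offX * Math.cos(rad) - offY * Math.sin(rad);
        double worldY = cy + offX * Math.sin(rad) + offY * Math.cos(rad);
        System.out.printf("turret drawn at (%.2f, %.2f)%n", worldX, worldY);
    }
}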
Java
mit
377add08cb7fc1ad7deddac641b50a2841364bba
0
QuarkWorks/realm-browser,jonasrottmann/realm-browser
package de.jonasrottmann.realmbrowser.utils; import android.graphics.Typeface; import android.support.annotation.NonNull; import android.text.SpannableString; import android.text.style.StyleSpan; import java.lang.reflect.Field; import java.lang.reflect.ParameterizedType; import java.util.Date; import io.realm.DynamicRealmObject; import io.realm.RealmList; public class MagicUtils { public static boolean isParametrizedField(@NonNull Field field) { return field.getGenericType() instanceof ParameterizedType; } @NonNull public static String createParametrizedName(@NonNull Field field) { ParameterizedType pType = (ParameterizedType) field.getGenericType(); String rawType = pType.getRawType().toString(); int rawTypeIndex = rawType.lastIndexOf("."); if (rawTypeIndex > 0) { rawType = rawType.substring(rawTypeIndex + 1); } String argument = pType.getActualTypeArguments()[0].toString(); int argumentIndex = argument.lastIndexOf("."); if (argumentIndex > 0) { argument = argument.substring(argumentIndex + 1); } return rawType + "<" + argument + ">"; } @NonNull public static CharSequence getFieldValueString(DynamicRealmObject realmObject, Field field) { String value; if (field.getType().getName().equals(Byte.class.getName()) || field.getType().getName().equals("byte")) { // Byte value = String.valueOf(realmObject.getByte(field.getName())); } else if (field.getType().getName().equals(Boolean.class.getName()) || field.getType().getName().equals("boolean")) { // Boolean value = String.valueOf(realmObject.getBoolean(field.getName())); } else if (field.getType().getName().equals(Short.class.getName()) || field.getType().getName().equals("short")) { // Short value = String.valueOf(realmObject.getShort(field.getName())); } else if (field.getType().getName().equals(Integer.class.getName()) || field.getType().getName().equals("int")) { // Integer value = String.valueOf(realmObject.getInt(field.getName())); } else if (field.getType().getName().equals(Long.class.getName()) || field.getType().getName().equals("long")) { // Long value = String.valueOf(realmObject.getLong(field.getName())); } else if (field.getType().getName().equals(Float.class.getName()) || field.getType().getName().equals("float")) { // Float value = String.valueOf(realmObject.getFloat(field.getName())); } else if (field.getType().getName().equals(Double.class.getName()) || field.getType().getName().equals("double")) { // Double value = String.valueOf(realmObject.getDouble(field.getName())); } else if (field.getType().getName().equals(String.class.getName())) { // String value = realmObject.getString(field.getName()); } else if (field.getType().getName().equals(Date.class.getName())) { // Date Date date = realmObject.getDate(field.getName()); if (date != null) value = date.toString(); else value = null; } else { if (field.getType().getName().equals(RealmList.class.getName())) { // RealmList value = (MagicUtils.createParametrizedName(field)); } else { // ? extends RealmObject value = realmObject.getObject(field.getName()) != null ? realmObject.getObject(field.getName()).toString() : null; } } if (value == null) { // Display null in italics to be able to distinguish between null and a string that actually says "null" value = "null"; SpannableString nullString = new SpannableString(value); nullString.setSpan(new StyleSpan(Typeface.ITALIC), 0, value.length(), 0); return nullString; } else { return value; } } }
realm-browser/src/main/java/de/jonasrottmann/realmbrowser/utils/MagicUtils.java
package de.jonasrottmann.realmbrowser.utils; import android.graphics.Typeface; import android.support.annotation.NonNull; import android.text.SpannableString; import android.text.style.StyleSpan; import java.lang.reflect.Field; import java.lang.reflect.ParameterizedType; import java.util.Date; import io.realm.DynamicRealmObject; import io.realm.RealmList; public class MagicUtils { public static boolean isParametrizedField(@NonNull Field field) { return field.getGenericType() instanceof ParameterizedType; } @NonNull public static String createParametrizedName(@NonNull Field field) { ParameterizedType pType = (ParameterizedType) field.getGenericType(); String rawType = pType.getRawType().toString(); int rawTypeIndex = rawType.lastIndexOf("."); if (rawTypeIndex > 0) { rawType = rawType.substring(rawTypeIndex + 1); } String argument = pType.getActualTypeArguments()[0].toString(); int argumentIndex = argument.lastIndexOf("."); if (argumentIndex > 0) { argument = argument.substring(argumentIndex + 1); } return rawType + "<" + argument + ">"; } @NonNull public static CharSequence getFieldValueString(DynamicRealmObject realmObject, Field field) { String value; if (field.getType().getName().equals(Byte.class.getName()) || field.getType().getName().equals("byte")) { // Byte value = String.valueOf(realmObject.getByte(field.getName())); } else if (field.getType().getName().equals(Boolean.class.getName()) || field.getType().getName().equals("boolean")) { // Boolean value = String.valueOf(realmObject.getBoolean(field.getName())); } else if (field.getType().getName().equals(Short.class.getName()) || field.getType().getName().equals("short")) { // Short value = String.valueOf(realmObject.getShort(field.getName())); } else if (field.getType().getName().equals(Integer.class.getName()) || field.getType().getName().equals("int")) { // Integer value = String.valueOf(realmObject.getInt(field.getName())); } else if (field.getType().getName().equals(Long.class.getName()) || field.getType().getName().equals("long")) { // Long value = String.valueOf(realmObject.getLong(field.getName())); } else if (field.getType().getName().equals(Float.class.getName()) || field.getType().getName().equals("float")) { // Float value = String.valueOf(realmObject.getFloat(field.getName())); } else if (field.getType().getName().equals(Double.class.getName()) || field.getType().getName().equals("double")) { // Double value = String.valueOf(realmObject.getDouble(field.getName())); } else if (field.getType().getName().equals(String.class.getName())) { // String value = realmObject.getString(field.getName()); } else if (field.getType().getName().equals(Date.class.getName())) { // Date Date date = realmObject.getDate(field.getName()); if (date != null) value = date.toString(); else value = null; } else { if (field.getType().getName().equals(RealmList.class.getName())) { // RealmList value = (MagicUtils.createParametrizedName(field)); } else { // ? extends RealmObject value = (realmObject.getObject(field.getName()).toString()); } } if (value == null) { // Display null in italics to be able to distinguish between null and a string that actually says "null" value = "null"; SpannableString nullString = new SpannableString(value); nullString.setSpan(new StyleSpan(Typeface.ITALIC), 0, value.length(), 0); return nullString; } else { return value; } } }
fixed NPE
realm-browser/src/main/java/de/jonasrottmann/realmbrowser/utils/MagicUtils.java
fixed NPE
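The fix in this record replaces the unconditional realmObject.getObject(...).toString() with a null check, so a missing related object falls through to the italic "null" rendering instead of throwing. A minimal sketch of the same guard in isolation (the method name is a hypothetical stand-in, not Realm API):

// Minimal illustration of the null guard applied in the fix: only call toString()
// when the looked-up object exists; returning null lets the caller render italic "null".
static String describeOrNull(Object related) {
    return related != null ? related.toString() : null;
}

The same guard could also be written as java.util.Objects.toString(related, null), which returns the supplied default when the argument is null.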
Java
epl-1.0
2c5595df91b2cb825f2d487f1bb39f3e7e4b8fec
0
bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs
/******************************************************************************* * Copyright (c) 1998, 2008 Oracle. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 02/11/2009-1.1 Michael O'Brien * - 259993: As part 2) During mergeClonesAfterCompletion() * If the the acquire and release threads are different * switch back to the stored acquire thread stored on the mergeManager. ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.*; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.FetchGroupManager; import org.eclipse.persistence.descriptors.invalidation.CacheInvalidationPolicy; import org.eclipse.persistence.exceptions.ConcurrencyException; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.internal.identitymaps.*; import org.eclipse.persistence.internal.helper.linkedlist.*; import org.eclipse.persistence.logging.SessionLog; /** * INTERNAL: * <p> * <b>Purpose</b>: Acquires all required locks for a particular merge process. * Implements a deadlock avoidance algorithm to prevent concurrent merge conflicts * * <p> * <b>Responsibilities</b>: * <ul> * <li> Acquires locks for writing threads. * <li> Provides deadlock avoidance behavior. * <li> Releases locks for writing threads. * </ul> * @author Gordon Yorke * @since 10.0.3 */ public class WriteLockManager { /* This attribute stores the list of threads that have had a problem acquiring locks */ /* the first element in this list will be the prevailing thread */ protected ExposedNodeLinkedList prevailingQueue; public WriteLockManager() { this.prevailingQueue = new ExposedNodeLinkedList(); } // this will allow us to prevent a readlock thread from looping forever. public static int MAXTRIES = 10000; /** * INTERNAL: * This method will return once the object is locked and all non-indirect * related objects are also locked. */ public Map acquireLocksForClone(Object objectForClone, ClassDescriptor descriptor, CacheKey cacheKey, AbstractSession session, UnitOfWorkImpl unitOfWork) { boolean successful = false; IdentityHashMap lockedObjects = new IdentityHashMap(); IdentityHashMap refreshedObjects = new IdentityHashMap(); try { // if the descriptor has indirection for all mappings then wait as there will be no deadlock risks CacheKey toWaitOn = acquireLockAndRelatedLocks(objectForClone, lockedObjects, refreshedObjects, cacheKey, descriptor, session, unitOfWork); int tries = 0; while (toWaitOn != null) {// loop until we've tried too many times. for (Iterator lockedList = lockedObjects.values().iterator(); lockedList.hasNext();) { ((CacheKey)lockedList.next()).releaseReadLock(); lockedList.remove(); } synchronized (toWaitOn.getMutex()) { try { if (toWaitOn.isAcquired()) {//last minute check to insure it is still locked. 
toWaitOn.getMutex().wait();// wait for lock on object to be released } } catch (InterruptedException ex) { // Ignore exception thread should continue. } } Object waitObject = toWaitOn.getObject(); // Object may be null for loss of identity. if (waitObject != null) { unitOfWork.checkInvalidObject(waitObject, toWaitOn, session.getDescriptor(waitObject)); refreshedObjects.put(waitObject, waitObject); } toWaitOn = acquireLockAndRelatedLocks(objectForClone, lockedObjects, refreshedObjects, cacheKey, descriptor, session, unitOfWork); if ((toWaitOn != null) && ((++tries) > MAXTRIES)) { // If we've tried too many times abort. throw ConcurrencyException.maxTriesLockOnCloneExceded(objectForClone); } } successful = true;//successfully acquired all locks } finally { if (!successful) {//did not acquire locks but we are exiting for (Iterator lockedList = lockedObjects.values().iterator(); lockedList.hasNext();) { ((CacheKey)lockedList.next()).releaseReadLock(); lockedList.remove(); } } } return lockedObjects; } /** * INTERNAL: * This is a recursive method used to acquire read locks on all objects that * will be cloned. These include all related objects for which there is no * indirection. * The returned object is the first object that the lock could not be acquired for. * The caller must try for exceptions and release locked objects in the case * of an exception. */ public CacheKey acquireLockAndRelatedLocks(Object objectForClone, Map lockedObjects, Map refreshedObjects, CacheKey cacheKey, ClassDescriptor descriptor, AbstractSession session, UnitOfWorkImpl unitOfWork) { if (!refreshedObjects.containsKey(objectForClone) && this.checkInvalidObject(objectForClone, cacheKey, descriptor, unitOfWork)) { return cacheKey; } // Attempt to get a read-lock, null is returned if cannot be read-locked. if (cacheKey.acquireReadLockNoWait()) { if (cacheKey.getObject() == null) { // This will be the case for deleted objects, NoIdentityMap, and aggregates. lockedObjects.put(objectForClone, cacheKey); } else { objectForClone = cacheKey.getObject(); if (lockedObjects.containsKey(objectForClone)) { // This is a check for loss of identity, the original check in // checkAndLockObject() will shortcircuit in the usual case. cacheKey.releaseReadLock(); return null; } // Store locked cachekey for release later. lockedObjects.put(objectForClone, cacheKey); } return traverseRelatedLocks(objectForClone, lockedObjects, refreshedObjects, descriptor, session, unitOfWork); } else { // Return the cache key that could not be locked. return cacheKey; } } /** * INTERNAL: * Check if the object is invalid and should be refreshed, return true, otherwise return false. * This is used to ensure that no invalid objects are registered. */ public boolean checkInvalidObject(Object object, CacheKey cacheKey, ClassDescriptor descriptor, UnitOfWorkImpl unitOfWork) { if (!unitOfWork.isNestedUnitOfWork() && (cacheKey.getObject() != null)) { CacheInvalidationPolicy cachePolicy = descriptor.getCacheInvalidationPolicy(); // BUG#6671556 refresh invalid objects when accessed in the unit of work. return (cachePolicy.shouldRefreshInvalidObjectsInUnitOfWork() && cachePolicy.isInvalidated(cacheKey)); } return false; } /** * INTERNAL: * This method will transition the previously acquired active * locks to deferred locks in the case a readlock could not be acquired for * a related object. Deferred locks must be employed to prevent deadlock * when waiting for the readlock while still protecting readers from * incomplete data. 
*/ public void transitionToDeferredLocks(MergeManager mergeManager){ if (mergeManager.isTransitionedToDeferredLocks()) return; for (CacheKey cacheKey : mergeManager.getAcquiredLocks()){ cacheKey.transitionToDeferredLock(); } mergeManager.transitionToDeferredLocks(); } /** * INTERNAL: * Traverse the object and acquire locks on all related objects. */ public CacheKey traverseRelatedLocks(Object objectForClone, Map lockedObjects, Map refreshedObjects, ClassDescriptor descriptor, AbstractSession session, UnitOfWorkImpl unitOfWork) { // If all mappings have indirection short-circuit. if (descriptor.shouldAcquireCascadedLocks()) { FetchGroupManager fetchGroupManager = descriptor.getFetchGroupManager(); boolean isPartialObject = (fetchGroupManager != null) && fetchGroupManager.isPartialObject(objectForClone); for (Iterator mappings = descriptor.getLockableMappings().iterator(); mappings.hasNext();) { DatabaseMapping mapping = (DatabaseMapping)mappings.next(); // Only cascade fetched mappings. if (!isPartialObject || (fetchGroupManager.isAttributeFetched(objectForClone, mapping.getAttributeName()))) { // any mapping in this list must not have indirection. Object objectToLock = mapping.getAttributeValueFromObject(objectForClone); if (mapping.isCollectionMapping()) { // Ignore null, means empty. if (objectToLock != null) { ContainerPolicy cp = mapping.getContainerPolicy(); Object iterator = cp.iteratorFor(objectToLock); while (cp.hasNext(iterator)) { Object object = cp.next(iterator, session); if (mapping.getReferenceDescriptor().hasWrapperPolicy()) { object = mapping.getReferenceDescriptor().getWrapperPolicy().unwrapObject(object, session); } CacheKey toWaitOn = checkAndLockObject(object, lockedObjects, refreshedObjects, mapping, session, unitOfWork); if (toWaitOn != null) { return toWaitOn; } } } } else { if (mapping.getReferenceDescriptor().hasWrapperPolicy()) { objectToLock = mapping.getReferenceDescriptor().getWrapperPolicy().unwrapObject(objectToLock, session); } CacheKey toWaitOn = checkAndLockObject(objectToLock, lockedObjects, refreshedObjects, mapping, session, unitOfWork); if (toWaitOn != null) { return toWaitOn; } } } } } return null; } /** * INTERNAL: * This method will be the entry point for threads attempting to acquire locks for all objects that have * a changeset. This method will hand off the processing of the deadlock algorithm to other member * methods. The mergeManager must be the active mergemanager for the calling thread. * Returns true if all required locks were acquired */ public void acquireRequiredLocks(MergeManager mergeManager, UnitOfWorkChangeSet changeSet) { if (!MergeManager.LOCK_ON_MERGE) {//lockOnMerge is a backdoor and not public return; } boolean locksToAcquire = true; boolean isForDistributedMerge = false; //while that thread has locks to acquire continue to loop. try { // initialize the MergeManager during this commit or merge for insert/updates only // this call is not required in acquireLocksForClone() or acquireLockAndRelatedLocks() mergeManager.setLockThread(Thread.currentThread()); AbstractSession session = mergeManager.getSession(); if (session.isUnitOfWork()) { session = ((UnitOfWorkImpl)session).getParent(); } else { // if the session in the mergemanager is not a unit of work then the //merge is of a changeSet into a distributed session. 
isForDistributedMerge = true; } while (locksToAcquire) { //lets assume all locks will be acquired locksToAcquire = false; //first access the changeSet and begin to acquire locks Iterator classIterator = changeSet.getObjectChanges().keySet().iterator(); while (classIterator.hasNext()) { // Bug 3294426 - objectChanges is now indexed by class name instead of class String objectClassName = (String)classIterator.next(); Hashtable changeSetTable = (Hashtable)changeSet.getObjectChanges().get(objectClassName); //the order here does not matter as the deadlock avoidance code will handle any conflicts and maintaining //order would be costly Iterator changeSetIterator = changeSetTable.keySet().iterator(); // Perf: Bug 3324418 - Reduce the number of Class.forName() calls Class objectClass = null; while (changeSetIterator.hasNext()) { ObjectChangeSet objectChangeSet = (ObjectChangeSet)changeSetIterator.next(); if (objectChangeSet.getCacheKey() == null) { //skip this process as we will be unable to acquire the correct cachekey anyway //this is a new object with identity after write sequencing continue; } if (objectClass == null) { objectClass = objectChangeSet.getClassType(session); } // It would be so much nicer if the change set was keyed by the class instead of class name, // so this could be done once. We should key on class, and only convert to keying on name when broadcasting changes. ClassDescriptor descriptor = session.getDescriptor(objectClass); // PERF: Do not merge nor lock into the session cache if descriptor set to unit of work isolated. if (descriptor.shouldIsolateObjectsInUnitOfWork()) { break; } CacheKey activeCacheKey = attemptToAcquireLock(objectClass, objectChangeSet.getCacheKey(), session); if (activeCacheKey == null) { // if cacheKey is null then the lock was not available no need to synchronize this block,because if the // check fails then this thread will just return to the queue until it gets woken up. if (this.prevailingQueue.getFirst() == mergeManager) { // wait on this object until it is free, or until wait time expires because // this thread is the prevailing thread activeCacheKey = waitOnObjectLock(objectClass, objectChangeSet.getCacheKey(), session,(int)Math.round((Math.random()*500))); } if (activeCacheKey == null) { // failed to acquire lock, release all acquired // locks and place thread on waiting list releaseAllAcquiredLocks(mergeManager); // get cacheKey activeCacheKey = session.getIdentityMapAccessorInstance().getCacheKeyForObjectForLock(objectChangeSet.getCacheKey().getKey(), objectClass, descriptor); if (session.shouldLog(SessionLog.FINER, SessionLog.CACHE)) { Object[] params = new Object[3]; params[0] = objectClass; params[1] = objectChangeSet.getCacheKey() != null ? objectChangeSet.getCacheKey().getKey() : new Vector(); params[2] = Thread.currentThread().getName(); session.log(SessionLog.FINER, SessionLog.CACHE, "dead_lock_encountered_on_write_no_cachekey", params, null, true); } if (mergeManager.getWriteLockQueued() == null) { // thread is entering the wait queue for the // first time // set the QueueNode to be the node from the // linked list for quick removal upon // acquiring all locks synchronized (this.prevailingQueue) { mergeManager.setQueueNode(this.prevailingQueue.addLast(mergeManager)); } } // set the cache key on the merge manager for // the object that could not be acquired mergeManager.setWriteLockQueued(objectChangeSet.getCacheKey()); try { if (activeCacheKey != null){ //wait on the lock of the object that we couldn't get. 
synchronized (activeCacheKey.getMutex()) { // verify that the cache key is still locked before we wait on it, as //it may have been releases since we tried to acquire it. if (activeCacheKey.getMutex().isAcquired() && (activeCacheKey.getMutex().getActiveThread() != Thread.currentThread())) { activeCacheKey.getMutex().wait(); } } } } catch (InterruptedException exception) { throw org.eclipse.persistence.exceptions.ConcurrencyException.waitWasInterrupted(exception.getMessage()); } locksToAcquire = true; //failed to acquire, exit this loop and ensure that the original loop will continue break; }else{ objectChangeSet.setActiveCacheKey(activeCacheKey); mergeManager.getAcquiredLocks().add(activeCacheKey); } } else { objectChangeSet.setActiveCacheKey(activeCacheKey); mergeManager.getAcquiredLocks().add(activeCacheKey); } } //if a lock failed reset to the beginning if (locksToAcquire) { break; } } } } catch (RuntimeException exception) { // if there was an exception then release. //must not release in a finally block as release only occurs in this method // if there is a problem or all of the locks can not be acquired. releaseAllAcquiredLocks(mergeManager); throw exception; } finally { if (mergeManager.getWriteLockQueued() != null) { //the merge manager entered the wait queue and must be cleaned up synchronized(this.prevailingQueue) { this.prevailingQueue.remove(mergeManager.getQueueNode()); } mergeManager.setWriteLockQueued(null); } } } /** * INTERNAL: * This method will be called by a merging thread that is attempting to lock * a new object that was not locked previously. Unlike the other methods * within this class this method will lock only this object. */ public Object appendLock(Vector primaryKeys, Object objectToLock, ClassDescriptor descriptor, MergeManager mergeManager, AbstractSession session) { CacheKey lockedCacheKey = session.getIdentityMapAccessorInstance().acquireLockNoWait(primaryKeys, descriptor.getJavaClass(), true, descriptor); if (lockedCacheKey == null) { session.getIdentityMapAccessorInstance().getWriteLockManager().transitionToDeferredLocks(mergeManager); lockedCacheKey = session.getIdentityMapAccessorInstance().acquireDeferredLock(primaryKeys, descriptor.getJavaClass(), descriptor); Object cachedObject = lockedCacheKey.getObject(); if (cachedObject == null) { cachedObject = lockedCacheKey.waitForObject(); } lockedCacheKey.releaseDeferredLock(); return cachedObject; } else { if (lockedCacheKey.getObject() == null) { lockedCacheKey.setObject(objectToLock); // set the object in the // cachekey // for others to find an prevent cycles } mergeManager.getAcquiredLocks().add(lockedCacheKey); return lockedCacheKey.getObject(); } } /** * INTERNAL: * This method performs the operations of finding the cacheKey and locking it if possible. * Returns True if the lock was acquired, false otherwise */ protected CacheKey attemptToAcquireLock(Class objectClass, CacheKey cacheKey, AbstractSession session) { return session.getIdentityMapAccessorInstance().acquireLockNoWait(cacheKey.getKey(), objectClass, true, session.getDescriptor(objectClass)); } /** * INTERNAL: * Simply check that the object is not already locked then pass it on to the locking method */ protected CacheKey checkAndLockObject(Object objectToLock, Map lockedObjects, Map refreshedObjects, DatabaseMapping mapping, AbstractSession session, UnitOfWorkImpl unitOfWork) { //the cachekey should always reference an object otherwise what would we be cloning. 
if ((objectToLock != null) && !lockedObjects.containsKey(objectToLock)) { Vector primaryKeysToLock = null; ClassDescriptor referenceDescriptor = null; if (mapping.getReferenceDescriptor().hasInheritance() || mapping.getReferenceDescriptor().isDescriptorForInterface()) { referenceDescriptor = session.getDescriptor(objectToLock); } else { referenceDescriptor = mapping.getReferenceDescriptor(); } // Need to traverse aggregates, but not lock aggregates directly. if (referenceDescriptor.isDescriptorTypeAggregate()) { traverseRelatedLocks(objectToLock, lockedObjects, refreshedObjects, referenceDescriptor, session, unitOfWork); } else { primaryKeysToLock = referenceDescriptor.getObjectBuilder().extractPrimaryKeyFromObject(objectToLock, session); CacheKey cacheKey = session.getIdentityMapAccessorInstance().getCacheKeyForObjectForLock(primaryKeysToLock, objectToLock.getClass(), referenceDescriptor); if (cacheKey == null) { // Cache key may be null for no-identity map, missing or deleted object, just create a new one to be locked. cacheKey = new CacheKey(primaryKeysToLock); cacheKey.setReadTime(System.currentTimeMillis()); } CacheKey toWaitOn = acquireLockAndRelatedLocks(objectToLock, lockedObjects, refreshedObjects, cacheKey, referenceDescriptor, session, unitOfWork); if (toWaitOn != null) { return toWaitOn; } } } return null; } /** * INTERNAL: * This method will release all acquired locks */ public void releaseAllAcquiredLocks(MergeManager mergeManager) { if (!MergeManager.LOCK_ON_MERGE) {//lockOnMerge is a backdoor and not public return; } Iterator locks = mergeManager.getAcquiredLocks().iterator(); while (locks.hasNext()) { CacheKey cacheKeyToRemove = (CacheKey) locks.next(); if (cacheKeyToRemove.getObject() == null) { cacheKeyToRemove.removeFromOwningMap(); } if (mergeManager.isTransitionedToDeferredLocks()) { cacheKeyToRemove.releaseDeferredLock(); } else { cacheKeyToRemove.release(); } locks.remove(); } } /** * INTERNAL: * This method performs the operations of finding the cacheKey and locking it if possible. * Waits until the lock can be acquired */ protected CacheKey waitOnObjectLock(Class objectClass, CacheKey cacheKey, AbstractSession session, int waitTime) { return session.getIdentityMapAccessorInstance().acquireLockWithWait(cacheKey.getKey(), objectClass, true, session.getDescriptor(objectClass), waitTime); } }
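For orientation, the acquireLocksForClone() loop in the file above follows an all-or-nothing locking discipline: try-lock every required CacheKey without blocking, and on any failure release everything already held, wait on the blocking key, and retry for at most MAXTRIES passes; acquireRequiredLocks() applies the same discipline to write locks. The following is a minimal, self-contained sketch of that pattern using only plain JDK locks. The AllOrNothingLocker class, its method names, the use of ReentrantLock in place of EclipseLink's CacheKey mutexes, and the randomized back-off standing in for waiting on the specific blocking mutex are illustrative assumptions, not part of the file above.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.locks.ReentrantLock;

public class AllOrNothingLocker {

    private static final int MAX_TRIES = 10000; // mirrors WriteLockManager.MAXTRIES

    /** Acquires every lock in the list, or throws after MAX_TRIES failed passes. */
    public static void lockAll(List<ReentrantLock> locks) throws InterruptedException {
        Deque<ReentrantLock> held = new ArrayDeque<>();
        for (int tries = 0; tries < MAX_TRIES; tries++) {
            boolean acquiredAll = true;
            for (ReentrantLock lock : locks) {
                if (lock.tryLock()) {          // never block while holding other locks
                    held.push(lock);
                } else {
                    acquiredAll = false;       // someone else holds this lock
                    break;
                }
            }
            if (acquiredAll) {
                return;                        // caller now owns every lock in 'locks' and must unlock them later
            }
            // Deadlock avoidance: give up everything already held before waiting, then retry.
            while (!held.isEmpty()) {
                held.pop().unlock();
            }
            // Randomized back-off in place of waiting on the specific blocking mutex.
            Thread.sleep(ThreadLocalRandom.current().nextInt(1, 50));
        }
        throw new IllegalStateException("could not acquire all locks after " + MAX_TRIES + " tries");
    }
}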
foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/helper/WriteLockManager.java
/******************************************************************************* * Copyright (c) 1998, 2008 Oracle. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 02/11/2009-1.1 Michael O'Brien * - 259993: As part 2) During mergeClonesAfterCompletion() * If the the acquire and release threads are different * switch back to the stored acquire thread stored on the mergeManager. ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.*; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.FetchGroupManager; import org.eclipse.persistence.descriptors.invalidation.CacheInvalidationPolicy; import org.eclipse.persistence.exceptions.ConcurrencyException; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.internal.identitymaps.*; import org.eclipse.persistence.internal.helper.linkedlist.*; import org.eclipse.persistence.logging.SessionLog; /** * INTERNAL: * <p> * <b>Purpose</b>: Acquires all required locks for a particular merge process. * Implements a deadlock avoidance algorithm to prevent concurrent merge conflicts * * <p> * <b>Responsibilities</b>: * <ul> * <li> Acquires locks for writing threads. * <li> Provides deadlock avoidance behavior. * <li> Releases locks for writing threads. * </ul> * @author Gordon Yorke * @since 10.0.3 */ public class WriteLockManager { /* This attribute stores the list of threads that have had a problem acquiring locks */ /* the first element in this list will be the prevailing thread */ protected ExposedNodeLinkedList prevailingQueue; public WriteLockManager() { this.prevailingQueue = new ExposedNodeLinkedList(); } // this will allow us to prevent a readlock thread from looping forever. public static int MAXTRIES = 10000; /** * INTERNAL: * This method will return once the object is locked and all non-indirect * related objects are also locked. */ public Map acquireLocksForClone(Object objectForClone, ClassDescriptor descriptor, CacheKey cacheKey, AbstractSession session, UnitOfWorkImpl unitOfWork) { boolean successful = false; IdentityHashMap lockedObjects = new IdentityHashMap(); IdentityHashMap refreshedObjects = new IdentityHashMap(); try { // if the descriptor has indirection for all mappings then wait as there will be no deadlock risks CacheKey toWaitOn = acquireLockAndRelatedLocks(objectForClone, lockedObjects, refreshedObjects, cacheKey, descriptor, session, unitOfWork); int tries = 0; while (toWaitOn != null) {// loop until we've tried too many times. for (Iterator lockedList = lockedObjects.values().iterator(); lockedList.hasNext();) { ((CacheKey)lockedList.next()).releaseReadLock(); lockedList.remove(); } synchronized (toWaitOn.getMutex()) { try { if (toWaitOn.isAcquired()) {//last minute check to insure it is still locked. 
toWaitOn.getMutex().wait();// wait for lock on object to be released } } catch (InterruptedException ex) { // Ignore exception thread should continue. } } Object waitObject = toWaitOn.getObject(); // Object may be null for loss of identity. if (waitObject != null) { unitOfWork.checkInvalidObject(waitObject, toWaitOn, session.getDescriptor(waitObject)); refreshedObjects.put(waitObject, waitObject); } toWaitOn = acquireLockAndRelatedLocks(objectForClone, lockedObjects, refreshedObjects, cacheKey, descriptor, session, unitOfWork); if ((toWaitOn != null) && ((++tries) > MAXTRIES)) { // If we've tried too many times abort. throw ConcurrencyException.maxTriesLockOnCloneExceded(objectForClone); } } successful = true;//successfully acquired all locks } finally { if (!successful) {//did not acquire locks but we are exiting for (Iterator lockedList = lockedObjects.values().iterator(); lockedList.hasNext();) { ((CacheKey)lockedList.next()).releaseReadLock(); lockedList.remove(); } } } return lockedObjects; } /** * INTERNAL: * This is a recursive method used to acquire read locks on all objects that * will be cloned. These include all related objects for which there is no * indirection. * The returned object is the first object that the lock could not be acquired for. * The caller must try for exceptions and release locked objects in the case * of an exception. */ public CacheKey acquireLockAndRelatedLocks(Object objectForClone, Map lockedObjects, Map refreshedObjects, CacheKey cacheKey, ClassDescriptor descriptor, AbstractSession session, UnitOfWorkImpl unitOfWork) { if (!refreshedObjects.containsKey(objectForClone) && this.checkInvalidObject(objectForClone, cacheKey, descriptor, unitOfWork)) { return cacheKey; } // Attempt to get a read-lock, null is returned if cannot be read-locked. if (cacheKey.acquireReadLockNoWait()) { if (cacheKey.getObject() == null) { // This will be the case for deleted objects, NoIdentityMap, and aggregates. lockedObjects.put(objectForClone, cacheKey); } else { objectForClone = cacheKey.getObject(); if (lockedObjects.containsKey(objectForClone)) { // This is a check for loss of identity, the original check in // checkAndLockObject() will shortcircuit in the usual case. cacheKey.releaseReadLock(); return null; } // Store locked cachekey for release later. lockedObjects.put(objectForClone, cacheKey); } return traverseRelatedLocks(objectForClone, lockedObjects, refreshedObjects, descriptor, session, unitOfWork); } else { // Return the cache key that could not be locked. return cacheKey; } } /** * INTERNAL: * Check if the object is invalid and should be refreshed, return true, otherwise return false. * This is used to ensure that no invalid objects are registered. */ public boolean checkInvalidObject(Object object, CacheKey cacheKey, ClassDescriptor descriptor, UnitOfWorkImpl unitOfWork) { if (!unitOfWork.isNestedUnitOfWork() && (cacheKey.getObject() != null)) { CacheInvalidationPolicy cachePolicy = descriptor.getCacheInvalidationPolicy(); // BUG#6671556 refresh invalid objects when accessed in the unit of work. return (cachePolicy.shouldRefreshInvalidObjectsInUnitOfWork() && cachePolicy.isInvalidated(cacheKey)); } return false; } /** * INTERNAL: * This method will transition the previously acquired active * locks to deferred locks in the case a readlock could not be acquired for * a related object. Deferred locks must be employed to prevent deadlock * when waiting for the readlock while still protecting readers from * incomplete data. 
*/ public void transitionToDeferredLocks(MergeManager mergeManager){ if (mergeManager.isTransitionedToDeferredLocks()) return; for (CacheKey cacheKey : mergeManager.getAcquiredLocks()){ cacheKey.transitionToDeferredLock(); } mergeManager.transitionToDeferredLocks(); } /** * INTERNAL: * Traverse the object and acquire locks on all related objects. */ public CacheKey traverseRelatedLocks(Object objectForClone, Map lockedObjects, Map refreshedObjects, ClassDescriptor descriptor, AbstractSession session, UnitOfWorkImpl unitOfWork) { // If all mappings have indirection short-circuit. if (descriptor.shouldAcquireCascadedLocks()) { FetchGroupManager fetchGroupManager = descriptor.getFetchGroupManager(); boolean isPartialObject = (fetchGroupManager != null) && fetchGroupManager.isPartialObject(objectForClone); for (Iterator mappings = descriptor.getLockableMappings().iterator(); mappings.hasNext();) { DatabaseMapping mapping = (DatabaseMapping)mappings.next(); // Only cascade fetched mappings. if (!isPartialObject || (fetchGroupManager.isAttributeFetched(objectForClone, mapping.getAttributeName()))) { // any mapping in this list must not have indirection. Object objectToLock = mapping.getAttributeValueFromObject(objectForClone); if (mapping.isCollectionMapping()) { // Ignore null, means empty. if (objectToLock != null) { ContainerPolicy cp = mapping.getContainerPolicy(); Object iterator = cp.iteratorFor(objectToLock); while (cp.hasNext(iterator)) { Object object = cp.next(iterator, session); if (mapping.getReferenceDescriptor().hasWrapperPolicy()) { object = mapping.getReferenceDescriptor().getWrapperPolicy().unwrapObject(object, session); } CacheKey toWaitOn = checkAndLockObject(object, lockedObjects, refreshedObjects, mapping, session, unitOfWork); if (toWaitOn != null) { return toWaitOn; } } } } else { if (mapping.getReferenceDescriptor().hasWrapperPolicy()) { objectToLock = mapping.getReferenceDescriptor().getWrapperPolicy().unwrapObject(objectToLock, session); } CacheKey toWaitOn = checkAndLockObject(objectToLock, lockedObjects, refreshedObjects, mapping, session, unitOfWork); if (toWaitOn != null) { return toWaitOn; } } } } } return null; } /** * INTERNAL: * This method will be the entry point for threads attempting to acquire locks for all objects that have * a changeset. This method will hand off the processing of the deadlock algorithm to other member * methods. The mergeManager must be the active mergemanager for the calling thread. * Returns true if all required locks were acquired */ public void acquireRequiredLocks(MergeManager mergeManager, UnitOfWorkChangeSet changeSet) { if (!MergeManager.LOCK_ON_MERGE) {//lockOnMerge is a backdoor and not public return; } boolean locksToAcquire = true; boolean isForDistributedMerge = false; //while that thread has locks to acquire continue to loop. try { // initialize the MergeManager during this commit or merge for insert/updates only // this call is not required in acquireLocksForClone() or acquireLockAndRelatedLocks() mergeManager.setLockThread(Thread.currentThread()); AbstractSession session = mergeManager.getSession(); if (session.isUnitOfWork()) { session = ((UnitOfWorkImpl)session).getParent(); } else { // if the session in the mergemanager is not a unit of work then the //merge is of a changeSet into a distributed session. 
isForDistributedMerge = true; } while (locksToAcquire) { //lets assume all locks will be acquired locksToAcquire = false; //first access the changeSet and begin to acquire locks Iterator classIterator = changeSet.getObjectChanges().keySet().iterator(); while (classIterator.hasNext()) { // Bug 3294426 - objectChanges is now indexed by class name instead of class String objectClassName = (String)classIterator.next(); Hashtable changeSetTable = (Hashtable)changeSet.getObjectChanges().get(objectClassName); //the order here does not matter as the deadlock avoidance code will handle any conflicts and maintaining //order would be costly Iterator changeSetIterator = changeSetTable.keySet().iterator(); // Perf: Bug 3324418 - Reduce the number of Class.forName() calls Class objectClass = null; while (changeSetIterator.hasNext()) { ObjectChangeSet objectChangeSet = (ObjectChangeSet)changeSetIterator.next(); if (objectChangeSet.getCacheKey() == null) { //skip this process as we will be unable to acquire the correct cachekey anyway //this is a new object with identity after write sequencing continue; } if (objectClass == null) { objectClass = objectChangeSet.getClassType(session); } // It would be so much nicer if the change set was keyed by the class instead of class name, // so this could be done once. We should key on class, and only convert to keying on name when broadcasting changes. ClassDescriptor descriptor = session.getDescriptor(objectClass); // PERF: Do not merge nor lock into the session cache if descriptor set to unit of work isolated. if (descriptor.shouldIsolateObjectsInUnitOfWork()) { break; } CacheKey activeCacheKey = attemptToAcquireLock(objectClass, objectChangeSet.getCacheKey(), session); if (activeCacheKey == null) { // if cacheKey is null then the lock was not available no need to synchronize this block,because if the // check fails then this thread will just return to the queue until it gets woken up. if (this.prevailingQueue.getFirst() == mergeManager) { // wait on this object until it is free, or until wait time expires because // this thread is the prevailing thread activeCacheKey = waitOnObjectLock(objectClass, objectChangeSet.getCacheKey(), session,(int)Math.round((Math.random()*500))); } if (activeCacheKey == null) { // failed to acquire lock, release all acquired // locks and place thread on waiting list releaseAllAcquiredLocks(mergeManager); // get cacheKey activeCacheKey = session.getIdentityMapAccessorInstance().getCacheKeyForObjectForLock(objectChangeSet.getCacheKey().getKey(), objectClass, descriptor); if (session.shouldLog(SessionLog.FINER, SessionLog.CACHE)) { Object[] params = new Object[3]; params[0] = objectClass; params[1] = objectChangeSet.getCacheKey() != null ? objectChangeSet.getCacheKey().getKey() : new Vector(); params[2] = Thread.currentThread().getName(); session.log(SessionLog.FINER, SessionLog.CACHE, "dead_lock_encountered_on_write_no_cachekey", params, null, true); } if (mergeManager.getWriteLockQueued() == null) { // thread is entering the wait queue for the // first time // set the QueueNode to be the node from the // linked list for quick removal upon // acquiring all locks synchronized (this.prevailingQueue) { mergeManager.setQueueNode(this.prevailingQueue.addLast(mergeManager)); } } // set the cache key on the merge manager for // the object that could not be acquired mergeManager.setWriteLockQueued(objectChangeSet.getCacheKey()); try { if (activeCacheKey != null){ //wait on the lock of the object that we couldn't get. 
synchronized (activeCacheKey.getMutex()) { // verify that the cache key is still locked before we wait on it, as //it may have been releases since we tried to acquire it. if (activeCacheKey.getMutex().isAcquired() && (activeCacheKey.getMutex().getActiveThread() != Thread.currentThread())) { activeCacheKey.getMutex().wait(); } } } } catch (InterruptedException exception) { throw org.eclipse.persistence.exceptions.ConcurrencyException.waitWasInterrupted(exception.getMessage()); } locksToAcquire = true; //failed to acquire, exit this loop and ensure that the original loop will continue break; }else{ objectChangeSet.setActiveCacheKey(activeCacheKey); mergeManager.getAcquiredLocks().add(activeCacheKey); } } else { objectChangeSet.setActiveCacheKey(activeCacheKey); mergeManager.getAcquiredLocks().add(activeCacheKey); } } //if a lock failed reset to the beginning if (locksToAcquire) { break; } } } } catch (RuntimeException exception) { // if there was an exception then release. //must not release in a finally block as release only occurs in this method // if there is a problem or all of the locks can not be acquired. releaseAllAcquiredLocks(mergeManager); throw exception; } finally { if (mergeManager.getWriteLockQueued() != null) { //the merge manager entered the wait queue and must be cleaned up synchronized(this.prevailingQueue) { this.prevailingQueue.remove(mergeManager.getQueueNode()); } mergeManager.setWriteLockQueued(null); } } } /** * INTERNAL: * This method will be called by a merging thread that is attempting to lock * a new object that was not locked previously. Unlike the other methods * within this class this method will lock only this object. */ public Object appendLock(Vector primaryKeys, Object objectToLock, ClassDescriptor descriptor, MergeManager mergeManager, AbstractSession session) { CacheKey lockedCacheKey = session.getIdentityMapAccessorInstance().acquireLockNoWait(primaryKeys, descriptor.getJavaClass(), true, descriptor); if (lockedCacheKey == null) { session.getIdentityMapAccessorInstance().getWriteLockManager().transitionToDeferredLocks(mergeManager); lockedCacheKey.acquireDeferredLock(); Object cachedObject = lockedCacheKey.getObject(); if (cachedObject == null) { cachedObject = lockedCacheKey.waitForObject(); } lockedCacheKey.releaseDeferredLock(); return cachedObject; } else { if (lockedCacheKey.getObject() == null) { lockedCacheKey.setObject(objectToLock); // set the object in the // cachekey // for others to find an prevent cycles } mergeManager.getAcquiredLocks().add(lockedCacheKey); return lockedCacheKey.getObject(); } } /** * INTERNAL: * This method performs the operations of finding the cacheKey and locking it if possible. * Returns True if the lock was acquired, false otherwise */ protected CacheKey attemptToAcquireLock(Class objectClass, CacheKey cacheKey, AbstractSession session) { return session.getIdentityMapAccessorInstance().acquireLockNoWait(cacheKey.getKey(), objectClass, true, session.getDescriptor(objectClass)); } /** * INTERNAL: * Simply check that the object is not already locked then pass it on to the locking method */ protected CacheKey checkAndLockObject(Object objectToLock, Map lockedObjects, Map refreshedObjects, DatabaseMapping mapping, AbstractSession session, UnitOfWorkImpl unitOfWork) { //the cachekey should always reference an object otherwise what would we be cloning. 
if ((objectToLock != null) && !lockedObjects.containsKey(objectToLock)) { Vector primaryKeysToLock = null; ClassDescriptor referenceDescriptor = null; if (mapping.getReferenceDescriptor().hasInheritance() || mapping.getReferenceDescriptor().isDescriptorForInterface()) { referenceDescriptor = session.getDescriptor(objectToLock); } else { referenceDescriptor = mapping.getReferenceDescriptor(); } // Need to traverse aggregates, but not lock aggregates directly. if (referenceDescriptor.isDescriptorTypeAggregate()) { traverseRelatedLocks(objectToLock, lockedObjects, refreshedObjects, referenceDescriptor, session, unitOfWork); } else { primaryKeysToLock = referenceDescriptor.getObjectBuilder().extractPrimaryKeyFromObject(objectToLock, session); CacheKey cacheKey = session.getIdentityMapAccessorInstance().getCacheKeyForObjectForLock(primaryKeysToLock, objectToLock.getClass(), referenceDescriptor); if (cacheKey == null) { // Cache key may be null for no-identity map, missing or deleted object, just create a new one to be locked. cacheKey = new CacheKey(primaryKeysToLock); cacheKey.setReadTime(System.currentTimeMillis()); } CacheKey toWaitOn = acquireLockAndRelatedLocks(objectToLock, lockedObjects, refreshedObjects, cacheKey, referenceDescriptor, session, unitOfWork); if (toWaitOn != null) { return toWaitOn; } } } return null; } /** * INTERNAL: * This method will release all acquired locks */ public void releaseAllAcquiredLocks(MergeManager mergeManager) { if (!MergeManager.LOCK_ON_MERGE) {//lockOnMerge is a backdoor and not public return; } Iterator locks = mergeManager.getAcquiredLocks().iterator(); while (locks.hasNext()) { CacheKey cacheKeyToRemove = (CacheKey) locks.next(); if (cacheKeyToRemove.getObject() == null) { cacheKeyToRemove.removeFromOwningMap(); } if (mergeManager.isTransitionedToDeferredLocks()) { cacheKeyToRemove.releaseDeferredLock(); } else { cacheKeyToRemove.release(); } locks.remove(); } } /** * INTERNAL: * This method performs the operations of finding the cacheKey and locking it if possible. * Waits until the lock can be acquired */ protected CacheKey waitOnObjectLock(Class objectClass, CacheKey cacheKey, AbstractSession session, int waitTime) { return session.getIdentityMapAccessorInstance().acquireLockWithWait(cacheKey.getKey(), objectClass, true, session.getDescriptor(objectClass), waitTime); } }
Simple update to ask the identity map accessor for the deferred lock instead of the cache key, which will be null Former-commit-id: 0da00c3ccc737623f3417efaa01e4e4658664b03
foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/helper/WriteLockManager.java
Simple update to ask the identity map accessor for the deferred lock instead of the cache key, which will be null
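To make the change described by this commit message concrete, here is the relevant excerpt of appendLock(), condensed from the new_contents and old_contents fields of this record; both fragments appear verbatim in the file versions above, so nothing here is invented. On the branch where acquireLockNoWait() returned null, the old version called acquireDeferredLock() on that null lockedCacheKey, whereas the new version asks the identity map accessor for the deferred lock:

// old_contents: lockedCacheKey is null on this branch, so this call throws a NullPointerException
session.getIdentityMapAccessorInstance().getWriteLockManager().transitionToDeferredLocks(mergeManager);
lockedCacheKey.acquireDeferredLock();

// new_contents: obtain the deferred lock from the identity map accessor instead
session.getIdentityMapAccessorInstance().getWriteLockManager().transitionToDeferredLocks(mergeManager);
lockedCacheKey = session.getIdentityMapAccessorInstance().acquireDeferredLock(primaryKeys, descriptor.getJavaClass(), descriptor);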
Java
epl-1.0
858812202358a29514678b39e86cf944767959e1
0
kopl/SPLevo,kopl/SPLevo
/** */ package org.splevo.modisco.java.diffing.java2kdmdiff.impl; import java.util.Collection; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.compare.diff.metamodel.AbstractDiffExtension; import org.eclipse.emf.compare.diff.metamodel.DiffElement; import org.eclipse.emf.compare.diff.metamodel.DiffPackage; import org.eclipse.emf.compare.diff.metamodel.DifferenceKind; import org.eclipse.emf.compare.diff.metamodel.impl.AbstractDiffExtensionImpl; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.util.EObjectContainmentEList; import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList; import org.eclipse.emf.ecore.util.InternalEList; import org.splevo.modisco.java.diffing.java2kdmdiff.ClassDelete; import org.splevo.modisco.java.diffing.java2kdmdiff.ClassInsert; import org.splevo.modisco.java.diffing.java2kdmdiff.FieldDelete; import org.splevo.modisco.java.diffing.java2kdmdiff.FieldInsert; import org.splevo.modisco.java.diffing.java2kdmdiff.ImplementsInterfaceDelete; import org.splevo.modisco.java.diffing.java2kdmdiff.ImplementsInterfaceInsert; import org.splevo.modisco.java.diffing.java2kdmdiff.ImportDelete; import org.splevo.modisco.java.diffing.java2kdmdiff.ImportInsert; import org.splevo.modisco.java.diffing.java2kdmdiff.Java2KDMDiffExtension; import org.splevo.modisco.java.diffing.java2kdmdiff.Java2KDMDiffPackage; import org.splevo.modisco.java.diffing.java2kdmdiff.MethodDelete; import org.splevo.modisco.java.diffing.java2kdmdiff.MethodInsert; import org.splevo.modisco.java.diffing.java2kdmdiff.PackageDelete; import org.splevo.modisco.java.diffing.java2kdmdiff.PackageInsert; import org.splevo.modisco.java.diffing.java2kdmdiff.StatementChange; import org.splevo.modisco.java.diffing.java2kdmdiff.StatementDelete; import org.splevo.modisco.java.diffing.java2kdmdiff.StatementInsert; /** * <!-- begin-user-doc --> An implementation of the model object ' * <em><b>Extension</b></em>'. <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li> * {@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getSubDiffElements * <em>Sub Diff Elements</em>}</li> * <li> * {@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getIsHiddenBy * <em>Is Hidden By</em>}</li> * <li> * {@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#isConflicting * <em>Conflicting</em>}</li> * <li> * {@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getKind * <em>Kind</em>}</li> * <li> * {@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#isRemote * <em>Remote</em>}</li> * <li> * {@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getRequires * <em>Requires</em>}</li> * <li> * {@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getRequiredBy * <em>Required By</em>}</li> * </ul> * </p> * * @generated */ public abstract class Java2KDMDiffExtensionImpl extends AbstractDiffExtensionImpl implements Java2KDMDiffExtension { /** * The cached value of the '{@link #getSubDiffElements() * <em>Sub Diff Elements</em>}' containment reference list. 
<!-- * begin-user-doc --> <!-- end-user-doc --> * * @see #getSubDiffElements() * @generated * @ordered */ protected EList<DiffElement> subDiffElements; /** * The cached value of the '{@link #getIsHiddenBy() <em>Is Hidden By</em>}' * reference list. <!-- begin-user-doc --> <!-- end-user-doc --> * * @see #getIsHiddenBy() * @generated * @ordered */ protected EList<AbstractDiffExtension> isHiddenBy; /** * The default value of the '{@link #isConflicting() <em>Conflicting</em>}' * attribute. <!-- begin-user-doc --> <!-- end-user-doc --> * * @see #isConflicting() * @generated * @ordered */ protected static final boolean CONFLICTING_EDEFAULT = false; /** * The cached value of the '{@link #isConflicting() <em>Conflicting</em>}' * attribute. <!-- begin-user-doc --> <!-- end-user-doc --> * * @see #isConflicting() * @generated * @ordered */ protected boolean conflicting = CONFLICTING_EDEFAULT; /** * The default value of the '{@link #getKind() <em>Kind</em>}' attribute. * <!-- begin-user-doc --> <!-- end-user-doc --> * * @see #getKind() * @generated * @ordered */ protected static final DifferenceKind KIND_EDEFAULT = DifferenceKind.ADDITION; /** * The cached value of the '{@link #getKind() <em>Kind</em>}' attribute. * <!-- begin-user-doc --> <!-- end-user-doc --> * * @see #getKind() * @generated * @ordered */ protected DifferenceKind kind = KIND_EDEFAULT; /** * The default value of the '{@link #isRemote() <em>Remote</em>}' attribute. * <!-- begin-user-doc --> <!-- end-user-doc --> * * @see #isRemote() * @generated * @ordered */ protected static final boolean REMOTE_EDEFAULT = false; /** * The cached value of the '{@link #isRemote() <em>Remote</em>}' attribute. * <!-- begin-user-doc --> <!-- end-user-doc --> * * @see #isRemote() * @generated * @ordered */ protected boolean remote = REMOTE_EDEFAULT; /** * The cached value of the '{@link #getRequires() <em>Requires</em>}' * reference list. <!-- begin-user-doc --> <!-- end-user-doc --> * * @see #getRequires() * @generated * @ordered */ protected EList<DiffElement> requires; /** * The cached value of the '{@link #getRequiredBy() <em>Required By</em>}' * reference list. <!-- begin-user-doc --> <!-- end-user-doc --> * * @see #getRequiredBy() * @generated * @ordered */ protected EList<DiffElement> requiredBy; /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ protected Java2KDMDiffExtensionImpl() { super(); } /** * <!-- begin-user-doc -->. <!-- end-user-doc --> * * @generated */ @Override protected EClass eStaticClass() { return Java2KDMDiffPackage.Literals.JAVA2_KDM_DIFF_EXTENSION; } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ public EList<DiffElement> getSubDiffElements() { if (subDiffElements == null) { subDiffElements = new EObjectContainmentEList<DiffElement>( DiffElement.class, this, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS); } return subDiffElements; } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ public EList<AbstractDiffExtension> getIsHiddenBy() { if (isHiddenBy == null) { isHiddenBy = new EObjectWithInverseResolvingEList.ManyInverse<AbstractDiffExtension>( AbstractDiffExtension.class, this, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY, DiffPackage.ABSTRACT_DIFF_EXTENSION__HIDE_ELEMENTS); } return isHiddenBy; } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ public boolean isConflicting() { return conflicting; } /** * <!-- begin-user-doc --> Adapted to determine the difference kind from the * concrete type of diff element. 
{@inheritDoc} <!-- end-user-doc --> * * @generated not */ public DifferenceKind getKind() { if (this instanceof StatementDelete || this instanceof ClassDelete || this instanceof FieldDelete || this instanceof ImportDelete || this instanceof ImplementsInterfaceDelete || this instanceof MethodDelete || this instanceof PackageDelete ) { kind = DifferenceKind.DELETION; } else if (this instanceof StatementInsert || this instanceof ClassInsert || this instanceof FieldInsert || this instanceof ImportInsert || this instanceof ImplementsInterfaceInsert || this instanceof MethodInsert || this instanceof PackageInsert ) { kind = DifferenceKind.ADDITION; } else if (this instanceof StatementChange ) { kind = DifferenceKind.CHANGE; } return kind; } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ public boolean isRemote() { return remote; } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ public void setRemote(boolean newRemote) { boolean oldRemote = remote; remote = newRemote; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE, oldRemote, remote)); } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ public EList<DiffElement> getRequires() { if (requires == null) { requires = new EObjectWithInverseResolvingEList.ManyInverse<DiffElement>( DiffElement.class, this, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES, DiffPackage.DIFF_ELEMENT__REQUIRED_BY); } return requires; } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ public EList<DiffElement> getRequiredBy() { if (requiredBy == null) { requiredBy = new EObjectWithInverseResolvingEList.ManyInverse<DiffElement>( DiffElement.class, this, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY, DiffPackage.DIFF_ELEMENT__REQUIRES); } return requiredBy; } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ @SuppressWarnings("unchecked") @Override public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return ((InternalEList<InternalEObject>) (InternalEList<?>) getIsHiddenBy()) .basicAdd(otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return ((InternalEList<InternalEObject>) (InternalEList<?>) getRequires()) .basicAdd(otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return ((InternalEList<InternalEObject>) (InternalEList<?>) getRequiredBy()) .basicAdd(otherEnd, msgs); } return super.eInverseAdd(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: return ((InternalEList<?>) getSubDiffElements()).basicRemove( otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return ((InternalEList<?>) getIsHiddenBy()).basicRemove(otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return ((InternalEList<?>) getRequires()).basicRemove(otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return ((InternalEList<?>) getRequiredBy()).basicRemove(otherEnd, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> <!-- 
end-user-doc --> * * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: return getSubDiffElements(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return getIsHiddenBy(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__CONFLICTING: return isConflicting(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__KIND: return getKind(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: return isRemote(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return getRequires(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return getRequiredBy(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ @SuppressWarnings("unchecked") @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: getSubDiffElements().clear(); getSubDiffElements().addAll( (Collection<? extends DiffElement>) newValue); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: getIsHiddenBy().clear(); getIsHiddenBy().addAll( (Collection<? extends AbstractDiffExtension>) newValue); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: setRemote((Boolean) newValue); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: getRequires().clear(); getRequires().addAll((Collection<? extends DiffElement>) newValue); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: getRequiredBy().clear(); getRequiredBy() .addAll((Collection<? extends DiffElement>) newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: getSubDiffElements().clear(); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: getIsHiddenBy().clear(); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: setRemote(REMOTE_EDEFAULT); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: getRequires().clear(); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: getRequiredBy().clear(); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: return subDiffElements != null && !subDiffElements.isEmpty(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return isHiddenBy != null && !isHiddenBy.isEmpty(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__CONFLICTING: return conflicting != CONFLICTING_EDEFAULT; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__KIND: return kind != KIND_EDEFAULT; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: return remote != REMOTE_EDEFAULT; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return requires != null && !requires.isEmpty(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return requiredBy != null && !requiredBy.isEmpty(); } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ @Override public int eBaseStructuralFeatureID(int derivedFeatureID, Class<?> baseClass) { 
if (baseClass == DiffElement.class) { switch (derivedFeatureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: return DiffPackage.DIFF_ELEMENT__SUB_DIFF_ELEMENTS; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return DiffPackage.DIFF_ELEMENT__IS_HIDDEN_BY; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__CONFLICTING: return DiffPackage.DIFF_ELEMENT__CONFLICTING; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__KIND: return DiffPackage.DIFF_ELEMENT__KIND; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: return DiffPackage.DIFF_ELEMENT__REMOTE; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return DiffPackage.DIFF_ELEMENT__REQUIRES; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return DiffPackage.DIFF_ELEMENT__REQUIRED_BY; default: return -1; } } return super.eBaseStructuralFeatureID(derivedFeatureID, baseClass); } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ @Override public int eDerivedStructuralFeatureID(int baseFeatureID, Class<?> baseClass) { if (baseClass == DiffElement.class) { switch (baseFeatureID) { case DiffPackage.DIFF_ELEMENT__SUB_DIFF_ELEMENTS: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS; case DiffPackage.DIFF_ELEMENT__IS_HIDDEN_BY: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY; case DiffPackage.DIFF_ELEMENT__CONFLICTING: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__CONFLICTING; case DiffPackage.DIFF_ELEMENT__KIND: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__KIND; case DiffPackage.DIFF_ELEMENT__REMOTE: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE; case DiffPackage.DIFF_ELEMENT__REQUIRES: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES; case DiffPackage.DIFF_ELEMENT__REQUIRED_BY: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY; default: return -1; } } return super.eDerivedStructuralFeatureID(baseFeatureID, baseClass); } /** * <!-- begin-user-doc --> <!-- end-user-doc --> * * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (conflicting: "); result.append(conflicting); result.append(", kind: "); result.append(kind); result.append(", remote: "); result.append(remote); result.append(')'); return result.toString(); } } // Java2KDMDiffExtensionImpl
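The one hand-written method in the generated file above is getKind() (marked @generated not), which derives the DifferenceKind from the concrete diff element type instead of returning the stored field. Below is a minimal, dependency-free sketch of that derivation pattern; the KindDerivationSketch class and its nested Diff types are invented stand-ins for the generated java2kdmdiff model and EMF Compare enums, not part of them.

public class KindDerivationSketch {

    enum Kind { ADDITION, DELETION, CHANGE }

    interface Diff {}
    static final class ClassInsert implements Diff {}
    static final class ClassDelete implements Diff {}
    static final class StatementChange implements Diff {}

    // Derive the kind from the concrete subtype rather than from a cached attribute.
    static Kind kindOf(Diff diff) {
        if (diff instanceof ClassDelete) {
            return Kind.DELETION;   // *Delete types map to DELETION
        } else if (diff instanceof ClassInsert) {
            return Kind.ADDITION;   // *Insert types map to ADDITION
        } else if (diff instanceof StatementChange) {
            return Kind.CHANGE;     // *Change types map to CHANGE
        }
        return Kind.ADDITION;       // fall back to the generated default
    }

    public static void main(String[] args) {
        System.out.println(kindOf(new ClassDelete()));      // DELETION
        System.out.println(kindOf(new StatementChange()));  // CHANGE
    }
}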
MoDisco/org.splevo.modisco.java.diffing/src-gen/org/splevo/modisco/java/diffing/java2kdmdiff/impl/Java2KDMDiffExtensionImpl.java
/** */ package org.splevo.modisco.java.diffing.java2kdmdiff.impl; import java.util.Collection; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.compare.diff.metamodel.AbstractDiffExtension; import org.eclipse.emf.compare.diff.metamodel.DiffElement; import org.eclipse.emf.compare.diff.metamodel.DiffPackage; import org.eclipse.emf.compare.diff.metamodel.DifferenceKind; import org.eclipse.emf.compare.diff.metamodel.impl.AbstractDiffExtensionImpl; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.util.EObjectContainmentEList; import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList; import org.eclipse.emf.ecore.util.InternalEList; import org.splevo.modisco.java.diffing.java2kdmdiff.Java2KDMDiffExtension; import org.splevo.modisco.java.diffing.java2kdmdiff.Java2KDMDiffPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Extension</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li>{@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getSubDiffElements <em>Sub Diff Elements</em>}</li> * <li>{@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getIsHiddenBy <em>Is Hidden By</em>}</li> * <li>{@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#isConflicting <em>Conflicting</em>}</li> * <li>{@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getKind <em>Kind</em>}</li> * <li>{@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#isRemote <em>Remote</em>}</li> * <li>{@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getRequires <em>Requires</em>}</li> * <li>{@link org.splevo.modisco.java.diffing.java2kdmdiff.impl.Java2KDMDiffExtensionImpl#getRequiredBy <em>Required By</em>}</li> * </ul> * </p> * * @generated */ public abstract class Java2KDMDiffExtensionImpl extends AbstractDiffExtensionImpl implements Java2KDMDiffExtension { /** * The cached value of the '{@link #getSubDiffElements() <em>Sub Diff Elements</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getSubDiffElements() * @generated * @ordered */ protected EList<DiffElement> subDiffElements; /** * The cached value of the '{@link #getIsHiddenBy() <em>Is Hidden By</em>}' reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getIsHiddenBy() * @generated * @ordered */ protected EList<AbstractDiffExtension> isHiddenBy; /** * The default value of the '{@link #isConflicting() <em>Conflicting</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isConflicting() * @generated * @ordered */ protected static final boolean CONFLICTING_EDEFAULT = false; /** * The cached value of the '{@link #isConflicting() <em>Conflicting</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isConflicting() * @generated * @ordered */ protected boolean conflicting = CONFLICTING_EDEFAULT; /** * The default value of the '{@link #getKind() <em>Kind</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getKind() * @generated * @ordered */ protected static final DifferenceKind KIND_EDEFAULT = DifferenceKind.ADDITION; /** * The cached value of the '{@link #getKind() <em>Kind</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getKind() * @generated * @ordered */ protected DifferenceKind kind = KIND_EDEFAULT; /** * The default value of the '{@link #isRemote() <em>Remote</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isRemote() * @generated * @ordered */ protected static final boolean REMOTE_EDEFAULT = false; /** * The cached value of the '{@link #isRemote() <em>Remote</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isRemote() * @generated * @ordered */ protected boolean remote = REMOTE_EDEFAULT; /** * The cached value of the '{@link #getRequires() <em>Requires</em>}' reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getRequires() * @generated * @ordered */ protected EList<DiffElement> requires; /** * The cached value of the '{@link #getRequiredBy() <em>Required By</em>}' reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getRequiredBy() * @generated * @ordered */ protected EList<DiffElement> requiredBy; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected Java2KDMDiffExtensionImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return Java2KDMDiffPackage.Literals.JAVA2_KDM_DIFF_EXTENSION; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<DiffElement> getSubDiffElements() { if (subDiffElements == null) { subDiffElements = new EObjectContainmentEList<DiffElement>( DiffElement.class, this, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS); } return subDiffElements; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<AbstractDiffExtension> getIsHiddenBy() { if (isHiddenBy == null) { isHiddenBy = new EObjectWithInverseResolvingEList.ManyInverse<AbstractDiffExtension>( AbstractDiffExtension.class, this, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY, DiffPackage.ABSTRACT_DIFF_EXTENSION__HIDE_ELEMENTS); } return isHiddenBy; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public boolean isConflicting() { return conflicting; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public DifferenceKind getKind() { return kind; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public boolean isRemote() { return remote; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setRemote(boolean newRemote) { boolean oldRemote = remote; remote = newRemote; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE, oldRemote, remote)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<DiffElement> getRequires() { if (requires == null) { requires = new EObjectWithInverseResolvingEList.ManyInverse<DiffElement>( DiffElement.class, this, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES, DiffPackage.DIFF_ELEMENT__REQUIRED_BY); } return requires; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<DiffElement> getRequiredBy() { if (requiredBy == null) { requiredBy = new 
EObjectWithInverseResolvingEList.ManyInverse<DiffElement>( DiffElement.class, this, Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY, DiffPackage.DIFF_ELEMENT__REQUIRES); } return requiredBy; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @SuppressWarnings("unchecked") @Override public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return ((InternalEList<InternalEObject>) (InternalEList<?>) getIsHiddenBy()) .basicAdd(otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return ((InternalEList<InternalEObject>) (InternalEList<?>) getRequires()) .basicAdd(otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return ((InternalEList<InternalEObject>) (InternalEList<?>) getRequiredBy()) .basicAdd(otherEnd, msgs); } return super.eInverseAdd(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: return ((InternalEList<?>) getSubDiffElements()).basicRemove( otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return ((InternalEList<?>) getIsHiddenBy()).basicRemove(otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return ((InternalEList<?>) getRequires()).basicRemove(otherEnd, msgs); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return ((InternalEList<?>) getRequiredBy()).basicRemove(otherEnd, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: return getSubDiffElements(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return getIsHiddenBy(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__CONFLICTING: return isConflicting(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__KIND: return getKind(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: return isRemote(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return getRequires(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return getRequiredBy(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @SuppressWarnings("unchecked") @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: getSubDiffElements().clear(); getSubDiffElements().addAll( (Collection<? extends DiffElement>) newValue); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: getIsHiddenBy().clear(); getIsHiddenBy().addAll( (Collection<? extends AbstractDiffExtension>) newValue); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: setRemote((Boolean) newValue); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: getRequires().clear(); getRequires().addAll((Collection<? 
extends DiffElement>) newValue); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: getRequiredBy().clear(); getRequiredBy() .addAll((Collection<? extends DiffElement>) newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: getSubDiffElements().clear(); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: getIsHiddenBy().clear(); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: setRemote(REMOTE_EDEFAULT); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: getRequires().clear(); return; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: getRequiredBy().clear(); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: return subDiffElements != null && !subDiffElements.isEmpty(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return isHiddenBy != null && !isHiddenBy.isEmpty(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__CONFLICTING: return conflicting != CONFLICTING_EDEFAULT; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__KIND: return kind != KIND_EDEFAULT; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: return remote != REMOTE_EDEFAULT; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return requires != null && !requires.isEmpty(); case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return requiredBy != null && !requiredBy.isEmpty(); } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public int eBaseStructuralFeatureID(int derivedFeatureID, Class<?> baseClass) { if (baseClass == DiffElement.class) { switch (derivedFeatureID) { case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS: return DiffPackage.DIFF_ELEMENT__SUB_DIFF_ELEMENTS; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY: return DiffPackage.DIFF_ELEMENT__IS_HIDDEN_BY; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__CONFLICTING: return DiffPackage.DIFF_ELEMENT__CONFLICTING; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__KIND: return DiffPackage.DIFF_ELEMENT__KIND; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE: return DiffPackage.DIFF_ELEMENT__REMOTE; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES: return DiffPackage.DIFF_ELEMENT__REQUIRES; case Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY: return DiffPackage.DIFF_ELEMENT__REQUIRED_BY; default: return -1; } } return super.eBaseStructuralFeatureID(derivedFeatureID, baseClass); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public int eDerivedStructuralFeatureID(int baseFeatureID, Class<?> baseClass) { if (baseClass == DiffElement.class) { switch (baseFeatureID) { case DiffPackage.DIFF_ELEMENT__SUB_DIFF_ELEMENTS: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__SUB_DIFF_ELEMENTS; case DiffPackage.DIFF_ELEMENT__IS_HIDDEN_BY: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__IS_HIDDEN_BY; case DiffPackage.DIFF_ELEMENT__CONFLICTING: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__CONFLICTING; case DiffPackage.DIFF_ELEMENT__KIND: return 
Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__KIND; case DiffPackage.DIFF_ELEMENT__REMOTE: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REMOTE; case DiffPackage.DIFF_ELEMENT__REQUIRES: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRES; case DiffPackage.DIFF_ELEMENT__REQUIRED_BY: return Java2KDMDiffPackage.JAVA2_KDM_DIFF_EXTENSION__REQUIRED_BY; default: return -1; } } return super.eDerivedStructuralFeatureID(baseFeatureID, baseClass); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (conflicting: "); result.append(conflicting); result.append(", kind: "); result.append(kind); result.append(", remote: "); result.append(remote); result.append(')'); return result.toString(); } } //Java2KDMDiffExtensionImpl
Implemented difference kind detection
MoDisco/org.splevo.modisco.java.diffing/src-gen/org/splevo/modisco/java/diffing/java2kdmdiff/impl/Java2KDMDiffExtensionImpl.java
Implemented difference kind detection
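The commit above contains only the generated abstract base class, whose kind attribute defaults to DifferenceKind.ADDITION; the "difference kind detection" named in the commit message would live in concrete subclasses that are not part of this record. A minimal sketch of how such a subclass could report its kind by overriding getKind() — the class name and the hard-coded literal are illustrative assumptions, not code from the commit:

package org.splevo.modisco.java.diffing.java2kdmdiff.impl;

import org.eclipse.emf.compare.diff.metamodel.DifferenceKind;

// Hypothetical concrete extension; only Java2KDMDiffExtensionImpl and
// DifferenceKind come from the generated code above.
public class StatementInsertSketchImpl extends Java2KDMDiffExtensionImpl {

    // An inserted element is reported as an addition.
    @Override
    public DifferenceKind getKind() {
        return DifferenceKind.ADDITION;
    }
}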
Java
agpl-3.0
dcb5535843bd9346fb12097c72604db58a1c5a0b
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
4fad189c-2e61-11e5-9284-b827eb9e62be
hello.java
4fa7ad1c-2e61-11e5-9284-b827eb9e62be
4fad189c-2e61-11e5-9284-b827eb9e62be
hello.java
4fad189c-2e61-11e5-9284-b827eb9e62be
Java
agpl-3.0
6b536bb38aa943208218e75f3f30c83529c5656c
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
d17020f0-2e60-11e5-9284-b827eb9e62be
hello.java
d16a85dc-2e60-11e5-9284-b827eb9e62be
d17020f0-2e60-11e5-9284-b827eb9e62be
hello.java
d17020f0-2e60-11e5-9284-b827eb9e62be
Java
agpl-3.0
30697dde19a8b4528a5aa251570e8d8f16b14f61
0
Levis92/proton-text
package edu.chl.proton.model; import java.io.*; import java.util.ArrayList; import java.util.List; /** * @author Stina Werme * Created by stinawerme on 01/05/17. */ public class File extends FileSystemEntity { private boolean isSaved; // Holds the text read from the file we opened. private final List<String> lines = new ArrayList<String>(); public File(String name) { this.setName(name); } public File(String name, Folder parentFolder) { this.setName(name); parentFolder.addFile(this); } protected void setIsSaved(boolean state) { isSaved = state; } protected boolean isSaved() { return isSaved; } protected void save(List<String> text) throws IOException { File file = new File(this.getPath()); BufferedWriter out = new BufferedWriter(new FileWriter(String.valueOf(file))); for(String line : text) { out.write(line); } out.close(); setIsSaved(true); } // TODO protected String lastEdited() { return ""; } // TODO protected void remove() { } // Aqcuires the text from the file we opened. protected void aqcuireText(){ // This will reference one line at a time String line = null; try { // FileReader reads text files in the default encoding. FileReader fileReader = new FileReader(this.getPath()); // Always wrap FileReader in BufferedReader. BufferedReader bufferedReader = new BufferedReader(fileReader); while((line = bufferedReader.readLine()) != null) { lines.add(line); } // Close file. bufferedReader.close(); } catch(FileNotFoundException ex) { System.out.println( "Unable to open file '" + this.getPath() + "'"); } catch(IOException ex) { System.out.println( "Error reading file '" + this.getPath() + "'"); } } }
src/main/java/edu/chl/proton/model/File.java
package edu.chl.proton.model; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException; import java.util.List; /** * @author Stina Werme * Created by stinawerme on 01/05/17. */ public class File extends FileSystemEntity { private boolean isSaved; public File(String name) { this.setName(name); } public File(String name, Folder parentFolder) { this.setName(name); parentFolder.addFile(this); } protected void setIsSaved(boolean state) { isSaved = state; } protected boolean isSaved() { return isSaved; } protected void save(List<String> text) throws IOException { File file = new File(this.getPath()); BufferedWriter out = new BufferedWriter(new FileWriter(String.valueOf(file))); for(String line : text) { out.write(line); } out.close(); setIsSaved(true); } // TODO protected String lastEdited() { return ""; } // TODO protected void remove() { } }
Added aqcuire text in File
src/main/java/edu/chl/proton/model/File.java
Added aqcuire text in File
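The aqcuireText() method added above reads the opened file line by line with FileReader and BufferedReader. For comparison, a self-contained sketch of the same step using java.nio, outside the project ("hello.txt" is a placeholder path, not a file from the repository):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

// Standalone sketch: read every line of a text file into a list, then print it.
public class ReadLinesSketch {
    public static void main(String[] args) throws IOException {
        List<String> lines = Files.readAllLines(Paths.get("hello.txt"), StandardCharsets.UTF_8);
        lines.forEach(System.out::println);
    }
}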
Java
agpl-3.0
b9bcf2df8659adcf520a2667b875c210f217893b
0
digitalbazaar/monarch,digitalbazaar/monarch,digitalbazaar/monarch,digitalbazaar/monarch,digitalbazaar/monarch
/* * Copyright (c) 2006 Digital Bazaar, Inc. All rights reserved. */ package com.db.net; import com.db.logging.Logger; import com.db.logging.LoggerManager; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.util.HashMap; import java.util.Iterator; /** * An abstract http web request servicer. Contains convenience methods for * setting relative path permissions, sending and receiving files, and * storing the servicer's base path. * * @author Dave Longley */ public abstract class AbstractHttpWebRequestServicer implements HttpWebRequestServicer { /** * The servicer's path. This is the path http clients must hit to * get a response from this servicer. */ protected String mServicerPath; /** * A map of relative paths to their permissions. */ protected HashMap mPathToPermissions; /** * Creates an abstract http servicer. */ public AbstractHttpWebRequestServicer() { setHttpWebRequestServicerPath("/"); mPathToPermissions = new HashMap(); } /** * Reads a file from an http web request and saves it to disk. * * @param request the http web request to read the file from. * @param filename the name for the file. * * @return true if successfully read, false if not. */ protected boolean readFile(HttpWebRequest request, String filename) { return readFile(request, new File(filename)); } /** * Reads a file from an http web request and saves it to disk. * * @param request the http web request to read the file from. * @param file the file to write to. * * @return true if successfully read, false if not. */ protected boolean readFile(HttpWebRequest request, File file) { boolean rval = false; try { FileOutputStream fos = new FileOutputStream(file); // read the body into a file request.receiveBody(fos); // close the file fos.close(); rval = true; } catch(Throwable t) { getLogger().error(getClass(), "could not read file from http web request!"); getLogger().debug(getClass(), Logger.getStackTrace(t)); } return rval; } /** * Reads a file from an http web request body part body and saves it to disk. * * @param request the http web request to read the file from. * @param header the http body part header. * @param filename the name for the file. * * @return true if successfully read, false if not. */ protected boolean readFileFromBodyPartBody( HttpWebRequest request, HttpBodyPartHeader header, String filename) { return readFileFromBodyPartBody(request, header, new File(filename)); } /** * Reads a file from an http web request body part body and saves it to disk. * * @param request the http web request to read the file from. * @param header the http body part header. * @param file the file to write to. * * @return true if successfully read, false if not. */ protected boolean readFileFromBodyPartBody( HttpWebRequest request, HttpBodyPartHeader header, File file) { boolean rval = false; try { FileOutputStream fos = new FileOutputStream(file); // read the body into a file request.receiveBodyPartBody(fos, null); // close the file fos.close(); rval = true; } catch(Throwable t) { getLogger().error(getClass(), "could not read file from http web request body part body!"); getLogger().debug(getClass(), Logger.getStackTrace(t)); } return rval; } /** * Reads from a file and writes it with the passed http web response. * * @param response the http web response to write to. * @param filename the name of the file to read from. * * @return true if successfully sent, false if not. 
*/ protected boolean sendFile(HttpWebResponse response, String filename) { return sendFile(response, new File(filename)); } /** * Reads from a file and writes it with the passed http web response. * * @param response the http web response to write to. * @param file the file to read from. * * @return true if successfully sent, false if not. */ protected boolean sendFile(HttpWebResponse response, File file) { boolean rval = false; try { FileInputStream fis = new FileInputStream(file); // send the file data via the response response.sendBody(fis); // close the file fis.close(); rval = true; } catch(Throwable t) { getLogger().error(getClass(), "could not send file in http web response!"); getLogger().debug(getClass(), Logger.getStackTrace(t)); } return rval; } /** * Reads from a file and writes it with the passed http web response in * a body part body. * * @param response the http web response to write to. * @param bodyPartHeader the http body part header to use. * @param filename the name of the file to read from. * @param lastBodyPart true if the file is the last body part, false if not. * * @return true if successfully sent, false if not. */ protected boolean sendFileInBodyPartBody( HttpWebResponse response, HttpBodyPartHeader bodyPartHeader, String filename, boolean lastBodyPart) { return sendFileInBodyPartBody(response, bodyPartHeader, new File(filename), lastBodyPart); } /** * Reads from a file and writes it with the passed http web response. * * @param response the http web response to write to. * @param bodyPartHeader the http body part header to use. * @param file the file to read from. * @param lastBodyPart true if the file is the last body part, false if not. * * @return true if successfully sent, false if not. */ protected boolean sendFileInBodyPartBody( HttpWebResponse response, HttpBodyPartHeader bodyPartHeader, File file, boolean lastBodyPart) { boolean rval = false; try { // send body part header if(response.sendBodyPartHeader(bodyPartHeader)) { FileInputStream fis = new FileInputStream(file); // send the file data in a body part body response.sendBodyPartBody(fis, bodyPartHeader, lastBodyPart); // close the file fis.close(); rval = true; } } catch(Throwable t) { getLogger().error(getClass(), "could not send file in http web response body part body!"); getLogger().debug(getClass(), Logger.getStackTrace(t)); } return rval; } /** * Gets a path that's relative to this servicer's path from * a request path. * * @param path the request path. * * @return the path relative to this servicer. */ protected String getServicerRelativePath(String path) { // convert all "\\" to "/" path = path.replaceAll("\\\\", "/"); getLogger().debug(getClass(), "http web request servicer path is:\n'" + getHttpWebRequestServicerPath() + "'"); // strip off servicer path int index = path.indexOf(getHttpWebRequestServicerPath()); if(index == 0) { int length = index + getHttpWebRequestServicerPath().length(); if(path.length() > length) { path = path.substring(length); } else { path = ""; } } // get path up to last slash index = path.lastIndexOf("/"); if(index != -1) { path = path.substring(0, index + 1); } else { path = ""; } return path; } /** * Determines acceptable http versions for an http web request. * * @param request the http web request to check. * * @return true if the passed request has an acceptable version * false if not. 
*/ protected boolean versionSupported(HttpWebRequest request) { boolean rval = false; // supports only http/1.0, http/1.1 by default if(request.getHeader().getVersion().equals("HTTP/1.0") || request.getHeader().getVersion().equals("HTTP/1.1")) { rval = true; } return rval; } /** * Services an http web request (via whatever means is appropriate) and * responds using the passed HttpWebResponse object. * * @param request the http web request. * @param response the http web response. */ public abstract void serviceHttpWebRequest( HttpWebRequest request, HttpWebResponse response); /** * Allows an http web request path to be set for this servicer. This is the * base path http clients must request for this servicer to be given the * http request to service. A servicer may need to know this information. * * @param path the servicer's path. */ public void setHttpWebRequestServicerPath(String path) { mServicerPath = path; } /** * Gets the http web request servicer's path. This is the path http clients * must hit to get a response from this servicer. * * @return the servicer's path. */ public String getHttpWebRequestServicerPath() { return mServicerPath; } /** * Sets the path permissions for a path. * * @param path the path, relative to the servicer path, to set the * permissions for. * @param permissions the permissions to set (i.e.: "rw" for read-write). * @param recursive true if subpaths should receive the same permissions, * false if not. */ public void setPathPermissions( String path, String permissions, boolean recursive) { // add a "c" for recursive, if appropriate if(recursive) { permissions += "c"; } // convert all "\\" to "/" path = path.replaceAll("\\\\", "/"); // make sure path doesn't begin with a slash if(path.startsWith("/")) { if(path.length() > 1) { path = path.substring(1); } else { path = ""; } } // make sure the path ends with a slash if not blank if(!path.equals("") && !path.endsWith("/")) { path = path + "/"; } mPathToPermissions.put(path, permissions); } /** * Gets the permissions for the passed path. * * @param path the path to get permissions for. * * @return the permissions for the passed path. */ public String getPathPermissions(String path) { String permissions = ""; // makes sure the path is relative to the servicer path path = getServicerRelativePath(path); getLogger().debug(getClass(), "get permissions for servicer relative path:\n'" + path + "'"); // see if the exact path is in the map String value = (String)mPathToPermissions.get(path); if(value != null) { // found permissions permissions = value; getLogger().debug(getClass(), "permissions found:\n'" + permissions + "'"); } else { getLogger().debug(getClass(), "checking for recursive path permissions..."); // get the path components of the path String[] pathComps = path.split("/"); Iterator i = mPathToPermissions.keySet().iterator(); while(i.hasNext()) { String nextPath = (String)i.next(); boolean recursive = hasRecursivePermissions(nextPath); String gluedComps = ""; for(int n = 0; n < pathComps.length && gluedComps.length() < nextPath.length(); n++) { // if the glued components equal nextPath and the path // is recursive, then get permissions if(gluedComps.equals(nextPath) && recursive) { permissions = (String)mPathToPermissions.get(nextPath); getLogger().debug(getClass(), "permissions found:\n'" + permissions + "'"); break; } // glue another component on gluedComps += pathComps[n] + "/"; } } } return permissions; } /** * Returns true if the passed path is readable, false if not. * * @param path the path to inspect. 
* * @return true if the path is readable, false if not. */ public boolean isReadable(String path) { boolean rval = false; String permissions = getPathPermissions(path); if(permissions != null) { rval = (permissions.indexOf("r") != -1); } return rval; } /** * Returns true if the passed path is writable, false if not. * * @param path the path to inspect. * * @return true if the path is writable, false if not. */ public boolean isWritable(String path) { boolean rval = false; String permissions = getPathPermissions(path); if(permissions != null) { rval = (permissions.indexOf("r") != -1); } return rval; } /** * Returns true if the passed path has recursive permissions, false if * not. * * @param path the path to inspect. * * @return true if path permissions are recursive, false if not. */ public boolean hasRecursivePermissions(String path) { boolean rval = false; // convert all "\\" to "/" path = path.replaceAll("\\\\", "/"); String permissions = (String)mPathToPermissions.get(path); rval = (permissions.indexOf("c") != -1); return rval; } /** * Gets the logger. * * @return the logger. */ public Logger getLogger() { return LoggerManager.getLogger("dbnet"); } }
net/java/src/com/db/net/AbstractHttpWebRequestServicer.java
/* * Copyright (c) 2006 Digital Bazaar, Inc. All rights reserved. */ package com.db.net; import com.db.logging.Logger; import com.db.logging.LoggerManager; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.util.HashMap; import java.util.Iterator; /** * An abstract http web request servicer. Contains convenience methods for * setting relative path permissions, sending and receiving files, and * storing the servicer's base path. * * @author Dave Longley */ public abstract class AbstractHttpWebRequestServicer implements HttpWebRequestServicer { /** * The servicer's path. This is the path http clients must hit to * get a response from this servicer. */ protected String mServicerPath; /** * A map of relative paths to their permissions. */ protected HashMap mPathToPermissions; /** * Creates an abstract http servicer. */ public AbstractHttpWebRequestServicer() { setHttpWebRequestServicerPath("/"); mPathToPermissions = new HashMap(); } /** * Reads a file from an http web request and saves it to disk. * * @param request the http web request to read the file from. * @param filename the name for the file. * * @return true if successfully read, false if not. */ protected boolean readFile(HttpWebRequest request, String filename) { return readFile(request, new File(filename)); } /** * Reads a file from an http web request and saves it to disk. * * @param request the http web request to read the file from. * @param file the file to write to. * * @return true if successfully read, false if not. */ protected boolean readFile(HttpWebRequest request, File file) { boolean rval = false; try { FileOutputStream fos = new FileOutputStream(file); // read the body into a file request.receiveBody(fos); // close the file fos.close(); rval = true; } catch(Throwable t) { getLogger().error(getClass(), "could not read file from http web request!"); getLogger().debug(getClass(), Logger.getStackTrace(t)); } return rval; } /** * Reads a file from an http web request body part body and saves it to disk. * * @param request the http web request to read the file from. * @param header the http body part header. * @param filename the name for the file. * * @return true if successfully read, false if not. */ protected boolean readFileFromBodyPartBody( HttpWebRequest request, HttpBodyPartHeader header, String filename) { return readFileFromBodyPartBody(request, header, new File(filename)); } /** * Reads a file from an http web request body part body and saves it to disk. * * @param request the http web request to read the file from. * @param header the http body part header. * @param file the file to write to. * * @return true if successfully read, false if not. */ protected boolean readFileFromBodyPartBody( HttpWebRequest request, HttpBodyPartHeader header, File file) { boolean rval = false; try { FileOutputStream fos = new FileOutputStream(file); // read the body into a file request.receiveBodyPartBody(fos, null); // close the file fos.close(); rval = true; } catch(Throwable t) { getLogger().error(getClass(), "could not read file from http web request body part body!"); getLogger().debug(getClass(), Logger.getStackTrace(t)); } return rval; } /** * Reads from a file and writes it with the passed http web response. * * @param response the http web response to write to. * @param filename the name of the file to read from. * * @return true if successfully sent, false if not. 
*/ protected boolean sendFile(HttpWebResponse response, String filename) { return sendFile(response, new File(filename)); } /** * Reads from a file and writes it with the passed http web response. * * @param response the http web response to write to. * @param file the file to read from. * * @return true if successfully sent, false if not. */ protected boolean sendFile(HttpWebResponse response, File file) { boolean rval = false; try { FileInputStream fis = new FileInputStream(file); // send the file data via the response response.sendBody(fis); // close the file fis.close(); rval = true; } catch(Throwable t) { getLogger().error(getClass(), "could not send file in http web response!"); getLogger().debug(getClass(), Logger.getStackTrace(t)); } return rval; } /** * Reads from a file and writes it with the passed http web response in * a body part body. * * @param response the http web response to write to. * @param bodyPartHeader the http body part header to use. * @param filename the name of the file to read from. * @param lastBodyPart true if the file is the last body part, false if not. * * @return true if successfully sent, false if not. */ protected boolean sendFileInBodyPartBody( HttpWebResponse response, HttpBodyPartHeader bodyPartHeader, String filename, boolean lastBodyPart) { return sendFileInBodyPartBody(response, bodyPartHeader, new File(filename), lastBodyPart); } /** * Reads from a file and writes it with the passed http web response. * * @param response the http web response to write to. * @param bodyPartHeader the http body part header to use. * @param file the file to read from. * @param lastBodyPart true if the file is the last body part, false if not. * * @return true if successfully sent, false if not. */ protected boolean sendFileInBodyPartBody( HttpWebResponse response, HttpBodyPartHeader bodyPartHeader, File file, boolean lastBodyPart) { boolean rval = false; try { // send body part header if(response.sendBodyPartHeader(bodyPartHeader)) { FileInputStream fis = new FileInputStream(file); // send the file data in a body part body response.sendBodyPartBody(fis, bodyPartHeader, lastBodyPart); // close the file fis.close(); rval = true; } } catch(Throwable t) { getLogger().error(getClass(), "could not send file in http web response body part body!"); getLogger().debug(getClass(), Logger.getStackTrace(t)); } return rval; } /** * Gets a path that's relative to this servicer's path from * a request path. * * @param path the request path. * * @return the path relative to this servicer. */ protected String getServicerRelativePath(String path) { // convert all "\\" to "/" path = path.replaceAll("\\\\", "/"); getLogger().debug(getClass(), "http web request servicer path is:\n'" + getHttpWebRequestServicerPath() + "'"); // strip off servicer path int index = path.indexOf(getHttpWebRequestServicerPath()); if(index == 0) { int length = index + getHttpWebRequestServicerPath().length(); if(path.length() > length) { path = path.substring(length); } else { path = ""; } } // get path up to last slash index = path.lastIndexOf("/"); if(index != -1) { path = path.substring(0, index + 1); } else { path = ""; } return path; } /** * Determines acceptable http versions for an http web request. * * @param request the http web request to check. * * @return true if the passed request has an acceptable version * false if not. 
*/ protected boolean versionSupported(HttpWebRequest request) { boolean rval = false; // supports only http/1.0, http/1.1 by default if(request.getHeader().getVersion().equals("HTTP/1.0") || request.getHeader().getVersion().equals("HTTP/1.1")) { rval = true; } return rval; } /** * Services an http web request (via whatever means is appropriate) and * responds using the passed HttpWebResponse object. * * @param request the http web request. * @param response the http web response. */ public abstract void serviceHttpWebRequest( HttpWebRequest request, HttpWebResponse response); /** * Allows an http web request path to be set for this servicer. This is the * base path http clients must request for this servicer to be given the * http request to service. A servicer may need to know this information. * * @param path the servicer's path. */ public void setHttpWebRequestServicerPath(String path) { mServicerPath = path; } /** * Gets the http web request servicer's path. This is the path http clients * must hit to get a response from this servicer. * * @return the servicer's path. */ public String getHttpWebRequestServicerPath() { return mServicerPath; } /** * Sets the path permissions for a path. * * @param path the path, relative to the servicer path, to set the * permissions for. * @param permissions the permissions to set (i.e.: "rw" for read-write). * @param recursive true if subpaths should receive the same permissions, * false if not. */ public void setPathPermissions(String path, String permissions, boolean recursive) { // add a "c" for recursive, if appropriate if(recursive) { permissions += "c"; } // convert all "\\" to "/" path = path.replaceAll("\\\\", "/"); // make sure path doesn't begin with a slash if(path.startsWith("/")) { if(path.length() > 1) { path = path.substring(1); } else { path = ""; } } // make sure the path ends with a slash if not blank if(!path.equals("") && !path.endsWith("/")) { path = path + "/"; } mPathToPermissions.put(path, permissions); } /** * Gets the permissions for the passed path. * * @param path the path to get permissions for. * * @return the permissions for the passed path. */ public String getPathPermissions(String path) { String permissions = ""; // makes sure the path is relative to the servicer path path = getServicerRelativePath(path); getLogger().debug(getClass(), "get permissions for servicer relative path:\n'" + path + "'"); // see if the exact path is in the map String value = (String)mPathToPermissions.get(path); if(value != null) { // found permissions permissions = value; getLogger().debug(getClass(), "permissions found:\n'" + permissions + "'"); } else { getLogger().debug(getClass(), "checking for recursive path permissions..."); // get the path components of the path String[] pathComps = path.split("/"); Iterator i = mPathToPermissions.keySet().iterator(); while(i.hasNext()) { String nextPath = (String)i.next(); boolean recursive = hasRecursivePermissions(nextPath); String gluedComps = ""; for(int n = 0; n < pathComps.length && gluedComps.length() < nextPath.length(); n++) { // if the glued components equal nextPath and the path // is recursive, then get permissions if(gluedComps.equals(nextPath) && recursive) { permissions = (String)mPathToPermissions.get(nextPath); getLogger().debug(getClass(), "permissions found:\n'" + permissions + "'"); break; } // glue another component on gluedComps += pathComps[n] + "/"; } } } return permissions; } /** * Returns true if the passed path is readable, false if not. * * @param path the path to inspect. 
* * @return true if the path is readable, false if not. */ public boolean isReadable(String path) { boolean rval = false; String permissions = getPathPermissions(path); if(permissions != null) { rval = (permissions.indexOf("r") != -1); } return rval; } /** * Returns true if the passed path is writable, false if not. * * @param path the path to inspect. * * @return true if the path is writable, false if not. */ public boolean isWritable(String path) { boolean rval = false; String permissions = getPathPermissions(path); if(permissions != null) { rval = (permissions.indexOf("r") != -1); } return rval; } /** * Returns true if the passed path has recursive permissions, false if * not. * * @param path the path to inspect. * * @return true if path permissions are recursive, false if not. */ public boolean hasRecursivePermissions(String path) { boolean rval = false; // convert all "\\" to "/" path = path.replaceAll("\\\\", "/"); String permissions = (String)mPathToPermissions.get(path); rval = (permissions.indexOf("c") != -1); return rval; } /** * Gets the logger. * * @return the logger. */ public Logger getLogger() { return LoggerManager.getLogger("dbnet"); } }
Minor formatting change.
net/java/src/com/db/net/AbstractHttpWebRequestServicer.java
Minor formatting change.
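In both versions of the class above, isWritable() tests permissions.indexOf("r"), the same check as isReadable(), and hasRecursivePermissions() calls indexOf on a map lookup that can be null for unknown paths; both look like latent bugs rather than part of the stated formatting change. A standalone sketch of the intended read/write/recursive checks over a path-to-permissions map (class and method names are illustrative, not from com.db.net):

import java.util.HashMap;
import java.util.Map;

// Standalone sketch mirroring the permission pattern above, but testing 'w' for
// writability and guarding against missing map entries.
public class PathPermissionsSketch {
    private final Map<String, String> pathToPermissions = new HashMap<>();

    public void setPermissions(String path, String permissions) {
        pathToPermissions.put(path, permissions);
    }

    public boolean isReadable(String path) {
        String permissions = pathToPermissions.get(path);
        return permissions != null && permissions.indexOf('r') != -1;
    }

    public boolean isWritable(String path) {
        String permissions = pathToPermissions.get(path);
        return permissions != null && permissions.indexOf('w') != -1;
    }

    public boolean hasRecursivePermissions(String path) {
        String permissions = pathToPermissions.get(path);
        return permissions != null && permissions.indexOf('c') != -1;
    }

    public static void main(String[] args) {
        PathPermissionsSketch p = new PathPermissionsSketch();
        p.setPermissions("docs/", "rwc");
        System.out.println(p.isWritable("docs/"));              // true
        System.out.println(p.hasRecursivePermissions("docs/")); // true
        System.out.println(p.isReadable("missing/"));           // false
    }
}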
Java
agpl-3.0
bc754992f5152c6692552a02298178af325b96de
0
torakiki/sejda
/* * Created on Jul 4, 2011 * Copyright 2011 by Eduard Weissmann (edi.weissmann@gmail.com). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sejda.cli; import java.util.HashMap; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.sejda.core.Sejda; import org.sejda.core.exception.SejdaRuntimeException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import uk.co.flamingpenguin.jewel.cli.ArgumentValidationException; import uk.co.flamingpenguin.jewel.cli.ArgumentValidationException.ValidationError; import uk.co.flamingpenguin.jewel.cli.Cli; import uk.co.flamingpenguin.jewel.cli.CliFactory; /** * Sejda command line service. Responsible for interpreting the arguments, displaying help requests or delegating execution of commands * * @author Eduard Weissmann * */ public class SejdaConsole { public static final String EXECUTABLE_NAME = "sejda-console"; private static final Logger LOG = LoggerFactory.getLogger(SejdaConsole.class); private final Cli<GeneralCliArguments> generalCli = CliFactory.createCli(GeneralCliArguments.class); private final RawArguments arguments; private final TaskExecutionAdapter taskExecutionAdapter; private GeneralCliArguments generalCliArguments; public SejdaConsole(String[] rawArguments, TaskExecutionAdapter taskExecutionAdapter) { this.arguments = new RawArguments(rawArguments.clone()); this.taskExecutionAdapter = taskExecutionAdapter; } /** * Interprets and executes the console command */ public void execute() { try { doExecute(); } catch (ArgumentValidationException e) { LOG.info(e.getMessage()); } catch (SejdaRuntimeException e) { LOG.error(e.getMessage(), e); } } private void doExecute() throws ArgumentValidationException { LOG.debug("Starting execution with arguments: " + arguments); parseGeneralCliArguments(); if (isNoCommandSpecified()) { if (isVersionRequest() || isLicenseRequest()) { printVersionAndLicense(); } else { printGeneralHelp(); } } else { CliCommand command = getCommandSpecified(); if (isCommandHelpRequested()) { printCommandHelp(command); } else { validateNoDuplicateCommandArguments(); executeCommand(command); } } LOG.debug("Completed execution"); } /** * throws an exception if there are duplicate option:value pairs specified, that would override each other silently otherwise * * @throws ArgumentValidationException */ private void validateNoDuplicateCommandArguments() throws ArgumentValidationException { Map<String, Object> uniqueArguments = new HashMap<String, Object>(); for (final String eachArgument : arguments.getCommandArguments()) { if (uniqueArguments.containsKey(eachArgument) && StringUtils.startsWith(eachArgument, "-")) { throw new ArgumentValidationException(new ValidationError() { public String getMessage() { return "Option '" + eachArgument + "' is specified twice. Please note that the correct way to specify a list of values for an option is to repeat the values after the option, without re-stating the option name. 
Example: --files /tmp/file1.pdf /tmp/files2.pdf"; } public ErrorType getErrorType() { return ErrorType.AdditionalValue; } @Override public String toString() { return getMessage(); } }); } uniqueArguments.put(eachArgument, eachArgument); } } private void executeCommand(CliCommand command) throws ArgumentValidationException { getTaskExecutionAdapter().execute(command.parseTaskParameters(arguments.getCommandArguments())); } private void parseGeneralCliArguments() throws ArgumentValidationException { generalCliArguments = generalCli.parseArguments(arguments.getGeneralArguments()); } private void printCommandHelp(CliCommand command) { LOG.info(command.getHelpMessage()); } private boolean isCommandHelpRequested() { return generalCliArguments.isHelp(); } private CliCommand getCommandSpecified() { return generalCliArguments.getCommand().getCommand(); } private void printGeneralHelp() { LOG.info(new GeneralHelpFormatter().getFormattedString()); } private void printVersionAndLicense() { StringBuilder info = new StringBuilder(String.format("\nSejda Console (Version %s)\n", Sejda.VERSION)); info.append("(see http://www.sejda.org for more information)\n\n"); info.append("Copyright 2011 by Andrea Vacondio, Eduard Weissmann and others.\n" + "\n" + "Licensed under the Apache License, Version 2.0 (the \"License\");\n" + "you may not use this file except in compliance with the License.\n" + "You may obtain a copy of the License at \n" + "\n" + "http://www.apache.org/licenses/LICENSE-2.0\n" + "\n" + "Unless required by applicable law or agreed to in writing, software\n" + "distributed under the License is distributed on an \"AS IS\" BASIS,\n" + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + "See the License for the specific language governing permissions and \n" + "limitations under the License. "); LOG.info(info.toString()); } private boolean isNoCommandSpecified() { return !generalCliArguments.isCommand(); } private boolean isVersionRequest() { return generalCliArguments.isVersion(); } private boolean isLicenseRequest() { return generalCliArguments.isLicense(); } TaskExecutionAdapter getTaskExecutionAdapter() { return taskExecutionAdapter; } }
sejda-console/src/main/java/org/sejda/cli/SejdaConsole.java
/* * Created on Jul 4, 2011 * Copyright 2011 by Eduard Weissmann (edi.weissmann@gmail.com). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sejda.cli; import java.util.HashMap; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.sejda.core.Sejda; import org.sejda.core.exception.SejdaRuntimeException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import uk.co.flamingpenguin.jewel.cli.ArgumentValidationException; import uk.co.flamingpenguin.jewel.cli.Cli; import uk.co.flamingpenguin.jewel.cli.CliFactory; /** * Sejda command line service. Responsible for interpreting the arguments, displaying help requests or delegating execution of commands * * @author Eduard Weissmann * */ public class SejdaConsole { public static final String EXECUTABLE_NAME = "sejda-console"; private static final Logger LOG = LoggerFactory.getLogger(SejdaConsole.class); private final Cli<GeneralCliArguments> generalCli = CliFactory.createCli(GeneralCliArguments.class); private final RawArguments arguments; private final TaskExecutionAdapter taskExecutionAdapter; private GeneralCliArguments generalCliArguments; public SejdaConsole(String[] rawArguments, TaskExecutionAdapter taskExecutionAdapter) { this.arguments = new RawArguments(rawArguments.clone()); this.taskExecutionAdapter = taskExecutionAdapter; } /** * Interprets and executes the console command */ public void execute() { try { doExecute(); } catch (ArgumentValidationException e) { LOG.info(e.getMessage()); } catch (SejdaRuntimeException e) { LOG.error(e.getMessage(), e); } } private void doExecute() throws ArgumentValidationException { LOG.debug("Starting execution with arguments: " + arguments); parseGeneralCliArguments(); if (isNoCommandSpecified()) { if (isVersionRequest() || isLicenseRequest()) { printVersionAndLicense(); } else { printGeneralHelp(); } } else { CliCommand command = getCommandSpecified(); if (isCommandHelpRequested()) { printCommandHelp(command); } else { validateNoDuplicateCommandArguments(); executeCommand(command); } } LOG.debug("Completed execution"); } /** * throws an exception if there are duplicate option:value pairs specified, that would override each other silently otherwise */ private void validateNoDuplicateCommandArguments() { Map<String, Object> uniqueArguments = new HashMap<String, Object>(); for (String eachArgument : arguments.getCommandArguments()) { if (uniqueArguments.containsKey(eachArgument) && StringUtils.startsWith(eachArgument, "-")) { throw new SejdaRuntimeException( "Option '" + eachArgument + "' is specified twice. Please note that the correct way to specify a list of values for an option is to repeat the values after the option, without re-stating the option name. 
Example: --files /tmp/file1.pdf /tmp/files2.pdf"); } uniqueArguments.put(eachArgument, eachArgument); } } private void executeCommand(CliCommand command) throws ArgumentValidationException { getTaskExecutionAdapter().execute(command.parseTaskParameters(arguments.getCommandArguments())); } private void parseGeneralCliArguments() throws ArgumentValidationException { generalCliArguments = generalCli.parseArguments(arguments.getGeneralArguments()); } private void printCommandHelp(CliCommand command) { LOG.info(command.getHelpMessage()); } private boolean isCommandHelpRequested() { return generalCliArguments.isHelp(); } private CliCommand getCommandSpecified() { return generalCliArguments.getCommand().getCommand(); } private void printGeneralHelp() { LOG.info(new GeneralHelpFormatter().getFormattedString()); } private void printVersionAndLicense() { StringBuilder info = new StringBuilder(String.format("\nSejda Console (Version %s)\n", Sejda.VERSION)); info.append("(see http://www.sejda.org for more information)\n\n"); info.append("Copyright 2011 by Andrea Vacondio, Eduard Weissmann and others.\n" + "\n" + "Licensed under the Apache License, Version 2.0 (the \"License\");\n" + "you may not use this file except in compliance with the License.\n" + "You may obtain a copy of the License at \n" + "\n" + "http://www.apache.org/licenses/LICENSE-2.0\n" + "\n" + "Unless required by applicable law or agreed to in writing, software\n" + "distributed under the License is distributed on an \"AS IS\" BASIS,\n" + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + "See the License for the specific language governing permissions and \n" + "limitations under the License. "); LOG.info(info.toString()); } private boolean isNoCommandSpecified() { return !generalCliArguments.isCommand(); } private boolean isVersionRequest() { return generalCliArguments.isVersion(); } private boolean isLicenseRequest() { return generalCliArguments.isLicense(); } TaskExecutionAdapter getTaskExecutionAdapter() { return taskExecutionAdapter; } }
re #37: changed exception thrown when duplicate options are found to be an ArgumentValidationException instead of SejdaRuntimeException
sejda-console/src/main/java/org/sejda/cli/SejdaConsole.java
re #37: changed exception thrown when duplicate options are found to be an ArgumentValidationException instead of SejdaRuntimeException
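The change above concerns which exception type reports a duplicated option flag; the detection itself is a simple scan of the raw arguments. A self-contained sketch of that check, with IllegalArgumentException standing in for jewel-cli's ArgumentValidationException:

import java.util.HashSet;
import java.util.Set;

// Standalone sketch of the duplicate-option check: walk the raw arguments and
// fail when an option flag (a token starting with "-") occurs more than once.
public class DuplicateOptionCheckSketch {

    static void validateNoDuplicateOptions(String[] args) {
        Set<String> seen = new HashSet<>();
        for (String arg : args) {
            if (arg.startsWith("-") && !seen.add(arg)) {
                throw new IllegalArgumentException("Option '" + arg
                        + "' is specified twice. Repeat only the values, not the option name,"
                        + " e.g. --files /tmp/file1.pdf /tmp/file2.pdf");
            }
        }
    }

    public static void main(String[] args) {
        // Passes: the option appears once, followed by two values.
        validateNoDuplicateOptions(new String[] {"--files", "a.pdf", "b.pdf"});
        // Throws: the same option flag is stated twice.
        validateNoDuplicateOptions(new String[] {"--files", "a.pdf", "--files", "b.pdf"});
    }
}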
Java
agpl-3.0
605a93787a389bb1e9f34b5f552954b1619a65b4
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
cd2500de-2e5f-11e5-9284-b827eb9e62be
hello.java
cd1f92e8-2e5f-11e5-9284-b827eb9e62be
cd2500de-2e5f-11e5-9284-b827eb9e62be
hello.java
cd2500de-2e5f-11e5-9284-b827eb9e62be
Java
agpl-3.0
421f6e3bd7edda7e8f378cd08a46cff6b21ebcb4
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
d3e14f58-2e60-11e5-9284-b827eb9e62be
hello.java
d3dbd7bc-2e60-11e5-9284-b827eb9e62be
d3e14f58-2e60-11e5-9284-b827eb9e62be
hello.java
d3e14f58-2e60-11e5-9284-b827eb9e62be
Java
apache-2.0
0d6eba4d5a5cf2e5e4a9fafa5d88d2fe59260b39
0
jerome79/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,nssales/OG-Platform,codeaudit/OG-Platform,nssales/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,DevStreet/FinanceAnalytics,DevStreet/FinanceAnalytics,McLeodMoores/starling,codeaudit/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,jeorme/OG-Platform,codeaudit/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,jeorme/OG-Platform,McLeodMoores/starling,jerome79/OG-Platform
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.web.server; import it.unimi.dsi.fastutil.longs.LongArraySet; import it.unimi.dsi.fastutil.longs.LongSet; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicReference; import org.cometd.Client; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.opengamma.engine.ComputationTargetSpecification; import com.opengamma.engine.ComputationTargetType; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.engine.view.ViewCalculationConfiguration; import com.opengamma.engine.view.ViewDefinition; import com.opengamma.engine.view.ViewTargetResultModel; import com.opengamma.engine.view.compilation.CompiledViewDefinition; import com.opengamma.id.UniqueIdentifier; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.tuple.Pair; import com.opengamma.web.server.conversion.ConversionMode; import com.opengamma.web.server.conversion.ResultConverter; import com.opengamma.web.server.conversion.ResultConverterCache; /** * Stores state relating to an individual grid in a web client instance. */ public abstract class WebViewGrid { private static final Logger s_logger = LoggerFactory.getLogger(WebViewGrid.class); private static final String GRID_STRUCTURE_ROOT_CHANNEL = "/gridStructure"; private static final String UPDATES_ROOT_CHANNEL = "/updates"; private static final int HISTORY_SIZE = 20; private final String _name; private final String _updateChannel; private final String _columnStructureChannel; private final WebViewGridStructure _gridStructure; private final ResultConverterCache _resultConverterCache; private final Client _local; private final Client _remote; private final String _nullCellValue; // Row-based state private final AtomicReference<SortedMap<Long, Long>> _viewportMap = new AtomicReference<SortedMap<Long, Long>>(); // Column-based state: few entries expected so using an array set private final LongSet _historyOutputs = new LongArraySet(); // Cell-based state private final Set<WebGridCell> _fullConversionModeCells = new HashSet<WebGridCell>(); private final Map<WebGridCell, SortedMap<Long, Object>> _cellValueHistory = new HashMap<WebGridCell, SortedMap<Long, Object>>(); protected WebViewGrid(String name, CompiledViewDefinition compiledViewDefinition, List<UniqueIdentifier> targets, EnumSet<ComputationTargetType> targetTypes, ResultConverterCache resultConverterCache, Client local, Client remote, String nullCellValue) { ArgumentChecker.notNull(name, "name"); ArgumentChecker.notNull(compiledViewDefinition, "compiledViewDefinition"); ArgumentChecker.notNull(targetTypes, "targetTypes"); ArgumentChecker.notNull(resultConverterCache, "resultConverterCache"); ArgumentChecker.notNull(local, "local"); ArgumentChecker.notNull(remote, "remote"); _name = name; _updateChannel = UPDATES_ROOT_CHANNEL + "/" + name; _columnStructureChannel = GRID_STRUCTURE_ROOT_CHANNEL + "/" + name + "/columns"; List<WebViewGridColumnKey> requirements = 
getRequirements(compiledViewDefinition.getViewDefinition(), targetTypes); _gridStructure = new WebViewGridStructure(compiledViewDefinition, targetTypes, requirements, targets); _resultConverterCache = resultConverterCache; _local = local; _remote = remote; _nullCellValue = nullCellValue; } public String getName() { return _name; } //------------------------------------------------------------------------- public void processTargetResult(ComputationTargetSpecification target, ViewTargetResultModel resultModel, Long resultTimestamp) { Long rowId = getGridStructure().getRowId(target.getUniqueId()); if (rowId == null) { // Result not in the grid return; } boolean rowInViewport = getViewport().containsKey(rowId); Long lastHistoryTime = getViewport().get(rowId); Map<String, Object> valuesToSend = null; if (rowInViewport) { valuesToSend = new HashMap<String, Object>(); valuesToSend.put("rowId", rowId); } for (String configName : resultModel.getCalculationConfigurationNames()) { for (ComputedValue value : resultModel.getAllValues(configName)) { ValueSpecification specification = value.getSpecification(); WebViewGridColumn column = getGridStructure().getColumn(configName, specification); if (column == null) { s_logger.warn("Could not find column for calculation configuration {} with value specification {}", configName, specification); continue; } long colId = column.getId(); // s_logger.debug("{} {} = {} {}", new Object[] {target.getUniqueId(), columnName, value.getValue().getValue(), value.getValue().getSpecification().getProperties()}); WebGridCell cell = WebGridCell.of(rowId, colId); ConversionMode mode = getConversionMode(cell); Object originalValue = value.getValue(); ResultConverter<Object> converter = originalValue != null ? getConverter(column, value.getSpecification().getValueName(), originalValue.getClass()) : null; Object displayValue; if (originalValue != null) { try { displayValue = converter.convertForDisplay(_resultConverterCache, value.getSpecification(), originalValue, mode); } catch (Exception e) { s_logger.error("Exception when converting: ", e); displayValue = "Conversion Error"; } } else { displayValue = null; } boolean isHistoryOutput = isHistoryOutput(colId); if (isHistoryOutput) { Object historyValue; if (originalValue != null) { historyValue = converter.convertForHistory(_resultConverterCache, value.getSpecification(), originalValue); } else { historyValue = null; } addCellHistory(cell, resultTimestamp, historyValue); } Object cellValue; if (rowInViewport) { // Client requires this row if (isHistoryOutput) { Map<String, Object> cellData = new HashMap<String, Object>(); cellData.put("display", displayValue); SortedMap<Long, Object> history = getCellHistory(cell, lastHistoryTime); if (history != null) { cellData.put("history", history.values()); } cellValue = cellData; } else { cellValue = displayValue; } if (cellValue != null) { valuesToSend.put(Long.toString(colId), cellValue); } } } } if (rowInViewport) { _remote.deliver(_local, _updateChannel, valuesToSend, null); } } @SuppressWarnings("unchecked") private ResultConverter<Object> getConverter(WebViewGridColumn column, String valueName, Class<?> valueType) { // Ensure the converter is cached against the value name before sending the column details ResultConverter<Object> converter = (ResultConverter<Object>) _resultConverterCache.getAndCacheConverter(valueName, valueType); if (!column.isTypeKnown()) { sendColumnDetails(Collections.singleton(column)); } return converter; } public ConversionMode getConversionMode(WebGridCell 
cell) { return _fullConversionModeCells.contains(cell) ? ConversionMode.FULL : ConversionMode.SUMMARY; } public void setConversionMode(WebGridCell cell, ConversionMode mode) { if (mode == ConversionMode.SUMMARY) { _fullConversionModeCells.remove(cell); } else { _fullConversionModeCells.add(cell); } } //------------------------------------------------------------------------- public Object getJsonGridStructure() { Map<String, Object> gridStructure = new HashMap<String, Object>(); gridStructure.put("name", getName()); gridStructure.put("rows", getJsonRowStructures()); gridStructure.put("columns", getJsonColumnStructures(getGridStructure().getColumns())); return gridStructure; } private void sendColumnDetails(Collection<WebViewGridColumn> columnDetails) { _remote.deliver(_local, _columnStructureChannel, getJsonColumnStructures(columnDetails), null); } private Map<String, Object> getJsonColumnStructures(Collection<WebViewGridColumn> columns) { Map<String, Object> columnStructures = new HashMap<String, Object>(); for (WebViewGridColumn columnDetails : columns) { columnStructures.put(Long.toString(columnDetails.getId()), getJsonColumnStructure(columnDetails)); } return columnStructures; } private Map<String, Object> getJsonColumnStructure(WebViewGridColumn column) { Map<String, Object> detailsToSend = new HashMap<String, Object>(); long colId = column.getId(); detailsToSend.put("colId", colId); detailsToSend.put("header", column.getHeader()); detailsToSend.put("description", column.getDescription()); detailsToSend.put("nullValue", _nullCellValue); String resultType = _resultConverterCache.getKnownResultTypeName(column.getValueName()); if (resultType != null) { column.setTypeKnown(true); detailsToSend.put("dataType", resultType); // Hack - the client should decide which columns it requires history for, taking into account the capabilities of // the renderer. 
if (resultType.equals("PRIMITIVE")) { addHistoryOutput(column.getId()); } } return detailsToSend; } private List<Object> getJsonRowStructures() { List<Object> rowStructures = new ArrayList<Object>(); for (Map.Entry<UniqueIdentifier, Long> targetEntry : getGridStructure().getTargets().entrySet()) { Map<String, Object> rowDetails = new HashMap<String, Object>(); UniqueIdentifier target = targetEntry.getKey(); long rowId = targetEntry.getValue(); rowDetails.put("rowId", rowId); addRowDetails(target, rowId, rowDetails); rowStructures.add(rowDetails); } return rowStructures; } protected abstract void addRowDetails(UniqueIdentifier target, long rowId, Map<String, Object> details); //------------------------------------------------------------------------- public SortedMap<Long, Long> getViewport() { return _viewportMap.get(); } public void setViewport(SortedMap<Long, Long> viewportMap) { _viewportMap.set(viewportMap); } protected WebViewGridStructure getGridStructure() { return _gridStructure; } //------------------------------------------------------------------------- private void addHistoryOutput(long colId) { _historyOutputs.add(colId); } private boolean isHistoryOutput(long colId) { return _historyOutputs.contains(colId); } private void addCellHistory(WebGridCell cell, Long timestamp, Object value) { SortedMap<Long, Object> history = _cellValueHistory.get(cell); if (history == null) { history = new TreeMap<Long, Object>(); _cellValueHistory.put(cell, history); } if (history.size() > HISTORY_SIZE) { history.remove(history.entrySet().iterator().next().getKey()); } history.put(timestamp, value); } private SortedMap<Long, Object> getCellHistory(WebGridCell cell, Long lastTimestamp) { SortedMap<Long, Object> history = _cellValueHistory.get(cell); if (history == null) { return null; } if (lastTimestamp == null) { return history; } return history.tailMap(lastTimestamp + 1); } //------------------------------------------------------------------------- private static List<WebViewGridColumnKey> getRequirements(ViewDefinition viewDefinition, EnumSet<ComputationTargetType> targetTypes) { List<WebViewGridColumnKey> result = new ArrayList<WebViewGridColumnKey>(); for (ViewCalculationConfiguration calcConfig : viewDefinition.getAllCalculationConfigurations()) { String calcConfigName = calcConfig.getName(); if (targetTypes.contains(ComputationTargetType.POSITION) || targetTypes.contains(ComputationTargetType.PORTFOLIO_NODE)) { for (Pair<String, ValueProperties> portfolioOutput : calcConfig.getAllPortfolioRequirements()) { String valueName = portfolioOutput.getFirst(); ValueProperties constraints = portfolioOutput.getSecond(); WebViewGridColumnKey columnKey = new WebViewGridColumnKey(calcConfigName, valueName, constraints); result.add(columnKey); } } for (ValueRequirement specificRequirement : calcConfig.getSpecificRequirements()) { if (!targetTypes.contains(specificRequirement.getTargetSpecification().getType())) { continue; } String valueName = specificRequirement.getValueName(); ValueProperties constraints = specificRequirement.getConstraints(); WebViewGridColumnKey columnKey = new WebViewGridColumnKey(calcConfigName, valueName, constraints); result.add(columnKey); } } return result; } }
projects/OG-Web/src/com/opengamma/web/server/WebViewGrid.java
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.web.server; import it.unimi.dsi.fastutil.longs.LongArraySet; import it.unimi.dsi.fastutil.longs.LongSet; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicReference; import org.cometd.Client; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.opengamma.engine.ComputationTargetSpecification; import com.opengamma.engine.ComputationTargetType; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.engine.view.ViewCalculationConfiguration; import com.opengamma.engine.view.ViewDefinition; import com.opengamma.engine.view.ViewTargetResultModel; import com.opengamma.engine.view.compilation.CompiledViewDefinition; import com.opengamma.id.UniqueIdentifier; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.tuple.Pair; import com.opengamma.web.server.conversion.ConversionMode; import com.opengamma.web.server.conversion.ResultConverter; import com.opengamma.web.server.conversion.ResultConverterCache; /** * Stores state relating to an individual grid in a web client instance. */ public abstract class WebViewGrid { private static final Logger s_logger = LoggerFactory.getLogger(WebViewGrid.class); private static final String GRID_STRUCTURE_ROOT_CHANNEL = "/gridStructure"; private static final String UPDATES_ROOT_CHANNEL = "/updates"; private static final int HISTORY_SIZE = 20; private final String _name; private final String _updateChannel; private final String _columnStructureChannel; private final WebViewGridStructure _gridStructure; private final ResultConverterCache _resultConverterCache; private final Client _local; private final Client _remote; private final String _nullCellValue; // Row-based state private final AtomicReference<SortedMap<Long, Long>> _viewportMap = new AtomicReference<SortedMap<Long, Long>>(); // Column-based state: few entries expected so using an array set private final LongSet _historyOutputs = new LongArraySet(); // Cell-based state private final Set<WebGridCell> _fullConversionModeCells = new HashSet<WebGridCell>(); private final Map<WebGridCell, SortedMap<Long, Object>> _cellValueHistory = new HashMap<WebGridCell, SortedMap<Long, Object>>(); protected WebViewGrid(String name, CompiledViewDefinition compiledViewDefinition, List<UniqueIdentifier> targets, EnumSet<ComputationTargetType> targetTypes, ResultConverterCache resultConverterCache, Client local, Client remote, String nullCellValue) { ArgumentChecker.notNull(name, "name"); ArgumentChecker.notNull(compiledViewDefinition, "compiledViewDefinition"); ArgumentChecker.notNull(targetTypes, "targetTypes"); ArgumentChecker.notNull(resultConverterCache, "resultConverterCache"); ArgumentChecker.notNull(local, "local"); ArgumentChecker.notNull(remote, "remote"); _name = name; _updateChannel = UPDATES_ROOT_CHANNEL + "/" + name; _columnStructureChannel = GRID_STRUCTURE_ROOT_CHANNEL + "/" + name + "/columns"; List<WebViewGridColumnKey> requirements = 
getRequirements(compiledViewDefinition.getViewDefinition(), targetTypes); _gridStructure = new WebViewGridStructure(compiledViewDefinition, targetTypes, requirements, targets); _resultConverterCache = resultConverterCache; _local = local; _remote = remote; _nullCellValue = nullCellValue; } public String getName() { return _name; } //------------------------------------------------------------------------- public void processTargetResult(ComputationTargetSpecification target, ViewTargetResultModel resultModel, Long resultTimestamp) { Long rowId = getGridStructure().getRowId(target.getUniqueId()); if (rowId == null) { // Result not in the grid return; } boolean rowInViewport = getViewport().containsKey(rowId); Long lastHistoryTime = getViewport().get(rowId); Map<String, Object> valuesToSend = null; if (rowInViewport) { valuesToSend = new HashMap<String, Object>(); valuesToSend.put("rowId", rowId); } for (String configName : resultModel.getCalculationConfigurationNames()) { for (ComputedValue value : resultModel.getAllValues(configName)) { ValueSpecification specification = value.getSpecification(); WebViewGridColumn column = getGridStructure().getColumn(configName, specification); if (column == null) { s_logger.warn("Could not find column for calculation configuration {} with value specification {}", configName, specification); continue; } long colId = column.getId(); // s_logger.debug("{} {} = {} {}", new Object[] {target.getUniqueId(), columnName, value.getValue().getValue(), value.getValue().getSpecification().getProperties()}); WebGridCell cell = WebGridCell.of(rowId, colId); ConversionMode mode = getConversionMode(cell); Object originalValue = value.getValue(); ResultConverter<Object> converter = originalValue != null ? getConverter(column, value.getSpecification().getValueName(), originalValue.getClass()) : null; Object displayValue; if (originalValue != null) { try { displayValue = converter.convertForDisplay(_resultConverterCache, value.getSpecification(), originalValue, mode); } catch (Exception e) { s_logger.error("Exception when converting: ", e); displayValue = "Conversion Error"; } } else { displayValue = null; } boolean isHistoryOutput = isHistoryOutput(colId); if (isHistoryOutput) { Object historyValue; if (originalValue != null) { historyValue = converter.convertForHistory(_resultConverterCache, value.getSpecification(), originalValue); } else { historyValue = null; } addCellHistory(cell, resultTimestamp, historyValue); } Object cellValue; if (rowInViewport) { // Client requires this row if (isHistoryOutput) { Map<String, Object> cellData = new HashMap<String, Object>(); cellData.put("display", displayValue); SortedMap<Long, Object> history = getCellHistory(cell, lastHistoryTime); if (history != null) { cellData.put("history", history.values()); } cellValue = cellData; } else { cellValue = displayValue; } if (cellValue != null) { valuesToSend.put(Long.toString(colId), cellValue); } } } } if (rowInViewport) { _remote.deliver(_local, _updateChannel, valuesToSend, null); } } @SuppressWarnings("unchecked") private ResultConverter<Object> getConverter(WebViewGridColumn column, String valueName, Class<?> valueType) { if (!column.isTypeKnown()) { sendColumnDetails(Collections.singleton(column)); } return (ResultConverter<Object>) _resultConverterCache.getAndCacheConverter(valueName, valueType); } public ConversionMode getConversionMode(WebGridCell cell) { return _fullConversionModeCells.contains(cell) ? 
ConversionMode.FULL : ConversionMode.SUMMARY; } public void setConversionMode(WebGridCell cell, ConversionMode mode) { if (mode == ConversionMode.SUMMARY) { _fullConversionModeCells.remove(cell); } else { _fullConversionModeCells.add(cell); } } //------------------------------------------------------------------------- public Object getJsonGridStructure() { Map<String, Object> gridStructure = new HashMap<String, Object>(); gridStructure.put("name", getName()); gridStructure.put("rows", getJsonRowStructures()); gridStructure.put("columns", getJsonColumnStructures(getGridStructure().getColumns())); return gridStructure; } private void sendColumnDetails(Collection<WebViewGridColumn> columnDetails) { _remote.deliver(_local, _columnStructureChannel, getJsonColumnStructures(columnDetails), null); } private Map<String, Object> getJsonColumnStructures(Collection<WebViewGridColumn> columns) { Map<String, Object> columnStructures = new HashMap<String, Object>(); for (WebViewGridColumn columnDetails : columns) { columnStructures.put(Long.toString(columnDetails.getId()), getJsonColumnStructure(columnDetails)); } return columnStructures; } private Map<String, Object> getJsonColumnStructure(WebViewGridColumn column) { Map<String, Object> detailsToSend = new HashMap<String, Object>(); long colId = column.getId(); detailsToSend.put("colId", colId); detailsToSend.put("header", column.getHeader()); detailsToSend.put("description", column.getDescription()); detailsToSend.put("nullValue", _nullCellValue); String resultType = _resultConverterCache.getKnownResultTypeName(column.getValueName()); if (resultType != null) { column.setTypeKnown(true); detailsToSend.put("dataType", resultType); // Hack - the client should decide which columns it requires history for, taking into account the capabilities of // the renderer. 
if (resultType.equals("PRIMITIVE")) { addHistoryOutput(column.getId()); } } return detailsToSend; } private List<Object> getJsonRowStructures() { List<Object> rowStructures = new ArrayList<Object>(); for (Map.Entry<UniqueIdentifier, Long> targetEntry : getGridStructure().getTargets().entrySet()) { Map<String, Object> rowDetails = new HashMap<String, Object>(); UniqueIdentifier target = targetEntry.getKey(); long rowId = targetEntry.getValue(); rowDetails.put("rowId", rowId); addRowDetails(target, rowId, rowDetails); rowStructures.add(rowDetails); } return rowStructures; } protected abstract void addRowDetails(UniqueIdentifier target, long rowId, Map<String, Object> details); //------------------------------------------------------------------------- public SortedMap<Long, Long> getViewport() { return _viewportMap.get(); } public void setViewport(SortedMap<Long, Long> viewportMap) { _viewportMap.set(viewportMap); } protected WebViewGridStructure getGridStructure() { return _gridStructure; } //------------------------------------------------------------------------- private void addHistoryOutput(long colId) { _historyOutputs.add(colId); } private boolean isHistoryOutput(long colId) { return _historyOutputs.contains(colId); } private void addCellHistory(WebGridCell cell, Long timestamp, Object value) { SortedMap<Long, Object> history = _cellValueHistory.get(cell); if (history == null) { history = new TreeMap<Long, Object>(); _cellValueHistory.put(cell, history); } if (history.size() > HISTORY_SIZE) { history.remove(history.entrySet().iterator().next().getKey()); } history.put(timestamp, value); } private SortedMap<Long, Object> getCellHistory(WebGridCell cell, Long lastTimestamp) { SortedMap<Long, Object> history = _cellValueHistory.get(cell); if (history == null) { return null; } if (lastTimestamp == null) { return history; } return history.tailMap(lastTimestamp + 1); } //------------------------------------------------------------------------- private static List<WebViewGridColumnKey> getRequirements(ViewDefinition viewDefinition, EnumSet<ComputationTargetType> targetTypes) { List<WebViewGridColumnKey> result = new ArrayList<WebViewGridColumnKey>(); for (ViewCalculationConfiguration calcConfig : viewDefinition.getAllCalculationConfigurations()) { String calcConfigName = calcConfig.getName(); if (targetTypes.contains(ComputationTargetType.POSITION) || targetTypes.contains(ComputationTargetType.PORTFOLIO_NODE)) { for (Pair<String, ValueProperties> portfolioOutput : calcConfig.getAllPortfolioRequirements()) { String valueName = portfolioOutput.getFirst(); ValueProperties constraints = portfolioOutput.getSecond(); WebViewGridColumnKey columnKey = new WebViewGridColumnKey(calcConfigName, valueName, constraints); result.add(columnKey); } } for (ValueRequirement specificRequirement : calcConfig.getSpecificRequirements()) { if (!targetTypes.contains(specificRequirement.getTargetSpecification().getType())) { continue; } String valueName = specificRequirement.getValueName(); ValueProperties constraints = specificRequirement.getConstraints(); WebViewGridColumnKey columnKey = new WebViewGridColumnKey(calcConfigName, valueName, constraints); result.add(columnKey); } } return result; } }
[PLAT-1256] Ensuring type converter is correctly found when sending column metadata ahead of first result
projects/OG-Web/src/com/opengamma/web/server/WebViewGrid.java
[PLAT-1256] Ensuring type converter is correctly found when sending column metadata ahead of first result
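A minimal sketch, assuming the surrounding WebViewGrid class from the record above, contrasting the two versions of getConverter to show why the ordering matters for PLAT-1256: the converter must be cached against the value name before sendColumnDetails publishes the column metadata, otherwise getKnownResultTypeName cannot yet report a dataType for the column on the first result. Only method and field names visible in the source are used; the Old/New suffixes are added here purely for the comparison.

// Old ordering (old_contents): column metadata is sent before the converter is cached,
// so the column's dataType may still be unknown when the client receives it.
private ResultConverter<Object> getConverterOld(WebViewGridColumn column, String valueName, Class<?> valueType) {
  if (!column.isTypeKnown()) {
    sendColumnDetails(Collections.singleton(column));
  }
  return (ResultConverter<Object>) _resultConverterCache.getAndCacheConverter(valueName, valueType);
}

// New ordering (new_contents): cache the converter first, then send the column details,
// so getJsonColumnStructure can resolve the known result type name.
private ResultConverter<Object> getConverterNew(WebViewGridColumn column, String valueName, Class<?> valueType) {
  ResultConverter<Object> converter =
      (ResultConverter<Object>) _resultConverterCache.getAndCacheConverter(valueName, valueType);
  if (!column.isTypeKnown()) {
    sendColumnDetails(Collections.singleton(column));
  }
  return converter;
}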
Java
apache-2.0
e792d44aeabc0a6856675428b685c8a1bf633a0c
0
ricepanda/rice,ricepanda/rice,ricepanda/rice,ricepanda/rice
/* * Copyright 2007-2009 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kew.framework.document.security; import org.kuali.rice.kew.api.document.Document; import java.io.Serializable; /** * This is an attribute used to implement custom document security for document lookup and the route log. * SecurityAttributes are configured to be associated with the document type against which they should * be applied. For each route log or row that is returned from a document lookup, this authorization * method will be executed. * * @author Kuali Rice Team (rice.collab@kuali.org) * */ public interface DocumentSecurityAttribute extends Serializable { /** * Determines whether or not a principal is authorized to see information about a given document. * * @param principalId the principalId for which to check authorization * @param document the document for which to check security * * @return true if the principal is authorized to view the document, false otherwise */ boolean isAuthorizedForDocument(String principalId, Document document); }
kew/framework/src/main/java/org/kuali/rice/kew/framework/document/security/DocumentSecurityAttribute.java
/* * Copyright 2007-2009 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kew.framework.document.security; import org.kuali.rice.kew.api.document.Document; import java.io.Serializable; /** * This is an attribute used to implement custom document security for document lookup and the route log. * SecurityAttributes are configured to be associated with the document type against which they should * be applied. For each route log or row that is returned from a document lookup, this authorization * method will be executed. * * @author Kuali Rice Team (rice.collab@kuali.org) * */ public interface DocumentSecurityAttribute extends Serializable { /** * Determines whether or not a principal is authorized to see information about a given document. * * @param principalId the principalId for which to check authorization * @param document the document for which to check security * * @return true if the principal is authorized to view the workflow document, false otherwise */ boolean isAuthorizedForDocument(String principalId, Document document); }
KULRICE-5056 - small change to document security attribute git-svn-id: 2a5d2b5a02908a0c4ba7967b726d8c4198d1b9ed@22449 7a7aa7f6-c479-11dc-97e2-85a2497f191d
kew/framework/src/main/java/org/kuali/rice/kew/framework/document/security/DocumentSecurityAttribute.java
KULRICE-5056 - small change to document security attribute
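A hypothetical implementation sketch of the DocumentSecurityAttribute interface above. Only the package, interface name, and method signature come from the record; the class name and the allow-list behaviour are invented for illustration, and a real attribute would normally inspect the document itself rather than a fixed set of principals.

package org.kuali.rice.kew.framework.document.security;

import java.util.Set;

import org.kuali.rice.kew.api.document.Document;

// Illustrative only: authorizes a fixed set of principals for every document.
public class AllowListDocumentSecurityAttribute implements DocumentSecurityAttribute {

    private static final long serialVersionUID = 1L;

    private final Set<String> authorizedPrincipalIds;

    public AllowListDocumentSecurityAttribute(Set<String> authorizedPrincipalIds) {
        this.authorizedPrincipalIds = authorizedPrincipalIds;
    }

    @Override
    public boolean isAuthorizedForDocument(String principalId, Document document) {
        // Called for each route log or document lookup row; only allow-listed principals may see it.
        return authorizedPrincipalIds.contains(principalId);
    }
}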
Java
apache-2.0
f56b97a8e6ce05bef11c4d63dde80bdf9ba1701c
0
heatre/LeetCode
/** * 2. Add Two Numbers * You are given two linked lists representing two non-negative numbers. * The digits are stored in reverse order and each of their nodes contains a single digit. * Add the two numbers and return it as a linked list. * Input: (2 -> 4 -> 3) + (5 -> 6 -> 4) * Output: 7 -> 0 -> 8 * * @author Swin * */ public class AddTwoNumbers2 { public class ListNode // inner class defining the linked-list node structure { int val; ListNode next; ListNode(int x) { val = x; } } public ListNode addTwoNumbers(ListNode l1,ListNode l2) { if(null == l1 || null == l2) return null; ListNode node = new ListNode(0); ListNode headNode = node; int sum =0; int carry = 0; int tmp = 0; while(l1 != null || l2 != null || carry != 0) { tmp = (l1 != null ? l1.val:0) + (l2 != null ? l2.val:0)+carry; sum = tmp%10; carry = tmp/10; node.next = new ListNode(sum); node = node.next; l1 = (l1 != null) ? l1.next : null; l2 = (l2 != null) ? l2.next : null; } return headNode.next; } public static void main(String[] args) { AddTwoNumbers2 adn = new AddTwoNumbers2(); ListNode l1 = adn.new ListNode(0); ListNode l2 = adn.new ListNode(0); ListNode node = l1; for(int i =0;i<5;i++) { node.next = adn.new ListNode(i); node = node.next; } node = l2; for(int i =3;i>0;i--) { node.next = adn.new ListNode(i); node = node.next; } node = adn.addTwoNumbers(l1.next, l2.next); while(node!= null) { System.out.println(node.val); node = node.next; } } }
src/AddTwoNumbers2.java
/** * 2. Add Two Numbers * You are given two linked lists representing two non-negative numbers. * The digits are stored in reverse order and each of their nodes contains a single digit. * Add the two numbers and return it as a linked list. * Input: (2 -> 4 -> 3) + (5 -> 6 -> 4) * Output: 7 -> 0 -> 8 * * @author Swin * */ public class AddTwoNumbers2 { public class ListNode // inner class defining the linked-list node structure { int val; ListNode next; ListNode(int x) { val = x; } } public ListNode addTwoNumbers(ListNode l1,ListNode l2) { return null; } public static void main(String[] args) { } }
addTwoNum method
src/AddTwoNumbers2.java
addTwoNum method
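A short usage sketch for the AddTwoNumbers2 record above, following the adn.new ListNode(...) pattern from its main method. It feeds the example from the problem statement, (2 -> 4 -> 3) + (5 -> 6 -> 4), into addTwoNumbers; the middle digits give 4 + 6 = 10, i.e. digit 0 with carry 1, so the printed result is 7, 0, 8. The demo class name is made up; it only assumes AddTwoNumbers2 is on the classpath in the same (default) package.

public class AddTwoNumbersDemo {
    public static void main(String[] args) {
        AddTwoNumbers2 adn = new AddTwoNumbers2();

        // 342 stored least-significant digit first: 2 -> 4 -> 3
        AddTwoNumbers2.ListNode l1 = adn.new ListNode(2);
        l1.next = adn.new ListNode(4);
        l1.next.next = adn.new ListNode(3);

        // 465 stored least-significant digit first: 5 -> 6 -> 4
        AddTwoNumbers2.ListNode l2 = adn.new ListNode(5);
        l2.next = adn.new ListNode(6);
        l2.next.next = adn.new ListNode(4);

        // 342 + 465 = 807, so this prints 7, 0, 8 (one digit per line).
        for (AddTwoNumbers2.ListNode n = adn.addTwoNumbers(l1, l2); n != null; n = n.next) {
            System.out.println(n.val);
        }
    }
}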
Java
apache-2.0
ba90508b919822d64c68f2c96ec8f44646fe6d78
0
maddin2016/elasticsearch,artnowo/elasticsearch,mikemccand/elasticsearch,i-am-Nathan/elasticsearch,jimczi/elasticsearch,sreeramjayan/elasticsearch,winstonewert/elasticsearch,mortonsykes/elasticsearch,kalimatas/elasticsearch,Stacey-Gammon/elasticsearch,IanvsPoplicola/elasticsearch,gfyoung/elasticsearch,brandonkearby/elasticsearch,cwurm/elasticsearch,MisterAndersen/elasticsearch,markwalkom/elasticsearch,masaruh/elasticsearch,cwurm/elasticsearch,dongjoon-hyun/elasticsearch,GlenRSmith/elasticsearch,brandonkearby/elasticsearch,coding0011/elasticsearch,HonzaKral/elasticsearch,StefanGor/elasticsearch,s1monw/elasticsearch,mikemccand/elasticsearch,glefloch/elasticsearch,i-am-Nathan/elasticsearch,nazarewk/elasticsearch,MaineC/elasticsearch,pozhidaevak/elasticsearch,elasticdog/elasticsearch,ZTE-PaaS/elasticsearch,uschindler/elasticsearch,girirajsharma/elasticsearch,brandonkearby/elasticsearch,winstonewert/elasticsearch,nazarewk/elasticsearch,mohit/elasticsearch,mjason3/elasticsearch,avikurapati/elasticsearch,lks21c/elasticsearch,MisterAndersen/elasticsearch,HonzaKral/elasticsearch,strapdata/elassandra,geidies/elasticsearch,Shepard1212/elasticsearch,ricardocerq/elasticsearch,palecur/elasticsearch,JervyShi/elasticsearch,ricardocerq/elasticsearch,nilabhsagar/elasticsearch,glefloch/elasticsearch,HonzaKral/elasticsearch,LewayneNaidoo/elasticsearch,gmarz/elasticsearch,fernandozhu/elasticsearch,vroyer/elassandra,wangtuo/elasticsearch,StefanGor/elasticsearch,a2lin/elasticsearch,njlawton/elasticsearch,MisterAndersen/elasticsearch,girirajsharma/elasticsearch,MisterAndersen/elasticsearch,Stacey-Gammon/elasticsearch,mortonsykes/elasticsearch,alexshadow007/elasticsearch,mohit/elasticsearch,wenpos/elasticsearch,scottsom/elasticsearch,rajanm/elasticsearch,qwerty4030/elasticsearch,yanjunh/elasticsearch,jprante/elasticsearch,markwalkom/elasticsearch,elasticdog/elasticsearch,dongjoon-hyun/elasticsearch,lks21c/elasticsearch,uschindler/elasticsearch,dpursehouse/elasticsearch,wuranbo/elasticsearch,rajanm/elasticsearch,strapdata/elassandra5-rc,pozhidaevak/elasticsearch,s1monw/elasticsearch,njlawton/elasticsearch,Shepard1212/elasticsearch,avikurapati/elasticsearch,gmarz/elasticsearch,IanvsPoplicola/elasticsearch,markwalkom/elasticsearch,mortonsykes/elasticsearch,wenpos/elasticsearch,nezirus/elasticsearch,njlawton/elasticsearch,JervyShi/elasticsearch,elasticdog/elasticsearch,girirajsharma/elasticsearch,yanjunh/elasticsearch,maddin2016/elasticsearch,jprante/elasticsearch,mortonsykes/elasticsearch,rlugojr/elasticsearch,zkidkid/elasticsearch,sreeramjayan/elasticsearch,GlenRSmith/elasticsearch,mjason3/elasticsearch,JackyMai/elasticsearch,JervyShi/elasticsearch,geidies/elasticsearch,geidies/elasticsearch,sreeramjayan/elasticsearch,wenpos/elasticsearch,avikurapati/elasticsearch,sneivandt/elasticsearch,obourgain/elasticsearch,Stacey-Gammon/elasticsearch,nezirus/elasticsearch,zkidkid/elasticsearch,maddin2016/elasticsearch,dpursehouse/elasticsearch,jprante/elasticsearch,dpursehouse/elasticsearch,Helen-Zhao/elasticsearch,fred84/elasticsearch,njlawton/elasticsearch,umeshdangat/elasticsearch,ThiagoGarciaAlves/elasticsearch,Helen-Zhao/elasticsearch,bawse/elasticsearch,scorpionvicky/elasticsearch,i-am-Nathan/elasticsearch,alexshadow007/elasticsearch,ThiagoGarciaAlves/elasticsearch,rajanm/elasticsearch,gingerwizard/elasticsearch,wuranbo/elasticsearch,sneivandt/elasticsearch,Helen-Zhao/elasticsearch,artnowo/elasticsearch,C-Bish/elasticsearch,Shepard1212/elasticsearch,ThiagoGarciaAlves/elasticsearch,vroyer/elassandra,a2lin/elasticsearch,lks21
c/elasticsearch,jimczi/elasticsearch,fred84/elasticsearch,nknize/elasticsearch,dongjoon-hyun/elasticsearch,a2lin/elasticsearch,elasticdog/elasticsearch,scottsom/elasticsearch,nknize/elasticsearch,spiegela/elasticsearch,fforbeck/elasticsearch,scottsom/elasticsearch,jprante/elasticsearch,fernandozhu/elasticsearch,awislowski/elasticsearch,gfyoung/elasticsearch,geidies/elasticsearch,awislowski/elasticsearch,alexshadow007/elasticsearch,uschindler/elasticsearch,strapdata/elassandra5-rc,masaruh/elasticsearch,i-am-Nathan/elasticsearch,obourgain/elasticsearch,scorpionvicky/elasticsearch,geidies/elasticsearch,spiegela/elasticsearch,wuranbo/elasticsearch,MaineC/elasticsearch,coding0011/elasticsearch,girirajsharma/elasticsearch,coding0011/elasticsearch,jimczi/elasticsearch,JervyShi/elasticsearch,sreeramjayan/elasticsearch,markwalkom/elasticsearch,LeoYao/elasticsearch,JSCooke/elasticsearch,Shepard1212/elasticsearch,mikemccand/elasticsearch,JackyMai/elasticsearch,ZTE-PaaS/elasticsearch,ThiagoGarciaAlves/elasticsearch,bawse/elasticsearch,shreejay/elasticsearch,rajanm/elasticsearch,vroyer/elasticassandra,mohit/elasticsearch,robin13/elasticsearch,liweinan0423/elasticsearch,girirajsharma/elasticsearch,LewayneNaidoo/elasticsearch,henakamaMSFT/elasticsearch,scottsom/elasticsearch,ZTE-PaaS/elasticsearch,yanjunh/elasticsearch,StefanGor/elasticsearch,LewayneNaidoo/elasticsearch,shreejay/elasticsearch,henakamaMSFT/elasticsearch,C-Bish/elasticsearch,alexshadow007/elasticsearch,wuranbo/elasticsearch,brandonkearby/elasticsearch,winstonewert/elasticsearch,fernandozhu/elasticsearch,markwalkom/elasticsearch,winstonewert/elasticsearch,LeoYao/elasticsearch,kalimatas/elasticsearch,wangtuo/elasticsearch,nezirus/elasticsearch,cwurm/elasticsearch,palecur/elasticsearch,ZTE-PaaS/elasticsearch,qwerty4030/elasticsearch,naveenhooda2000/elasticsearch,Stacey-Gammon/elasticsearch,mortonsykes/elasticsearch,sreeramjayan/elasticsearch,zkidkid/elasticsearch,qwerty4030/elasticsearch,GlenRSmith/elasticsearch,ThiagoGarciaAlves/elasticsearch,LewayneNaidoo/elasticsearch,lks21c/elasticsearch,shreejay/elasticsearch,MaineC/elasticsearch,henakamaMSFT/elasticsearch,uschindler/elasticsearch,kalimatas/elasticsearch,rlugojr/elasticsearch,Shepard1212/elasticsearch,dpursehouse/elasticsearch,wenpos/elasticsearch,fforbeck/elasticsearch,mikemccand/elasticsearch,gfyoung/elasticsearch,vroyer/elasticassandra,cwurm/elasticsearch,njlawton/elasticsearch,gingerwizard/elasticsearch,nilabhsagar/elasticsearch,elasticdog/elasticsearch,strapdata/elassandra,kalimatas/elasticsearch,vroyer/elasticassandra,mjason3/elasticsearch,gingerwizard/elasticsearch,avikurapati/elasticsearch,winstonewert/elasticsearch,strapdata/elassandra,gfyoung/elasticsearch,i-am-Nathan/elasticsearch,maddin2016/elasticsearch,nazarewk/elasticsearch,maddin2016/elasticsearch,nazarewk/elasticsearch,LeoYao/elasticsearch,rajanm/elasticsearch,robin13/elasticsearch,rlugojr/elasticsearch,LeoYao/elasticsearch,strapdata/elassandra5-rc,alexshadow007/elasticsearch,Helen-Zhao/elasticsearch,vroyer/elassandra,s1monw/elasticsearch,fred84/elasticsearch,strapdata/elassandra,wangtuo/elasticsearch,nezirus/elasticsearch,brandonkearby/elasticsearch,liweinan0423/elasticsearch,strapdata/elassandra,robin13/elasticsearch,LeoYao/elasticsearch,kalimatas/elasticsearch,obourgain/elasticsearch,ZTE-PaaS/elasticsearch,jimczi/elasticsearch,geidies/elasticsearch,wangtuo/elasticsearch,palecur/elasticsearch,fernandozhu/elasticsearch,LewayneNaidoo/elasticsearch,nazarewk/elasticsearch,IanvsPoplicola/elasticsearch,MisterAndersen/elastics
earch,uschindler/elasticsearch,awislowski/elasticsearch,wuranbo/elasticsearch,gingerwizard/elasticsearch,LeoYao/elasticsearch,masaruh/elasticsearch,cwurm/elasticsearch,robin13/elasticsearch,glefloch/elasticsearch,umeshdangat/elasticsearch,yanjunh/elasticsearch,scottsom/elasticsearch,spiegela/elasticsearch,umeshdangat/elasticsearch,awislowski/elasticsearch,naveenhooda2000/elasticsearch,pozhidaevak/elasticsearch,qwerty4030/elasticsearch,naveenhooda2000/elasticsearch,artnowo/elasticsearch,fforbeck/elasticsearch,HonzaKral/elasticsearch,palecur/elasticsearch,ricardocerq/elasticsearch,liweinan0423/elasticsearch,MaineC/elasticsearch,JackyMai/elasticsearch,coding0011/elasticsearch,gmarz/elasticsearch,scorpionvicky/elasticsearch,pozhidaevak/elasticsearch,spiegela/elasticsearch,avikurapati/elasticsearch,masaruh/elasticsearch,liweinan0423/elasticsearch,mjason3/elasticsearch,C-Bish/elasticsearch,gingerwizard/elasticsearch,obourgain/elasticsearch,palecur/elasticsearch,obourgain/elasticsearch,glefloch/elasticsearch,yanjunh/elasticsearch,zkidkid/elasticsearch,dpursehouse/elasticsearch,markwalkom/elasticsearch,dongjoon-hyun/elasticsearch,dongjoon-hyun/elasticsearch,lks21c/elasticsearch,coding0011/elasticsearch,artnowo/elasticsearch,s1monw/elasticsearch,s1monw/elasticsearch,mohit/elasticsearch,fforbeck/elasticsearch,gmarz/elasticsearch,Helen-Zhao/elasticsearch,sneivandt/elasticsearch,fred84/elasticsearch,ThiagoGarciaAlves/elasticsearch,rlugojr/elasticsearch,LeoYao/elasticsearch,shreejay/elasticsearch,a2lin/elasticsearch,robin13/elasticsearch,scorpionvicky/elasticsearch,mjason3/elasticsearch,girirajsharma/elasticsearch,liweinan0423/elasticsearch,JackyMai/elasticsearch,wangtuo/elasticsearch,Stacey-Gammon/elasticsearch,nilabhsagar/elasticsearch,GlenRSmith/elasticsearch,henakamaMSFT/elasticsearch,gfyoung/elasticsearch,wenpos/elasticsearch,nilabhsagar/elasticsearch,mohit/elasticsearch,C-Bish/elasticsearch,masaruh/elasticsearch,jimczi/elasticsearch,nilabhsagar/elasticsearch,IanvsPoplicola/elasticsearch,umeshdangat/elasticsearch,JervyShi/elasticsearch,bawse/elasticsearch,spiegela/elasticsearch,IanvsPoplicola/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch,glefloch/elasticsearch,StefanGor/elasticsearch,awislowski/elasticsearch,fred84/elasticsearch,naveenhooda2000/elasticsearch,naveenhooda2000/elasticsearch,JSCooke/elasticsearch,ricardocerq/elasticsearch,nknize/elasticsearch,pozhidaevak/elasticsearch,mikemccand/elasticsearch,nezirus/elasticsearch,gingerwizard/elasticsearch,a2lin/elasticsearch,rlugojr/elasticsearch,henakamaMSFT/elasticsearch,JervyShi/elasticsearch,C-Bish/elasticsearch,MaineC/elasticsearch,artnowo/elasticsearch,shreejay/elasticsearch,sneivandt/elasticsearch,ricardocerq/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,StefanGor/elasticsearch,zkidkid/elasticsearch,sneivandt/elasticsearch,fernandozhu/elasticsearch,sreeramjayan/elasticsearch,umeshdangat/elasticsearch,qwerty4030/elasticsearch,bawse/elasticsearch,JSCooke/elasticsearch,JSCooke/elasticsearch,strapdata/elassandra5-rc,rajanm/elasticsearch,gmarz/elasticsearch,strapdata/elassandra5-rc,JackyMai/elasticsearch,fforbeck/elasticsearch,GlenRSmith/elasticsearch,bawse/elasticsearch,jprante/elasticsearch,JSCooke/elasticsearch
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.query.SpatialArgs; import org.apache.lucene.spatial.query.SpatialOperation; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import java.io.IOException; import java.util.Objects; import java.util.Optional; /** * {@link QueryBuilder} that builds a GeoShape Query */ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuilder> { public static final String NAME = "geo_shape"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes"; public static final String DEFAULT_SHAPE_FIELD_NAME = "shape"; public static final ShapeRelation DEFAULT_SHAPE_RELATION = ShapeRelation.INTERSECTS; /** * The default value for ignore_unmapped. 
*/ public static final boolean DEFAULT_IGNORE_UNMAPPED = false; private static final ParseField SHAPE_FIELD = new ParseField("shape"); private static final ParseField STRATEGY_FIELD = new ParseField("strategy"); private static final ParseField RELATION_FIELD = new ParseField("relation"); private static final ParseField INDEXED_SHAPE_FIELD = new ParseField("indexed_shape"); private static final ParseField SHAPE_ID_FIELD = new ParseField("id"); private static final ParseField SHAPE_TYPE_FIELD = new ParseField("type"); private static final ParseField SHAPE_INDEX_FIELD = new ParseField("index"); private static final ParseField SHAPE_PATH_FIELD = new ParseField("path"); private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped"); private final String fieldName; private final ShapeBuilder shape; private SpatialStrategy strategy; private final String indexedShapeId; private final String indexedShapeType; private String indexedShapeIndex = DEFAULT_SHAPE_INDEX_NAME; private String indexedShapePath = DEFAULT_SHAPE_FIELD_NAME; private ShapeRelation relation = DEFAULT_SHAPE_RELATION; private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; /** * Creates a new GeoShapeQueryBuilder whose Query will be against the given * field name using the given Shape * * @param fieldName * Name of the field that will be queried * @param shape * Shape used in the Query */ public GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape) { this(fieldName, shape, null, null); } /** * Creates a new GeoShapeQueryBuilder whose Query will be against the given * field name and will use the Shape found with the given ID in the given * type * * @param fieldName * Name of the field that will be filtered * @param indexedShapeId * ID of the indexed Shape that will be used in the Query * @param indexedShapeType * Index type of the indexed Shapes */ public GeoShapeQueryBuilder(String fieldName, String indexedShapeId, String indexedShapeType) { this(fieldName, (ShapeBuilder) null, indexedShapeId, indexedShapeType); } private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexedShapeId, String indexedShapeType) { if (fieldName == null) { throw new IllegalArgumentException("fieldName is required"); } if (shape == null && indexedShapeId == null) { throw new IllegalArgumentException("either shapeBytes or indexedShapeId and indexedShapeType are required"); } if (indexedShapeId != null && indexedShapeType == null) { throw new IllegalArgumentException("indexedShapeType is required if indexedShapeId is specified"); } this.fieldName = fieldName; this.shape = shape; this.indexedShapeId = indexedShapeId; this.indexedShapeType = indexedShapeType; } /** * Read from a stream. 
*/ public GeoShapeQueryBuilder(StreamInput in) throws IOException { super(in); fieldName = in.readString(); if (in.readBoolean()) { shape = in.readNamedWriteable(ShapeBuilder.class); indexedShapeId = null; indexedShapeType = null; } else { shape = null; indexedShapeId = in.readOptionalString(); indexedShapeType = in.readOptionalString(); indexedShapeIndex = in.readOptionalString(); indexedShapePath = in.readOptionalString(); } relation = ShapeRelation.readFromStream(in); strategy = in.readOptionalWriteable(SpatialStrategy::readFromStream); ignoreUnmapped = in.readBoolean(); } @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); boolean hasShape = shape != null; out.writeBoolean(hasShape); if (hasShape) { out.writeNamedWriteable(shape); } else { out.writeOptionalString(indexedShapeId); out.writeOptionalString(indexedShapeType); out.writeOptionalString(indexedShapeIndex); out.writeOptionalString(indexedShapePath); } relation.writeTo(out); out.writeOptionalWriteable(strategy); out.writeBoolean(ignoreUnmapped); } /** * @return the name of the field that will be queried */ public String fieldName() { return fieldName; } /** * @return the shape used in the Query */ public ShapeBuilder shape() { return shape; } /** * @return the ID of the indexed Shape that will be used in the Query */ public String indexedShapeId() { return indexedShapeId; } /** * @return the document type of the indexed Shape that will be used in the * Query */ public String indexedShapeType() { return indexedShapeType; } /** * Defines which spatial strategy will be used for building the geo shape * Query. When not set, the strategy that will be used will be the one that * is associated with the geo shape field in the mappings. * * @param strategy * The spatial strategy to use for building the geo shape Query * @return this */ public GeoShapeQueryBuilder strategy(SpatialStrategy strategy) { if (strategy != null && strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) { throw new IllegalArgumentException("strategy [" + strategy.getStrategyName() + "] only supports relation [" + ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]"); } this.strategy = strategy; return this; } /** * @return The spatial strategy to use for building the geo shape Query */ public SpatialStrategy strategy() { return strategy; } /** * Sets the name of the index where the indexed Shape can be found * * @param indexedShapeIndex Name of the index where the indexed Shape is * @return this */ public GeoShapeQueryBuilder indexedShapeIndex(String indexedShapeIndex) { this.indexedShapeIndex = indexedShapeIndex; return this; } /** * @return the index name for the indexed Shape that will be used in the * Query */ public String indexedShapeIndex() { return indexedShapeIndex; } /** * Sets the path of the field in the indexed Shape document that has the Shape itself * * @param indexedShapePath Path of the field where the Shape itself is defined * @return this */ public GeoShapeQueryBuilder indexedShapePath(String indexedShapePath) { this.indexedShapePath = indexedShapePath; return this; } /** * @return the path of the indexed Shape that will be used in the Query */ public String indexedShapePath() { return indexedShapePath; } /** * Sets the relation of query shape and indexed shape. 
* * @param relation relation of the shapes * @return this */ public GeoShapeQueryBuilder relation(ShapeRelation relation) { if (relation == null) { throw new IllegalArgumentException("No Shape Relation defined"); } if (strategy != null && strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) { throw new IllegalArgumentException("current strategy [" + strategy.getStrategyName() + "] only supports relation [" + ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]"); } this.relation = relation; return this; } /** * @return the relation of query shape and indexed shape to use in the Query */ public ShapeRelation relation() { return relation; } /** * Sets whether the query builder should ignore unmapped fields (and run a * {@link MatchNoDocsQuery} in place of this query) or throw an exception if * the field is unmapped. */ public GeoShapeQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) { this.ignoreUnmapped = ignoreUnmapped; return this; } /** * Gets whether the query builder will ignore unmapped fields (and run a * {@link MatchNoDocsQuery} in place of this query) or throw an exception if * the field is unmapped. */ public boolean ignoreUnmapped() { return ignoreUnmapped; } @Override protected Query doToQuery(QueryShardContext context) { if (shape == null) { throw new UnsupportedOperationException("query must be rewritten first"); } final ShapeBuilder shapeToQuery = shape; final MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType == null) { if (ignoreUnmapped) { return new MatchNoDocsQuery(); } else { throw new QueryShardException(context, "failed to find geo_shape field [" + fieldName + "]"); } } // TODO: This isn't the nicest way to check this if (!(fieldType instanceof GeoShapeFieldMapper.GeoShapeFieldType)) { throw new QueryShardException(context, "Field [" + fieldName + "] is not a geo_shape"); } final GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType; PrefixTreeStrategy strategy = shapeFieldType.defaultStrategy(); if (this.strategy != null) { strategy = shapeFieldType.resolveStrategy(this.strategy); } Query query; if (strategy instanceof RecursivePrefixTreeStrategy && relation == ShapeRelation.DISJOINT) { // this strategy doesn't support disjoint anymore: but it did // before, including creating lucene fieldcache (!) // in this case, execute disjoint as exists && !intersects BooleanQuery.Builder bool = new BooleanQuery.Builder(); Query exists = ExistsQueryBuilder.newFilter(context, fieldName); Query intersects = strategy.makeQuery(getArgs(shapeToQuery, ShapeRelation.INTERSECTS)); bool.add(exists, BooleanClause.Occur.MUST); bool.add(intersects, BooleanClause.Occur.MUST_NOT); query = new ConstantScoreQuery(bool.build()); } else { query = new ConstantScoreQuery(strategy.makeQuery(getArgs(shapeToQuery, relation))); } return query; } /** * Fetches the Shape with the given ID in the given type and index. 
* * @param getRequest * GetRequest containing index, type and id * @param path * Name or path of the field in the Shape Document where the * Shape itself is located * @return Shape with the given ID * @throws IOException * Can be thrown while parsing the Shape Document and extracting * the Shape */ private ShapeBuilder fetch(Client client, GetRequest getRequest, String path) throws IOException { if (ShapesAvailability.JTS_AVAILABLE == false) { throw new IllegalStateException("JTS not available"); } getRequest.preference("_local"); getRequest.operationThreaded(false); GetResponse response = client.get(getRequest).actionGet(); if (!response.isExists()) { throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] not found"); } if (response.isSourceEmpty()) { throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] source disabled"); } String[] pathElements = path.split("\\."); int currentPathSlot = 0; try (XContentParser parser = XContentHelper.createParser(response.getSourceAsBytesRef())) { XContentParser.Token currentToken; while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (currentToken == XContentParser.Token.FIELD_NAME) { if (pathElements[currentPathSlot].equals(parser.currentName())) { parser.nextToken(); if (++currentPathSlot == pathElements.length) { return ShapeBuilder.parse(parser); } } else { parser.nextToken(); parser.skipChildren(); } } } throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field"); } } public static SpatialArgs getArgs(ShapeBuilder shape, ShapeRelation relation) { switch (relation) { case DISJOINT: return new SpatialArgs(SpatialOperation.IsDisjointTo, shape.build()); case INTERSECTS: return new SpatialArgs(SpatialOperation.Intersects, shape.build()); case WITHIN: return new SpatialArgs(SpatialOperation.IsWithin, shape.build()); case CONTAINS: return new SpatialArgs(SpatialOperation.Contains, shape.build()); default: throw new IllegalArgumentException("invalid relation [" + relation + "]"); } } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); builder.startObject(fieldName); if (strategy != null) { builder.field(STRATEGY_FIELD.getPreferredName(), strategy.getStrategyName()); } if (shape != null) { builder.field(SHAPE_FIELD.getPreferredName()); shape.toXContent(builder, params); } else { builder.startObject(INDEXED_SHAPE_FIELD.getPreferredName()) .field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId) .field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType); if (indexedShapeIndex != null) { builder.field(SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex); } if (indexedShapePath != null) { builder.field(SHAPE_PATH_FIELD.getPreferredName(), indexedShapePath); } builder.endObject(); } if(relation != null) { builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName()); } builder.endObject(); builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped); printBoostAndQueryName(builder); builder.endObject(); } public static Optional<GeoShapeQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = null; ShapeRelation shapeRelation = null; SpatialStrategy strategy = null; ShapeBuilder shape = null; String id = null; String type = null; String index = null; String shapePath = null; 
XContentParser.Token token; String currentFieldName = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if (fieldName != null) { throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] point specified twice. [" + currentFieldName + "]"); } fieldName = currentFieldName; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); token = parser.nextToken(); if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) { shape = ShapeBuilder.parse(parser); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) { String strategyName = parser.text(); strategy = SpatialStrategy.fromString(strategyName); if (strategy == null) { throw new ParsingException(parser.getTokenLocation(), "Unknown strategy [" + strategyName + " ]"); } } else if (parseContext.getParseFieldMatcher().match(currentFieldName, RELATION_FIELD)) { shapeRelation = ShapeRelation.getRelationByName(parser.text()); if (shapeRelation == null) { throw new ParsingException(parser.getTokenLocation(), "Unknown shape operation [" + parser.text() + " ]"); } } else if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEXED_SHAPE_FIELD)) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_ID_FIELD)) { id = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_TYPE_FIELD)) { type = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_INDEX_FIELD)) { index = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) { shapePath = parser.text(); } } else { throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } } else { throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } } } } else if (token.isValue()) { if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { boost = parser.floatValue(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { queryName = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) { ignoreUnmapped = parser.booleanValue(); } else { throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } } } GeoShapeQueryBuilder builder; if (shape != null) { builder = new GeoShapeQueryBuilder(fieldName, shape); } else { builder = new GeoShapeQueryBuilder(fieldName, id, type); } if (index != null) { builder.indexedShapeIndex(index); } if (shapePath != null) { builder.indexedShapePath(shapePath); } if (shapeRelation != null) { builder.relation(shapeRelation); } if (strategy != null) { 
builder.strategy(strategy); } if (queryName != null) { builder.queryName(queryName); } builder.boost(boost); builder.ignoreUnmapped(ignoreUnmapped); return Optional.of(builder); } @Override protected boolean doEquals(GeoShapeQueryBuilder other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(indexedShapeId, other.indexedShapeId) && Objects.equals(indexedShapeIndex, other.indexedShapeIndex) && Objects.equals(indexedShapePath, other.indexedShapePath) && Objects.equals(indexedShapeType, other.indexedShapeType) && Objects.equals(relation, other.relation) && Objects.equals(shape, other.shape) && Objects.equals(strategy, other.strategy) && Objects.equals(ignoreUnmapped, other.ignoreUnmapped); } @Override protected int doHashCode() { return Objects.hash(fieldName, indexedShapeId, indexedShapeIndex, indexedShapePath, indexedShapeType, relation, shape, strategy, ignoreUnmapped); } @Override public String getWriteableName() { return NAME; } @Override protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { if (this.shape == null) { GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId); ShapeBuilder shape = fetch(queryShardContext.getClient(), getRequest, indexedShapePath); return new GeoShapeQueryBuilder(this.fieldName, shape).relation(relation).strategy(strategy); } return this; } }
core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java
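A brief usage sketch built only from the constructors and setters visible in GeoShapeQueryBuilder above; the field name ("location"), shape id/type ("deu"/"countries") and index name are made-up values for illustration. It assumes an enclosing method and the usual imports (org.elasticsearch.index.query.GeoShapeQueryBuilder, org.elasticsearch.common.geo.ShapeRelation).

// Query a geo_shape field against a shape that is already indexed in another index.
GeoShapeQueryBuilder query =
    new GeoShapeQueryBuilder("location", "deu", "countries") // field name, indexed shape id, indexed shape type
        .indexedShapeIndex("shapes")      // where the indexed shape lives (default DEFAULT_SHAPE_INDEX_NAME, "shapes")
        .indexedShapePath("location")     // path of the shape field in that document (default "shape")
        .relation(ShapeRelation.WITHIN)   // INTERSECTS (default), DISJOINT, WITHIN or CONTAINS
        .ignoreUnmapped(true);            // return no hits instead of failing if the field is unmapped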
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.query.SpatialArgs; import org.apache.lucene.spatial.query.SpatialOperation; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import java.io.IOException; import java.util.Objects; import java.util.Optional; /** * {@link QueryBuilder} that builds a GeoShape Query */ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuilder> { public static final String NAME = "geo_shape"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes"; public static final String DEFAULT_SHAPE_FIELD_NAME = "shape"; public static final ShapeRelation DEFAULT_SHAPE_RELATION = ShapeRelation.INTERSECTS; /** * The default value for ignore_unmapped. 
*/ public static final boolean DEFAULT_IGNORE_UNMAPPED = false; private static final ParseField SHAPE_FIELD = new ParseField("shape"); private static final ParseField STRATEGY_FIELD = new ParseField("strategy"); private static final ParseField RELATION_FIELD = new ParseField("relation"); private static final ParseField INDEXED_SHAPE_FIELD = new ParseField("indexed_shape"); private static final ParseField SHAPE_ID_FIELD = new ParseField("id"); private static final ParseField SHAPE_TYPE_FIELD = new ParseField("type"); private static final ParseField SHAPE_INDEX_FIELD = new ParseField("index"); private static final ParseField SHAPE_PATH_FIELD = new ParseField("path"); private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped"); private final String fieldName; private final ShapeBuilder shape; private SpatialStrategy strategy; private final String indexedShapeId; private final String indexedShapeType; private String indexedShapeIndex = DEFAULT_SHAPE_INDEX_NAME; private String indexedShapePath = DEFAULT_SHAPE_FIELD_NAME; private ShapeRelation relation = DEFAULT_SHAPE_RELATION; private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; /** * Creates a new GeoShapeQueryBuilder whose Query will be against the given * field name using the given Shape * * @param fieldName * Name of the field that will be queried * @param shape * Shape used in the Query */ public GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape) { this(fieldName, shape, null, null); } /** * Creates a new GeoShapeQueryBuilder whose Query will be against the given * field name and will use the Shape found with the given ID in the given * type * * @param fieldName * Name of the field that will be filtered * @param indexedShapeId * ID of the indexed Shape that will be used in the Query * @param indexedShapeType * Index type of the indexed Shapes */ public GeoShapeQueryBuilder(String fieldName, String indexedShapeId, String indexedShapeType) { this(fieldName, (ShapeBuilder) null, indexedShapeId, indexedShapeType); } private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexedShapeId, String indexedShapeType) { if (fieldName == null) { throw new IllegalArgumentException("fieldName is required"); } if (shape == null && indexedShapeId == null) { throw new IllegalArgumentException("either shapeBytes or indexedShapeId and indexedShapeType are required"); } if (indexedShapeId != null && indexedShapeType == null) { throw new IllegalArgumentException("indexedShapeType is required if indexedShapeId is specified"); } this.fieldName = fieldName; this.shape = shape; this.indexedShapeId = indexedShapeId; this.indexedShapeType = indexedShapeType; } /** * Read from a stream. 
*/ public GeoShapeQueryBuilder(StreamInput in) throws IOException { super(in); fieldName = in.readString(); if (in.readBoolean()) { shape = in.readNamedWriteable(ShapeBuilder.class); indexedShapeId = null; indexedShapeType = null; } else { shape = null; indexedShapeId = in.readOptionalString(); indexedShapeType = in.readOptionalString(); indexedShapeIndex = in.readOptionalString(); indexedShapePath = in.readOptionalString(); } relation = ShapeRelation.readFromStream(in); strategy = in.readOptionalWriteable(SpatialStrategy::readFromStream); ignoreUnmapped = in.readBoolean(); } @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); boolean hasShape = shape != null; out.writeBoolean(hasShape); if (hasShape) { out.writeNamedWriteable(shape); } else { out.writeOptionalString(indexedShapeId); out.writeOptionalString(indexedShapeType); out.writeOptionalString(indexedShapeIndex); out.writeOptionalString(indexedShapePath); } relation.writeTo(out); out.writeOptionalWriteable(strategy); out.writeBoolean(ignoreUnmapped); } /** * @return the name of the field that will be queried */ public String fieldName() { return fieldName; } /** * @return the shape used in the Query */ public ShapeBuilder shape() { return shape; } /** * @return the ID of the indexed Shape that will be used in the Query */ public String indexedShapeId() { return indexedShapeId; } /** * @return the document type of the indexed Shape that will be used in the * Query */ public String indexedShapeType() { return indexedShapeType; } /** * Defines which spatial strategy will be used for building the geo shape * Query. When not set, the strategy that will be used will be the one that * is associated with the geo shape field in the mappings. * * @param strategy * The spatial strategy to use for building the geo shape Query * @return this */ public GeoShapeQueryBuilder strategy(SpatialStrategy strategy) { if (strategy != null && strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) { throw new IllegalArgumentException("strategy [" + strategy.getStrategyName() + "] only supports relation [" + ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]"); } this.strategy = strategy; return this; } /** * @return The spatial strategy to use for building the geo shape Query */ public SpatialStrategy strategy() { return strategy; } /** * Sets the name of the index where the indexed Shape can be found * * @param indexedShapeIndex Name of the index where the indexed Shape is * @return this */ public GeoShapeQueryBuilder indexedShapeIndex(String indexedShapeIndex) { this.indexedShapeIndex = indexedShapeIndex; return this; } /** * @return the index name for the indexed Shape that will be used in the * Query */ public String indexedShapeIndex() { return indexedShapeIndex; } /** * Sets the path of the field in the indexed Shape document that has the Shape itself * * @param indexedShapePath Path of the field where the Shape itself is defined * @return this */ public GeoShapeQueryBuilder indexedShapePath(String indexedShapePath) { this.indexedShapePath = indexedShapePath; return this; } /** * @return the path of the indexed Shape that will be used in the Query */ public String indexedShapePath() { return indexedShapePath; } /** * Sets the relation of query shape and indexed shape. 
* * @param relation relation of the shapes * @return this */ public GeoShapeQueryBuilder relation(ShapeRelation relation) { if (relation == null) { throw new IllegalArgumentException("No Shape Relation defined"); } if (strategy != null && strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) { throw new IllegalArgumentException("current strategy [" + strategy.getStrategyName() + "] only supports relation [" + ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]"); } this.relation = relation; return this; } /** * @return the relation of query shape and indexed shape to use in the Query */ public ShapeRelation relation() { return relation; } /** * Sets whether the query builder should ignore unmapped fields (and run a * {@link MatchNoDocsQuery} in place of this query) or throw an exception if * the field is unmapped. */ public GeoShapeQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) { this.ignoreUnmapped = ignoreUnmapped; return this; } /** * Gets whether the query builder will ignore unmapped fields (and run a * {@link MatchNoDocsQuery} in place of this query) or throw an exception if * the field is unmapped. */ public boolean ignoreUnmapped() { return ignoreUnmapped; } @Override protected Query doToQuery(QueryShardContext context) { if (shape == null) { throw new UnsupportedOperationException("query must be rewritten first"); } final ShapeBuilder shapeToQuery = shape; final MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType == null) { if (ignoreUnmapped) { return new MatchNoDocsQuery(); } else { throw new QueryShardException(context, "failed to find geo_shape field [" + fieldName + "]"); } } // TODO: This isn't the nicest way to check this if (!(fieldType instanceof GeoShapeFieldMapper.GeoShapeFieldType)) { throw new QueryShardException(context, "Field [" + fieldName + "] is not a geo_shape"); } final GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType; PrefixTreeStrategy strategy = shapeFieldType.defaultStrategy(); if (this.strategy != null) { strategy = shapeFieldType.resolveStrategy(this.strategy); } Query query; if (strategy instanceof RecursivePrefixTreeStrategy && relation == ShapeRelation.DISJOINT) { // this strategy doesn't support disjoint anymore: but it did // before, including creating lucene fieldcache (!) // in this case, execute disjoint as exists && !intersects BooleanQuery.Builder bool = new BooleanQuery.Builder(); Query exists = ExistsQueryBuilder.newFilter(context, fieldName); Query intersects = strategy.makeQuery(getArgs(shapeToQuery, ShapeRelation.INTERSECTS)); bool.add(exists, BooleanClause.Occur.MUST); bool.add(intersects, BooleanClause.Occur.MUST_NOT); query = new ConstantScoreQuery(bool.build()); } else { query = new ConstantScoreQuery(strategy.makeQuery(getArgs(shapeToQuery, relation))); } return query; } /** * Fetches the Shape with the given ID in the given type and index. 
* * @param getRequest * GetRequest containing index, type and id * @param path * Name or path of the field in the Shape Document where the * Shape itself is located * @return Shape with the given ID * @throws IOException * Can be thrown while parsing the Shape Document and extracting * the Shape */ private ShapeBuilder fetch(Client client, GetRequest getRequest, String path) throws IOException { if (ShapesAvailability.JTS_AVAILABLE == false) { throw new IllegalStateException("JTS not available"); } getRequest.preference("_local"); getRequest.operationThreaded(false); GetResponse response = client.get(getRequest).actionGet(); if (!response.isExists()) { throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] not found"); } if (response.isSourceEmpty()) { throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] source disabled"); } String[] pathElements = path.split("\\."); int currentPathSlot = 0; try (XContentParser parser = XContentHelper.createParser(response.getSourceAsBytesRef())) { XContentParser.Token currentToken; while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (currentToken == XContentParser.Token.FIELD_NAME) { if (pathElements[currentPathSlot].equals(parser.currentName())) { parser.nextToken(); if (++currentPathSlot == pathElements.length) { return ShapeBuilder.parse(parser); } } else { parser.nextToken(); parser.skipChildren(); } } } throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field"); } } public static SpatialArgs getArgs(ShapeBuilder shape, ShapeRelation relation) { switch (relation) { case DISJOINT: return new SpatialArgs(SpatialOperation.IsDisjointTo, shape.build()); case INTERSECTS: return new SpatialArgs(SpatialOperation.Intersects, shape.build()); case WITHIN: return new SpatialArgs(SpatialOperation.IsWithin, shape.build()); case CONTAINS: return new SpatialArgs(SpatialOperation.Contains, shape.build()); default: throw new IllegalArgumentException("invalid relation [" + relation + "]"); } } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); builder.startObject(fieldName); if (strategy != null) { builder.field(STRATEGY_FIELD.getPreferredName(), strategy.getStrategyName()); } if (shape != null) { builder.field(SHAPE_FIELD.getPreferredName()); shape.toXContent(builder, params); } else { builder.startObject(INDEXED_SHAPE_FIELD.getPreferredName()) .field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId) .field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType); if (indexedShapeIndex != null) { builder.field(SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex); } if (indexedShapePath != null) { builder.field(SHAPE_PATH_FIELD.getPreferredName(), indexedShapePath); } builder.endObject(); } if(relation != null) { builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName()); } builder.endObject(); builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped); printBoostAndQueryName(builder); builder.endObject(); } public static Optional<GeoShapeQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = null; ShapeRelation shapeRelation = null; SpatialStrategy strategy = null; ShapeBuilder shape = null; String id = null; String type = null; String index = null; String shapePath = null; 
XContentParser.Token token; String currentFieldName = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if (fieldName != null) { throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] point specified twice. [" + currentFieldName + "]"); } fieldName = currentFieldName; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); token = parser.nextToken(); if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) { shape = ShapeBuilder.parse(parser); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) { String strategyName = parser.text(); strategy = SpatialStrategy.fromString(strategyName); if (strategy == null) { throw new ParsingException(parser.getTokenLocation(), "Unknown strategy [" + strategyName + " ]"); } } else if (parseContext.getParseFieldMatcher().match(currentFieldName, RELATION_FIELD)) { shapeRelation = ShapeRelation.getRelationByName(parser.text()); if (shapeRelation == null) { throw new ParsingException(parser.getTokenLocation(), "Unknown shape operation [" + parser.text() + " ]"); } } else if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEXED_SHAPE_FIELD)) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_ID_FIELD)) { id = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_TYPE_FIELD)) { type = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_INDEX_FIELD)) { index = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) { shapePath = parser.text(); } } else { throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } } else { throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } } } } else if (token.isValue()) { if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { boost = parser.floatValue(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { queryName = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) { ignoreUnmapped = parser.booleanValue(); } else { throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } } } GeoShapeQueryBuilder builder; if (shape != null) { builder = new GeoShapeQueryBuilder(fieldName, shape); } else { builder = new GeoShapeQueryBuilder(fieldName, id, type); } if (index != null) { builder.indexedShapeIndex(index); } if (shapePath != null) { builder.indexedShapePath(shapePath); } if (shapeRelation != null) { builder.relation(shapeRelation); } if (strategy != null) { 
builder.strategy(strategy); } if (queryName != null) { builder.queryName(queryName); } builder.boost(boost); builder.ignoreUnmapped(ignoreUnmapped); return Optional.of(builder); } @Override protected boolean doEquals(GeoShapeQueryBuilder other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(indexedShapeId, other.indexedShapeId) && Objects.equals(indexedShapeIndex, other.indexedShapeIndex) && Objects.equals(indexedShapePath, other.indexedShapePath) && Objects.equals(indexedShapeType, other.indexedShapeType) && Objects.equals(relation, other.relation) && Objects.equals(shape, other.shape) && Objects.equals(strategy, other.strategy) && Objects.equals(ignoreUnmapped, other.ignoreUnmapped); } @Override protected int doHashCode() { return Objects.hash(fieldName, indexedShapeId, indexedShapeIndex, indexedShapePath, indexedShapeType, relation, shape, strategy, ignoreUnmapped); } @Override public String getWriteableName() { return NAME; } @Override protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { if (this.shape == null) { GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId); ShapeBuilder shape = fetch(queryShardContext.getClient(), getRequest, indexedShapePath); return new GeoShapeQueryBuilder(this.fieldName, shape).relation(relation).strategy(strategy); } return this; } }
fix checkstyle issue
core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java
fix checkstyle issue
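The GeoShapeQueryBuilder in the record above exposes a small fluent API: two constructors plus relation(), indexedShapeIndex(), indexedShapePath() and ignoreUnmapped(), all returning the builder. A minimal usage sketch, assuming an Elasticsearch 5.x classpath; the field name "location", shape id "region_1", type "region" and index name "shapes" are placeholder values, not taken from the record.

import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;

public class GeoShapeQueryExample {

    // Builds a geo_shape query against a pre-indexed shape; all names are placeholders.
    public static GeoShapeQueryBuilder regionQuery() {
        GeoShapeQueryBuilder query = new GeoShapeQueryBuilder("location", "region_1", "region");
        return query
                .indexedShapeIndex("shapes")      // index holding the referenced shape document
                .indexedShapePath("shape")        // field inside that document containing the shape
                .relation(ShapeRelation.WITHIN)   // the TERM strategy would only allow INTERSECTS
                .ignoreUnmapped(true);            // match nothing instead of failing on unmapped fields
    }
}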
Java
apache-2.0
2218c3da2d61b19e76f4a670b0469186580432e0
0
openwide-java/owsi-core-parent,openwide-java/owsi-core-parent,openwide-java/owsi-core-parent,openwide-java/owsi-core-parent
package fr.openwide.core.showcase.web.application.util.template.styles; import java.util.List; import org.apache.wicket.markup.head.CssHeaderItem; import org.apache.wicket.markup.head.HeaderItem; import com.google.common.collect.Lists; import fr.openwide.core.wicket.more.lesscss.LessCssResourceReference; import fr.openwide.core.wicket.more.markup.html.template.css.bootstrap2.jqueryui.JQueryUiCssResourceReference; public class StylesLessCssResourceReference extends LessCssResourceReference { private static final long serialVersionUID = 4656765761895221782L; private static final StylesLessCssResourceReference INSTANCE = new StylesLessCssResourceReference(); private StylesLessCssResourceReference() { super(StylesLessCssResourceReference.class, "styles.less"); } @Override public Iterable<? extends HeaderItem> getDependencies() { List<HeaderItem> dependencies = Lists.newArrayListWithExpectedSize(1); dependencies.add(CssHeaderItem.forReference(JQueryUiCssResourceReference.get())); return dependencies; } public static StylesLessCssResourceReference get() { return INSTANCE; } }
owsi-core/owsi-core-examples/wicket-showcase/wicket-showcase-webapp/src/main/java/fr/openwide/core/showcase/web/application/util/template/styles/StylesLessCssResourceReference.java
package fr.openwide.core.showcase.web.application.util.template.styles; import java.util.List; import org.apache.wicket.markup.head.CssHeaderItem; import org.apache.wicket.markup.head.HeaderItem; import com.google.common.collect.Lists; import fr.openwide.core.wicket.more.lesscss.LessCssResourceReference; import fr.openwide.core.wicket.more.markup.html.template.css.jqueryui.JQueryUiCssResourceReference; public class StylesLessCssResourceReference extends LessCssResourceReference { private static final long serialVersionUID = 4656765761895221782L; private static final StylesLessCssResourceReference INSTANCE = new StylesLessCssResourceReference(); private StylesLessCssResourceReference() { super(StylesLessCssResourceReference.class, "styles.less"); } @Override public Iterable<? extends HeaderItem> getDependencies() { List<HeaderItem> dependencies = Lists.newArrayListWithExpectedSize(1); dependencies.add(CssHeaderItem.forReference(JQueryUiCssResourceReference.get())); return dependencies; } public static StylesLessCssResourceReference get() { return INSTANCE; } }
bootstrap2 / bootstrap3 reorganization git-svn-id: 317c7b6ea9a0016e15d9372f7eb7e8aa97d3c3a3@2571 d3474844-eb8e-4abc-b058-2b321fed648b
owsi-core/owsi-core-examples/wicket-showcase/wicket-showcase-webapp/src/main/java/fr/openwide/core/showcase/web/application/util/template/styles/StylesLessCssResourceReference.java
bootstrap2 / bootstrap3 reorganization
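The resource reference in this record pulls its jQuery UI stylesheet in through getDependencies(), so a page only needs to render the reference itself. A sketch under the assumption of a Wicket 6 style page class; ShowcasePage is a made-up name, not part of the showcase module.

import org.apache.wicket.markup.head.CssHeaderItem;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.html.WebPage;

import fr.openwide.core.showcase.web.application.util.template.styles.StylesLessCssResourceReference;

public class ShowcasePage extends WebPage {

    private static final long serialVersionUID = 1L;

    @Override
    public void renderHead(IHeaderResponse response) {
        super.renderHead(response);
        // The jQuery UI CSS declared in getDependencies() is contributed automatically before this item
        response.render(CssHeaderItem.forReference(StylesLessCssResourceReference.get()));
    }
}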
Java
apache-2.0
62d7999bd8e8f58cde2bcdc97a1368d1b8b5c236
0
leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere
/* * Copyright 2016-2018 shardingsphere.io. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * </p> */ package io.shardingsphere.jdbc.orchestration.spring.namespace.parser; import com.google.common.base.Strings; import io.shardingsphere.jdbc.orchestration.spring.namespace.constants.EtcdRegistryCenterBeanDefinitionParserTag; import io.shardingsphere.jdbc.orchestration.spring.namespace.constants.ZookeeperRegistryCenterBeanDefinitionParserTag; import io.shardingsphere.orchestration.reg.etcd.EtcdConfiguration; import io.shardingsphere.orchestration.reg.zookeeper.ZookeeperConfiguration; import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser; import org.springframework.beans.factory.xml.ParserContext; import org.w3c.dom.Element; /** * Registry parser for spring namespace. * * @author panjuan */ public final class RegBeanDefinitionParser extends AbstractBeanDefinitionParser { @Override protected AbstractBeanDefinition parseInternal(final Element element, final ParserContext parserContext) { return ZookeeperRegistryCenterBeanDefinitionParserTag.ROOT_TAG.equals(element.getLocalName()) ? getZookeeperConfiguration(element) : getEtcdConfiguration(element); } private AbstractBeanDefinition getZookeeperConfiguration(final Element element) { BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(ZookeeperConfiguration.class); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.SERVER_LISTS_TAG, "serverLists", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.NAMESPACE_TAG, "namespace", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.BASE_SLEEP_TIME_MILLISECONDS_TAG, "baseSleepTimeMilliseconds", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.MAX_SLEEP_TIME_MILLISECONDS_TAG, "maxSleepTimeMilliseconds", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.MAX_RETRIES_TAG, "maxRetries", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.SESSION_TIMEOUT_MILLISECONDS_TAG, "sessionTimeoutMilliseconds", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.CONNECTION_TIMEOUT_MILLISECONDS_TAG, "connectionTimeoutMilliseconds", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.DIGEST_TAG, "digest", element, factory); return factory.getBeanDefinition(); } private AbstractBeanDefinition getEtcdConfiguration(final Element element) { BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(EtcdConfiguration.class); addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.SERVER_LISTS_TAG, "serverLists", element, factory); 
addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.TIME_TO_LIVE_SECONDS_TAG, "timeToLiveSeconds", element, factory); addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.TIMEOUT_MILLISECONDS_TAG, "timeoutMilliseconds", element, factory); addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.RETRY_INTERVAL_MILLISECONDS_TAG, "retryIntervalMilliseconds", element, factory); addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.MAX_RETRIES_TAG, "maxRetries", element, factory); return factory.getBeanDefinition(); } private void addPropertyValueIfNotEmpty(final String attributeName, final String propertyName, final Element element, final BeanDefinitionBuilder factory) { String attributeValue = element.getAttribute(attributeName); if (!Strings.isNullOrEmpty(attributeValue)) { factory.addPropertyValue(propertyName, attributeValue); } } }
sharding-orchestration/sharding-jdbc-orchestration-spring/sharding-jdbc-orchestration-spring-namespace/src/main/java/io/shardingsphere/jdbc/orchestration/spring/namespace/parser/RegBeanDefinitionParser.java
/* * Copyright 2016-2018 shardingsphere.io. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * </p> */ package io.shardingsphere.jdbc.orchestration.spring.namespace.parser; import com.google.common.base.Strings; import io.shardingsphere.jdbc.orchestration.spring.namespace.constants.EtcdRegistryCenterBeanDefinitionParserTag; import io.shardingsphere.jdbc.orchestration.spring.namespace.constants.ZookeeperRegistryCenterBeanDefinitionParserTag; import io.shardingsphere.orchestration.reg.etcd.EtcdConfiguration; import io.shardingsphere.orchestration.reg.zookeeper.ZookeeperConfiguration; import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser; import org.springframework.beans.factory.xml.ParserContext; import org.w3c.dom.Element; /** * Registry parser for spring namespace. * * @author panjuan */ public final class RegBeanDefinitionParser extends AbstractBeanDefinitionParser { @Override protected AbstractBeanDefinition parseInternal(final Element element, final ParserContext parserContext) { return ZookeeperRegistryCenterBeanDefinitionParserTag.ROOT_TAG.equals(element.getTagName()) ? getZookeeperConfiguration(element) : getEtcdConfiguration(element); } private AbstractBeanDefinition getZookeeperConfiguration(final Element element) { BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(ZookeeperConfiguration.class); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.SERVER_LISTS_TAG, "serverLists", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.NAMESPACE_TAG, "namespace", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.BASE_SLEEP_TIME_MILLISECONDS_TAG, "baseSleepTimeMilliseconds", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.MAX_SLEEP_TIME_MILLISECONDS_TAG, "maxSleepTimeMilliseconds", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.MAX_RETRIES_TAG, "maxRetries", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.SESSION_TIMEOUT_MILLISECONDS_TAG, "sessionTimeoutMilliseconds", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.CONNECTION_TIMEOUT_MILLISECONDS_TAG, "connectionTimeoutMilliseconds", element, factory); addPropertyValueIfNotEmpty(ZookeeperRegistryCenterBeanDefinitionParserTag.DIGEST_TAG, "digest", element, factory); return factory.getBeanDefinition(); } private AbstractBeanDefinition getEtcdConfiguration(final Element element) { BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(EtcdConfiguration.class); addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.SERVER_LISTS_TAG, "serverLists", element, factory); 
addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.TIME_TO_LIVE_SECONDS_TAG, "timeToLiveSeconds", element, factory); addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.TIMEOUT_MILLISECONDS_TAG, "timeoutMilliseconds", element, factory); addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.RETRY_INTERVAL_MILLISECONDS_TAG, "retryIntervalMilliseconds", element, factory); addPropertyValueIfNotEmpty(EtcdRegistryCenterBeanDefinitionParserTag.MAX_RETRIES_TAG, "maxRetries", element, factory); return factory.getBeanDefinition(); } private void addPropertyValueIfNotEmpty(final String attributeName, final String propertyName, final Element element, final BeanDefinitionBuilder factory) { String attributeValue = element.getAttribute(attributeName); if (!Strings.isNullOrEmpty(attributeValue)) { factory.addPropertyValue(propertyName, attributeValue); } } }
get local name
sharding-orchestration/sharding-jdbc-orchestration-spring/sharding-jdbc-orchestration-spring-namespace/src/main/java/io/shardingsphere/jdbc/orchestration/spring/namespace/parser/RegBeanDefinitionParser.java
get local name
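The only functional change in this record is Element.getTagName() becoming Element.getLocalName(): with namespace-aware parsing the tag name carries the namespace prefix while the local name does not. A standalone sketch of the difference; the prefix and namespace URI below are invented for illustration.

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Element;

public class LocalNameDemo {

    public static void main(String[] args) throws Exception {
        String xml = "<reg:zookeeper xmlns:reg=\"http://example.org/reg\" server-lists=\"localhost:2181\"/>";
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true); // without this, getLocalName() returns null
        Element root = factory.newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)))
                .getDocumentElement();
        System.out.println(root.getTagName());   // reg:zookeeper  (qualified name, prefix included)
        System.out.println(root.getLocalName()); // zookeeper      (local part only)
    }
}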
Java
apache-2.0
7545d57ebc4b11f4784a94114820d092c27a99d5
0
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.hosted.controller.api.integration.organization; import java.time.Duration; import java.util.Optional; /** * @author jonmv */ public interface IssueHandler { /** * File an issue with its given property or the default, and with the specific assignee, if present. * * @param issue The issue to file. * @return ID of the created issue. */ IssueId file(Issue issue); /** * Returns the ID of this issue, if it exists and is open, based on a similarity search. * * @param issue The issue to search for; relevant fields are the summary and the owner (propertyId). * @return ID of the issue, if it is found. */ Optional<IssueId> findBySimilarity(Issue issue); /** * Update the description of the issue with the given ID. * * @param issueId ID of the issue to comment on. * @param description The updated description. */ void update(IssueId issueId, String description); /** * Add a comment to the issue with the given ID. * * @param issueId ID of the issue to comment on. * @param comment The comment to add. */ void commentOn(IssueId issueId, String comment); /** * Returns whether the issue is still under investigation. * * @param issueId ID of the issue to examine. * @return Whether the given issue is under investigation. */ boolean isOpen(IssueId issueId); /** * Returns whether there has been significant activity on the issue within the given duration. * * @param issueId ID of the issue to examine. * @return Whether the given issue is actively worked on. */ boolean isActive(IssueId issueId, Duration maxInactivity); /** * Returns the user assigned to the given issue, if any. * * @param issueId ID of the issue for which to find the assignee. * @return The user responsible for fixing the given issue, if found. */ Optional<User> assigneeOf(IssueId issueId); /** * Reassign the issue with the given ID to the given user, and returns the outcome of this. * * @param issueId ID of the issue to be reassigned. * @param assignee User to which the issue shall be assigned. * @return Whether the reassignment was successful. */ boolean reassign(IssueId issueId, User assignee); /** * Reassign the issue with the given ID to the given user, and returns the outcome of this. * * @param issueId ID of the issue to be watched. * @param watcher watcher to add to the issue. * @return Whether adding the watcher was successful. */ boolean addWatcher(IssueId issueId, String watcher); /** * Escalate an issue filed with the given property. * * @param issueId ID of the issue to escalate. * @return User that was assigned issue as a result of the escalation, if any */ Optional<User> escalate(IssueId issueId, Contact contact); /** * Returns whether there exists an issue with an exactly matching summary. * * @param issue The summary of the issue. * @return Whether the issue exists. */ boolean issueExists(Issue issue); /** * Returns information about project identified by the project key * * @param projectKey The project key to find information for * @return Project info for project * @throws RuntimeException exception if project not found */ ProjectInfo projectInfo(String projectKey); }
controller-api/src/main/java/com/yahoo/vespa/hosted/controller/api/integration/organization/IssueHandler.java
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.hosted.controller.api.integration.organization; import com.yahoo.slime.Inspector; import java.time.Duration; import java.util.Optional; /** * @author jonmv */ public interface IssueHandler { /** * File an issue with its given property or the default, and with the specific assignee, if present. * * @param issue The issue to file. * @return ID of the created issue. */ IssueId file(Issue issue); /** * Returns the ID of this issue, if it exists and is open, based on a similarity search. * * @param issue The issue to search for; relevant fields are the summary and the owner (propertyId). * @return ID of the issue, if it is found. */ Optional<IssueId> findBySimilarity(Issue issue); /** * Update the description of the issue with the given ID. * * @param issueId ID of the issue to comment on. * @param description The updated description. */ void update(IssueId issueId, String description); /** * Add a comment to the issue with the given ID. * * @param issueId ID of the issue to comment on. * @param comment The comment to add. */ void commentOn(IssueId issueId, String comment); /** * Returns whether the issue is still under investigation. * * @param issueId ID of the issue to examine. * @return Whether the given issue is under investigation. */ boolean isOpen(IssueId issueId); /** * Returns whether there has been significant activity on the issue within the given duration. * * @param issueId ID of the issue to examine. * @return Whether the given issue is actively worked on. */ boolean isActive(IssueId issueId, Duration maxInactivity); /** * Returns the user assigned to the given issue, if any. * * @param issueId ID of the issue for which to find the assignee. * @return The user responsible for fixing the given issue, if found. */ Optional<User> assigneeOf(IssueId issueId); /** * Reassign the issue with the given ID to the given user, and returns the outcome of this. * * @param issueId ID of the issue to be reassigned. * @param assignee User to which the issue shall be assigned. * @return Whether the reassignment was successful. */ boolean reassign(IssueId issueId, User assignee); /** * Reassign the issue with the given ID to the given user, and returns the outcome of this. * * @param issueId ID of the issue to be watched. * @param watcher watcher to add to the issue. * @return Whether adding the watcher was successful. */ boolean addWatcher(IssueId issueId, String watcher); /** * Escalate an issue filed with the given property. * * @param issueId ID of the issue to escalate. * @return User that was assigned issue as a result of the escalation, if any */ Optional<User> escalate(IssueId issueId, Contact contact); /** * Returns whether there exists an issue with an exactly matching summary. * * @param issue The summary of the issue. * @return Whether the issue exists. */ boolean issueExists(Issue issue); /** * Returns information about project identified by the project key * * @param projectKey The project key to find information for * @return Project info for project * @throws RuntimeException exception if project not found */ ProjectInfo projectInfo(String projectKey); }
remove stray added dependency
controller-api/src/main/java/com/yahoo/vespa/hosted/controller/api/integration/organization/IssueHandler.java
remove stray added dependency
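IssueHandler above is only an interface, but its methods compose naturally into a "file unless a similar issue is already open" flow. A sketch of such a caller; IssueFiler is not part of the codebase, and the Issue import is assumed to live in the same package as the interface.

import java.util.Optional;

import com.yahoo.vespa.hosted.controller.api.integration.organization.Issue;
import com.yahoo.vespa.hosted.controller.api.integration.organization.IssueHandler;
import com.yahoo.vespa.hosted.controller.api.integration.organization.IssueId;

public class IssueFiler {

    /** Files the issue unless an open, similar one already exists; in that case only a comment is added. */
    public static IssueId fileOrComment(IssueHandler handler, Issue issue, String comment) {
        Optional<IssueId> existing = handler.findBySimilarity(issue);
        if (existing.isPresent()) {
            handler.commentOn(existing.get(), comment);
            return existing.get();
        }
        return handler.file(issue);
    }
}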
Java
apache-2.0
7a6d1d8f0205b9e2349790d7f49cc27c8748229b
0
serge-rider/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,Sargul/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,dbeaver/dbeaver,Sargul/dbeaver
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2021 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ext.mssql.model; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.ext.mssql.SQLServerUtils; import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement; import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet; import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession; import org.jkiss.dbeaver.model.exec.jdbc.JDBCStatement; import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectLookupCache; import java.sql.SQLException; public class SQLServerExtendedPropertyCache extends JDBCObjectLookupCache<SQLServerExtendedPropertyOwner, SQLServerExtendedProperty> { @NotNull @Override public JDBCStatement prepareLookupStatement(@NotNull JDBCSession session, @NotNull SQLServerExtendedPropertyOwner owner, @Nullable SQLServerExtendedProperty object, @Nullable String objectName) throws SQLException { JDBCPreparedStatement dbStat = session.prepareStatement( "SELECT *, TYPE_ID(CAST(SQL_VARIANT_PROPERTY(value, 'BaseType') as nvarchar)) AS value_type" + " FROM " + SQLServerUtils.getExtendedPropsTableName(owner.getDatabase()) + " WHERE major_id=? AND minor_id=? AND class=? ORDER BY minor_id" ); dbStat.setLong(1, owner.getMajorObjectId()); dbStat.setLong(2, owner.getMinorObjectId()); dbStat.setLong(3, owner.getExtendedPropertyObjectClass().getClassId()); return dbStat; } @Nullable @Override protected SQLServerExtendedProperty fetchObject(@NotNull JDBCSession session, @NotNull SQLServerExtendedPropertyOwner owner, @NotNull JDBCResultSet resultSet) throws SQLException, DBException { return new SQLServerExtendedProperty(session.getProgressMonitor(), owner, resultSet); } }
plugins/org.jkiss.dbeaver.ext.mssql/src/org/jkiss/dbeaver/ext/mssql/model/SQLServerExtendedPropertyCache.java
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2021 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ext.mssql.model; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.ext.mssql.SQLServerUtils; import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement; import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet; import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession; import org.jkiss.dbeaver.model.exec.jdbc.JDBCStatement; import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectLookupCache; import java.sql.SQLException; public class SQLServerExtendedPropertyCache extends JDBCObjectLookupCache<SQLServerExtendedPropertyOwner, SQLServerExtendedProperty> { @NotNull @Override public JDBCStatement prepareLookupStatement(@NotNull JDBCSession session, @NotNull SQLServerExtendedPropertyOwner owner, @Nullable SQLServerExtendedProperty object, @Nullable String objectName) throws SQLException { JDBCPreparedStatement dbStat = session.prepareStatement( "SELECT *, TYPE_ID(CAST(SQL_VARIANT_PROPERTY(value, 'BaseType') as nvarchar)) AS value_type" + " FROM " + SQLServerUtils.getExtendedPropsTableName(owner.getDatabase()) + " WHERE major_id=? AND minor_id=? ORDER BY minor_id" ); dbStat.setLong(1, owner.getMajorObjectId()); dbStat.setLong(2, owner.getMinorObjectId()); return dbStat; } @Nullable @Override protected SQLServerExtendedProperty fetchObject(@NotNull JDBCSession session, @NotNull SQLServerExtendedPropertyOwner owner, @NotNull JDBCResultSet resultSet) throws SQLException, DBException { return new SQLServerExtendedProperty(session.getProgressMonitor(), owner, resultSet); } }
SQL Server: Extended property read fix (associate property's class)
plugins/org.jkiss.dbeaver.ext.mssql/src/org/jkiss/dbeaver/ext/mssql/model/SQLServerExtendedPropertyCache.java
SQL Server: Extended property read fix (associate property's class)
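The fix in this record adds a class=? predicate and the matching setLong(3, ...) bind, so extended properties of one object no longer leak into another object that shares major_id/minor_id but has a different class. The same three-parameter lookup in plain JDBC, outside DBeaver's cache machinery, looks roughly like the sketch below; ExtendedPropertyLookup is a hypothetical helper, and sys.extended_properties is the standard SQL Server catalog view that stores these properties.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public final class ExtendedPropertyLookup {

    public static List<String> propertyNames(Connection connection, long majorId, long minorId, long classId)
            throws SQLException {
        List<String> names = new ArrayList<>();
        try (PreparedStatement statement = connection.prepareStatement(
                "SELECT name FROM sys.extended_properties WHERE major_id=? AND minor_id=? AND class=? ORDER BY minor_id")) {
            // One bind per '?' placeholder, in order; the class bind is the new third parameter
            statement.setLong(1, majorId);
            statement.setLong(2, minorId);
            statement.setLong(3, classId);
            try (ResultSet resultSet = statement.executeQuery()) {
                while (resultSet.next()) {
                    names.add(resultSet.getString("name"));
                }
            }
        }
        return names;
    }
}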
Java
apache-2.0
99b18323d1cf85c0de7394e9fb1eddf7a2211913
0
chunlinyao/fop,apache/fop,apache/fop,chunlinyao/fop,chunlinyao/fop,apache/fop,chunlinyao/fop,apache/fop,chunlinyao/fop,apache/fop
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id: $ */ package org.apache.fop.fonts.autodetect; import java.io.IOException; import java.net.URL; import java.net.URLConnection; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.regex.Pattern; import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.fop.fonts.CachedFontInfo; import org.apache.fop.fonts.CustomFont; import org.apache.fop.fonts.EmbedFontInfo; import org.apache.fop.fonts.Font; import org.apache.fop.fonts.FontCache; import org.apache.fop.fonts.FontLoader; import org.apache.fop.fonts.FontResolver; import org.apache.fop.fonts.FontTriplet; import org.apache.fop.fonts.FontUtil; /** * Attempts to determine correct FontInfo */ public class FontInfoFinder { /** logging instance */ private Log log = LogFactory.getLog(FontInfoFinder.class); /** * Attempts to determine FontTriplets from a given CustomFont. * It seems to be fairly accurate but will probably require some tweaking over time * * @param customFont CustomFont * @param triplet Collection that will take the generated triplets */ private void generateTripletsFromFont(CustomFont customFont, Collection triplets) { if (log.isTraceEnabled()) { log.trace("Font: " + customFont.getFullName() + ", family: " + customFont.getFamilyNames() + ", PS: " + customFont.getFontName() + ", EmbedName: " + customFont.getEmbedFontName()); } // default style and weight triplet vales (fallback) String strippedName = stripQuotes(customFont.getStrippedFontName()); String subName = customFont.getFontSubName(); String searchName = strippedName.toLowerCase(); if (subName != null) { searchName += subName.toLowerCase(); } String style = guessStyle(customFont, searchName); int weight = FontUtil.guessWeight(searchName); //Full Name usually includes style/weight info so don't use these traits //If we still want to use these traits, we have to make FontInfo.fontLookup() smarter String fullName = stripQuotes(customFont.getFullName()); triplets.add(new FontTriplet(fullName, Font.STYLE_NORMAL, Font.WEIGHT_NORMAL)); if (!fullName.equals(strippedName)) { triplets.add(new FontTriplet(strippedName, Font.STYLE_NORMAL, Font.WEIGHT_NORMAL)); } Set familyNames = customFont.getFamilyNames(); Iterator iter = familyNames.iterator(); while (iter.hasNext()) { String familyName = stripQuotes((String)iter.next()); if (!fullName.equals(familyName)) { triplets.add(new FontTriplet(familyName, style, weight)); } } } private final Pattern quotePattern = Pattern.compile("'"); private String stripQuotes(String name) { return quotePattern.matcher(name).replaceAll(""); } private String guessStyle(CustomFont customFont, String fontName) { // style String style = 
Font.STYLE_NORMAL; if (customFont.getItalicAngle() > 0) { style = Font.STYLE_ITALIC; } else { style = FontUtil.guessStyle(fontName); } return style; } /** * Attempts to determine FontInfo from a given custom font * @param fontUrl the font URL * @param customFont the custom font * @param fontCache font cache (may be null) * @return */ private EmbedFontInfo fontInfoFromCustomFont( URL fontUrl, CustomFont customFont, FontCache fontCache) { List fontTripletList = new java.util.ArrayList(); generateTripletsFromFont(customFont, fontTripletList); String embedUrl; embedUrl = fontUrl.toExternalForm(); EmbedFontInfo fontInfo = new EmbedFontInfo(null, customFont.isKerningEnabled(), fontTripletList, embedUrl); if (fontCache != null) { fontCache.addFont(fontInfo); } return fontInfo; } /** * Attempts to determine EmbedFontInfo from a given font file. * * @param fontUrl font URL. Assumed to be local. * @param resolver font resolver used to resolve font * @param fontCache font cache (may be null) * @return newly created embed font info */ public EmbedFontInfo find(URL fontUrl, FontResolver resolver, FontCache fontCache) { String embedUrl = null; embedUrl = fontUrl.toExternalForm(); long fileLastModified = -1; if (fontCache != null) { try { URLConnection conn = fontUrl.openConnection(); try { fileLastModified = conn.getLastModified(); } finally { //An InputStream is created even if it's not accessed, but we need to close it. IOUtils.closeQuietly(conn.getInputStream()); } } catch (IOException e) { // Should never happen, because URL must be local log.debug("IOError: " + e.getMessage()); fileLastModified = 0; } // firstly try and fetch it from cache before loading/parsing the font file if (fontCache.containsFont(embedUrl)) { CachedFontInfo fontInfo = fontCache.getFont(embedUrl); if (fontInfo.lastModified() == fileLastModified) { return fontInfo; } else { // out of date cache item fontCache.removeFont(embedUrl); } // is this a previously failed parsed font? } else if (fontCache.isFailedFont(embedUrl, fileLastModified)) { if (log.isDebugEnabled()) { log.debug("Skipping font file that failed to load previously: " + embedUrl); } return null; } } // try to determine triplet information from font file CustomFont customFont = null; try { customFont = FontLoader.loadFont(fontUrl, resolver); } catch (Exception e) { //TODO Too verbose (it's an error but we don't care if some fonts can't be loaded) if (log.isErrorEnabled()) { log.error("Unable to load font file: " + embedUrl + ". Reason: " + e.getMessage()); } if (fontCache != null) { fontCache.registerFailedFont(embedUrl, fileLastModified); } return null; } return fontInfoFromCustomFont(fontUrl, customFont, fontCache); } }
src/java/org/apache/fop/fonts/autodetect/FontInfoFinder.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id: $ */ package org.apache.fop.fonts.autodetect; import java.io.IOException; import java.net.URL; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.fop.fonts.CachedFontInfo; import org.apache.fop.fonts.CustomFont; import org.apache.fop.fonts.EmbedFontInfo; import org.apache.fop.fonts.Font; import org.apache.fop.fonts.FontCache; import org.apache.fop.fonts.FontLoader; import org.apache.fop.fonts.FontResolver; import org.apache.fop.fonts.FontTriplet; import org.apache.fop.fonts.FontUtil; /** * Attempts to determine correct FontInfo */ public class FontInfoFinder { /** logging instance */ private Log log = LogFactory.getLog(FontInfoFinder.class); /** * Attempts to determine FontTriplets from a given CustomFont. * It seems to be fairly accurate but will probably require some tweaking over time * * @param customFont CustomFont * @param triplet Collection that will take the generated triplets */ private void generateTripletsFromFont(CustomFont customFont, Collection triplets) { if (log.isTraceEnabled()) { log.trace("Font: " + customFont.getFullName() + ", family: " + customFont.getFamilyNames() + ", PS: " + customFont.getFontName() + ", EmbedName: " + customFont.getEmbedFontName()); } // default style and weight triplet vales (fallback) String strippedName = customFont.getStrippedFontName(); String subName = customFont.getFontSubName(); String searchName = strippedName.toLowerCase(); if (subName != null) { searchName += subName.toLowerCase(); } String style = guessStyle(customFont, searchName); int weight = FontUtil.guessWeight(searchName); //Full Name usually includes style/weight info so don't use these traits //If we still want to use these traits, we have to make FontInfo.fontLookup() smarter String fullName = customFont.getFullName(); triplets.add(new FontTriplet(fullName, Font.STYLE_NORMAL, Font.WEIGHT_NORMAL)); if (!fullName.equals(strippedName)) { triplets.add(new FontTriplet(strippedName, Font.STYLE_NORMAL, Font.WEIGHT_NORMAL)); } Set familyNames = customFont.getFamilyNames(); Iterator iter = familyNames.iterator(); while (iter.hasNext()) { String familyName = (String)iter.next(); if (!fullName.equals(familyName)) { triplets.add(new FontTriplet(familyName, style, weight)); } } } private String guessStyle(CustomFont customFont, String fontName) { // style String style = Font.STYLE_NORMAL; if (customFont.getItalicAngle() > 0) { style = Font.STYLE_ITALIC; } else { style = FontUtil.guessStyle(fontName); } return style; } /** * Attempts to determine FontInfo from a given custom font * @param fontUrl the font URL * @param customFont the custom font * @param fontCache 
font cache (may be null) * @return */ private EmbedFontInfo fontInfoFromCustomFont( URL fontUrl, CustomFont customFont, FontCache fontCache) { List fontTripletList = new java.util.ArrayList(); generateTripletsFromFont(customFont, fontTripletList); String embedUrl; embedUrl = fontUrl.toExternalForm(); EmbedFontInfo fontInfo = new EmbedFontInfo(null, customFont.isKerningEnabled(), fontTripletList, embedUrl); if (fontCache != null) { fontCache.addFont(fontInfo); } return fontInfo; } /** * Attempts to determine EmbedFontInfo from a given font file. * * @param fontUrl font URL. Assumed to be local. * @param resolver font resolver used to resolve font * @param fontCache font cache (may be null) * @return newly created embed font info */ public EmbedFontInfo find(URL fontUrl, FontResolver resolver, FontCache fontCache) { String embedUrl = null; embedUrl = fontUrl.toExternalForm(); long fileLastModified = -1; if (fontCache != null) { try { fileLastModified = fontUrl.openConnection().getLastModified(); } catch (IOException e) { // Should never happen, because URL must be local log.debug("IOError: " + e.getMessage()); fileLastModified = 0; } // firstly try and fetch it from cache before loading/parsing the font file if (fontCache.containsFont(embedUrl)) { CachedFontInfo fontInfo = fontCache.getFont(embedUrl); if (fontInfo.lastModified() == fileLastModified) { return fontInfo; } else { // out of date cache item fontCache.removeFont(embedUrl); } // is this a previously failed parsed font? } else if (fontCache.isFailedFont(embedUrl, fileLastModified)) { if (log.isDebugEnabled()) { log.debug("Skipping font file that failed to load previously: " + embedUrl); } return null; } } // try to determine triplet information from font file CustomFont customFont = null; try { customFont = FontLoader.loadFont(fontUrl, resolver); } catch (Exception e) { //TODO Too verbose (it's an error but we don't care if some fonts can't be loaded) if (log.isErrorEnabled()) { log.error("Unable to load font file: " + embedUrl + ". Reason: " + e.getMessage()); } if (fontCache != null) { fontCache.registerFailedFont(embedUrl, fileLastModified); } return null; } return fontInfoFromCustomFont(fontUrl, customFont, fontCache); } }
Filter single quotes from the filenames as this will interfere with font-family parsing. Fixed bug: InputStream opened by URL.openConnection() wasn't closed which leads to "too many open files" when you have lots of fonts on Java 1.4. git-svn-id: 102839466c3b40dd9c7e25c0a1a6d26afc40150a@594516 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/fop/fonts/autodetect/FontInfoFinder.java
Filter single quotes from the filenames as this will interfere with font-family parsing. Fixed bug: InputStream opened by URL.openConnection() wasn't closed which leads to "too many open files" when you have lots of fonts on Java 1.4.
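The second half of that commit message is the more general lesson: on file: URLs, URLConnection creates an InputStream under the covers even when only getLastModified() is called, so scanning many fonts without closing it eventually hits "too many open files". A plain-JDK sketch of the same close-in-finally pattern used above, without the commons-io helper; LastModifiedProbe is an illustrative name.

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;

public final class LastModifiedProbe {

    /** Returns the last-modified timestamp of a (typically local) URL, making sure the
     *  stream the connection opens under the covers is closed so file handles are not leaked. */
    public static long lastModified(URL url) {
        try {
            URLConnection connection = url.openConnection();
            try {
                return connection.getLastModified();
            } finally {
                closeQuietly(connection);
            }
        } catch (IOException e) {
            return 0L; // same fallback as FontInfoFinder: treat unreadable URLs as "unknown"
        }
    }

    private static void closeQuietly(URLConnection connection) {
        try {
            InputStream in = connection.getInputStream();
            if (in != null) {
                in.close();
            }
        } catch (IOException ignored) {
            // nothing was opened, or it was already closed
        }
    }
}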
Java
apache-2.0
f3aa7fa702e3418268d06028ba7ca372765160ba
0
rabbitcount/jbpm,MetSystem/jbpm,jesuino/jbpm,lukenjmcd/jbpm,romartin/jbpm,jakubschwan/jbpm,sutaakar/jbpm,psakar/jbpm,droolsjbpm/jbpm,OnePaaS/jbpm,Aaron2000/jbpm,DuncanDoyle/jbpm,nmoghadam/BPIM,nmoghadam/BPIM,ibek/jbpm,domhanak/jbpm,selrahal/jbpm,rabbitcount/jbpm,romartin/jbpm,Salaboy/jbpm,akoskm/jbpm,ifu-lobuntu/jbpm,bxf12315/jbpm,ibek/jbpm,xingguang2013/jbpm-1,jomarko/jbpm,akoskm/jbpm,selrahal/jbpm,lukenjmcd/jbpm,jgoldsmith613/jbpm,sutaakar/jbpm,winklerm/jbpm,jomarko/jbpm,jesuino/jbpm,mrietveld/jbpm,livthomas/jbpm,droolsjbpm/jbpm,DuncanDoyle/jbpm,nmoghadam/jbpm,ifu-lobuntu/jbpm,winklerm/jbpm,pleacu/jbpm,Multi-Support/jbpm,Multi-Support/jbpm,Aaron2000/jbpm,nmoghadam/jbpm,jesuino/jbpm,DuncanDoyle/jbpm,Salaboy/jbpm,OnePaaS/jbpm,OnePaaS/jbpm,livthomas/jbpm,jgoldsmith613/jbpm,karthikprabhu17/jbpm,selrahal/jbpm,jgoldsmith613/jbpm,karthikprabhu17/jbpm,domhanak/jbpm,ifu-lobuntu/jbpm,bxf12315/jbpm,psakar/jbpm,lukenjmcd/jbpm,winklerm/jbpm,ibek/jbpm,bxf12315/jbpm,nmoghadam/BPIM,jakubschwan/jbpm,xingguang2013/jbpm-1,livthomas/jbpm,akoskm/jbpm,jakubschwan/jbpm,rabbitcount/jbpm,pleacu/jbpm,karthikprabhu17/jbpm,droolsjbpm/jbpm,jomarko/jbpm,mrietveld/jbpm,Salaboy/jbpm,MetSystem/jbpm,pleacu/jbpm,MetSystem/jbpm,psakar/jbpm,mrietveld/jbpm,nmoghadam/jbpm,romartin/jbpm,xingguang2013/jbpm-1,Multi-Support/jbpm,Aaron2000/jbpm,sutaakar/jbpm,jomarko/jbpm,romartin/jbpm,domhanak/jbpm,jesuino/jbpm
package org.jbpm.services.task.commands; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import org.kie.api.task.model.TaskSummary; import org.kie.internal.command.Context; @XmlRootElement(name = "get-task-by-groups-item-command") @XmlAccessorType(XmlAccessType.NONE) public class GetTaskAssignedByGroupsCommand extends TaskCommand<List<TaskSummary>> { private static final long serialVersionUID = 6296898155907765061L; public GetTaskAssignedByGroupsCommand() { } public GetTaskAssignedByGroupsCommand(List<String> groupIds) { this.groupsIds = groupIds; } public List<TaskSummary> execute(Context cntxt) { TaskContext context = (TaskContext) cntxt; return context.getTaskQueryService().getTasksAssignedByGroups(groupsIds); } }
jbpm-human-task/jbpm-human-task-core/src/main/java/org/jbpm/services/task/commands/GetTaskAssignedByGroupsCommand.java
package org.jbpm.services.task.commands; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import org.kie.api.task.model.TaskSummary; import org.kie.internal.command.Context; @XmlRootElement(name = "get-task-by-groups-item-command") @XmlAccessorType(XmlAccessType.NONE) public class GetTaskAssignedByGroupsCommand extends TaskCommand<List<TaskSummary>> { private static final long serialVersionUID = 6296898155907765061L; @XmlElement private List<String> groupIds; public GetTaskAssignedByGroupsCommand() { } public GetTaskAssignedByGroupsCommand(List<String> groupIds) { this.groupIds = groupIds; } public List<String> getGroupIds() { return groupIds; } public void setGroupIds(List<String> groupIds) { this.groupIds = groupIds; } public List<TaskSummary> execute(Context cntxt) { TaskContext context = (TaskContext) cntxt; return context.getTaskQueryService().getTasksAssignedByGroups(groupIds); } }
BZ-1085958 - The kie-services-client can not be used with OSGi (jbpm-human-task-core command corrections)
jbpm-human-task/jbpm-human-task-core/src/main/java/org/jbpm/services/task/commands/GetTaskAssignedByGroupsCommand.java
BZ-1085958 - The kie-services-client can not be used with OSGi (jbpm-human-task-core command corrections)
Java
apache-2.0
3d7df10109a05f6db28aba45c469ea507c893ac0
0
tylertreat/Jockey
/* * Copyright (C) 2013 Clarion Media, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.clarionmedia.jockey; import com.clarionmedia.jockey.authentication.Authenticator; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; public final class HttpClientProxy implements InvocationHandler { private Authenticator mAuthenticator; public HttpClientProxy(Authenticator authenticator) { mAuthenticator = authenticator; } @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (method.getName().equals("execute") && !mAuthenticator.isAuthenticated()) { mAuthenticator.authenticate(); } return method.invoke(mAuthenticator.getHttpClient(), args); } }
src/main/java/com/clarionmedia/jockey/HttpClientProxy.java
/* * Copyright (C) 2013 Clarion Media, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.clarionmedia.jockey; import com.clarionmedia.jockey.authentication.Authenticator; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; public final class HttpClientProxy implements InvocationHandler { private Authenticator mAuthenticator; public HttpClientProxy(Authenticator authenticator) { mAuthenticator = authenticator; } @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (!mAuthenticator.isAuthenticated()) { mAuthenticator.authenticate(); } return method.invoke(mAuthenticator.getHttpClient(), args); } }
Only perform auth checks on HTTP requests
src/main/java/com/clarionmedia/jockey/HttpClientProxy.java
Only perform auth checks on HTTP requests
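A hypothetical wiring sketch for the InvocationHandler in the record above, not part of the commit; it assumes Authenticator.getHttpClient() returns an org.apache.http.client.HttpClient, so intercepting methods named "execute" covers that interface's request methods:

import java.lang.reflect.Proxy;

import org.apache.http.client.HttpClient;

import com.clarionmedia.jockey.HttpClientProxy;
import com.clarionmedia.jockey.authentication.Authenticator;

public final class AuthenticatingClientFactory {

    // Wraps the authenticator's client in a JDK dynamic proxy so that only
    // execute(...) calls trigger the authentication check shown above.
    public static HttpClient wrap(Authenticator authenticator) {
        return (HttpClient) Proxy.newProxyInstance(
                HttpClient.class.getClassLoader(),
                new Class<?>[] { HttpClient.class },
                new HttpClientProxy(authenticator));
    }
}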
Java
apache-2.0
0dc9c78293d670b39d3c8cedf5dc8eb3dc913da5
0
bogdansolga/spring-boot-training,bogdansolga/spring-boot-training
package net.safedata.springboot.training.d02.s05.config; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; import org.apache.tomcat.jdbc.pool.DataSource; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.springframework.transaction.annotation.EnableTransactionManagement; /** * A simple {@link javax.sql.DataSource} configuration, which: * <ul> * <li>configures the JPA repositories, using the {@link EnableJpaRepositories} annotation</li> * <li>configures a custom {@link javax.sql.DataSource}, using the {@link HikariDataSource} class</li> * </ul> * * @author bogdan.solga */ @Configuration @EnableJpaRepositories(basePackages = "net.safedata.springboot.training.d02.s05.repository") @EnableTransactionManagement public class CustomDataSourceConfig { private static final int AVAILABLE_PROCESSORS = Runtime.getRuntime().availableProcessors(); @Value("${spring.datasource.url}") private String url; @Value("${spring.datasource.username}") private String userName; @Value("${spring.datasource.password}") private String password; @Value("${spring.datasource.driver-class-name}") private String driverClassName; @Primary @Bean public javax.sql.DataSource hikariConnectionPool() { final HikariConfig hikariConfig = new HikariConfig(); hikariConfig.setPoolName("hikari-first-connection-pool"); hikariConfig.setMaximumPoolSize(AVAILABLE_PROCESSORS); hikariConfig.setConnectionTimeout(30000); hikariConfig.setIdleTimeout(60000); hikariConfig.setMaxLifetime(120000); hikariConfig.setJdbcUrl(url); hikariConfig.setUsername(userName); hikariConfig.setPassword(password); hikariConfig.setDriverClassName(driverClassName); return new HikariDataSource(hikariConfig); } @Bean public javax.sql.DataSource tomcatConnectionPool() { final DataSource dataSource = new DataSource(); dataSource.setName("tomcat-connection-pool"); dataSource.setUrl(url); dataSource.setUsername(userName); dataSource.setPassword(password); dataSource.setDriverClassName(driverClassName); dataSource.setMinIdle(2); dataSource.setMaxIdle(AVAILABLE_PROCESSORS / 2); dataSource.setMaxActive(AVAILABLE_PROCESSORS * 2); return dataSource; } }
d02/d02s05/d02s05e03-custom-datasource-config/src/main/java/net/safedata/springboot/training/d02/s05/config/CustomDataSourceConfig.java
package net.safedata.springboot.training.d02.s05.config; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; import org.apache.tomcat.jdbc.pool.DataSource; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.springframework.transaction.annotation.EnableTransactionManagement; /** * A simple {@link javax.sql.DataSource} configuration, which: * <ul> * <li>configures the JPA repositories, using the {@link EnableJpaRepositories} annotation</li> * <li>configures a custom {@link javax.sql.DataSource}, using the {@link HikariDataSource} class</li> * </ul> * * @author bogdan.solga */ @Configuration @EnableJpaRepositories(basePackages = "net.safedata.springboot.training.d02.s05.repository") @EnableTransactionManagement public class CustomDataSourceConfig { private static final int AVAILABLE_PROCESSORS = Runtime.getRuntime().availableProcessors(); @Value("${spring.datasource.url}") private String url; @Value("${spring.datasource.username}") private String userName; @Value("${spring.datasource.password}") private String password; @Value("${spring.datasource.driver-class-name}") private String driverClassName; // if the @Bean is commented, the configured data-source will be wired @Primary @Bean public javax.sql.DataSource hikariConnectionPool() { final HikariConfig hikariConfig = new HikariConfig(); hikariConfig.setPoolName("hikari-connection-pool"); hikariConfig.setMaximumPoolSize(AVAILABLE_PROCESSORS / 2); hikariConfig.setConnectionTimeout(30000); hikariConfig.setIdleTimeout(60000); hikariConfig.setMaxLifetime(120000); hikariConfig.setJdbcUrl(url); hikariConfig.setUsername(userName); hikariConfig.setPassword(password); hikariConfig.setDriverClassName(driverClassName); return new HikariDataSource(hikariConfig); } @Bean public javax.sql.DataSource tomcatConnectionPool() { final DataSource dataSource = new DataSource(); dataSource.setName("tomcat-connection-pool"); dataSource.setUrl(url); dataSource.setUsername(userName); dataSource.setPassword(password); dataSource.setDriverClassName(driverClassName); dataSource.setMinIdle(2); dataSource.setMaxIdle(AVAILABLE_PROCESSORS / 2); dataSource.setMaxActive(AVAILABLE_PROCESSORS * 2); return dataSource; } }
[clean] Removed a comment, improved some params
d02/d02s05/d02s05e03-custom-datasource-config/src/main/java/net/safedata/springboot/training/d02/s05/config/CustomDataSourceConfig.java
[clean] Removed a comment, improved some params
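A hypothetical consumer sketch, not part of the commit, showing how standard Spring injection resolves the two DataSource beans defined in the record above: the @Primary Hikari pool wins unqualified injection, while the Tomcat pool can be selected by bean name. It also assumes the four spring.datasource.* properties read via @Value are present in the environment.

import javax.sql.DataSource;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

@Component
public class PoolAwareComponent {

    private final DataSource primaryPool; // resolves to hikariConnectionPool() because it is marked @Primary
    private final DataSource tomcatPool;  // resolved explicitly by bean name

    public PoolAwareComponent(DataSource primaryPool,
                              @Qualifier("tomcatConnectionPool") DataSource tomcatPool) {
        this.primaryPool = primaryPool;
        this.tomcatPool = tomcatPool;
    }
}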
Java
apache-2.0
ca48326ad78dfe0c1216d81d4a4f3536b70faaff
0
berinle/jawr-core,berinle/jawr-core
/** * Copyright 2007-2011 Jordi Hernndez Sells, Ibrahim Chaehoi * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package net.jawr.web.resource.bundle.factory; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import net.jawr.web.JawrConstant; import net.jawr.web.config.JawrConfig; import net.jawr.web.exception.BundleDependencyException; import net.jawr.web.exception.BundlingProcessException; import net.jawr.web.exception.DuplicateBundlePathException; import net.jawr.web.resource.FileNameUtils; import net.jawr.web.resource.bundle.CompositeResourceBundle; import net.jawr.web.resource.bundle.InclusionPattern; import net.jawr.web.resource.bundle.JoinableResourceBundle; import net.jawr.web.resource.bundle.JoinableResourceBundleImpl; import net.jawr.web.resource.bundle.factory.global.postprocessor.BasicGlobalPostprocessorChainFactory; import net.jawr.web.resource.bundle.factory.global.postprocessor.GlobalPostProcessingContext; import net.jawr.web.resource.bundle.factory.global.postprocessor.GlobalPostprocessorChainFactory; import net.jawr.web.resource.bundle.factory.global.preprocessor.BasicGlobalPreprocessorChainFactory; import net.jawr.web.resource.bundle.factory.global.preprocessor.GlobalPreprocessingContext; import net.jawr.web.resource.bundle.factory.global.preprocessor.GlobalPreprocessorChainFactory; import net.jawr.web.resource.bundle.factory.mapper.OrphanResourceBundlesMapper; import net.jawr.web.resource.bundle.factory.mapper.ResourceBundleDirMapper; import net.jawr.web.resource.bundle.factory.postprocessor.CSSPostProcessorChainFactory; import net.jawr.web.resource.bundle.factory.postprocessor.JSPostProcessorChainFactory; import net.jawr.web.resource.bundle.factory.postprocessor.PostProcessorChainFactory; import net.jawr.web.resource.bundle.factory.util.PathNormalizer; import net.jawr.web.resource.bundle.factory.util.ResourceBundleDefinition; import net.jawr.web.resource.bundle.global.processor.GlobalProcessor; import net.jawr.web.resource.bundle.handler.CachedResourceBundlesHandler; import net.jawr.web.resource.bundle.handler.ResourceBundlesHandler; import net.jawr.web.resource.bundle.handler.ResourceBundlesHandlerImpl; import net.jawr.web.resource.bundle.postprocess.ResourceBundlePostProcessor; import net.jawr.web.resource.handler.bundle.ResourceBundleHandler; import net.jawr.web.resource.handler.reader.ResourceReaderHandler; import org.apache.log4j.Logger; /** * Factory to create a ResourceBundlesHandler as per configuration options set by the user. 
* * @author Jordi Hernndez Sells * @author Ibrahim Chaehoi * */ public class BundlesHandlerFactory { /** The logger */ private static final Logger LOGGER = Logger .getLogger(BundlesHandlerFactory.class); /** The flag indicating if we should use the in memory cache */ private boolean useInMemoryCache = true; /** The root directory for the resources */ private String baseDir = ""; /** The resource type */ private String resourceType; /** The file extension */ private String fileExtension; /** The keys of the global post processors */ private String globalPostProcessorKeys; /** The keys of the unitary post processors */ private String unitPostProcessorKeys; /** The keys of the global post composite processors */ private String globalCompositePostProcessorKeys; /** The keys of the unitary post composite processors */ private String unitCompositePostProcessorKeys; /** The keys of the resource type preprocessors */ private String resourceTypePreprocessorKeys; /** The keys of the resource type postprocessors */ private String resourceTypePostprocessorKeys; /** The set of bundle definitions */ private Set<ResourceBundleDefinition> bundleDefinitions; /** The set of bundle definitions with dependencies */ private Set<ResourceBundleDefinition> bundleDefinitionsWithDependencies; /** The resource handler */ private ResourceReaderHandler resourceReaderHandler; /** The resource bundle handler */ private ResourceBundleHandler resourceBundleHandler; /** The post processor chain factory */ private PostProcessorChainFactory chainFactory; /** The global preprocessor chain factory */ private GlobalPreprocessorChainFactory resourceTypePreprocessorChainFactory; /** The global postprocessor chain factory */ private GlobalPostprocessorChainFactory resourceTypePostprocessorChainFactory; /** The flag indicating if we should use a single resource factory for the orphans resource of the base directory */ private boolean useSingleResourceFactory = false; /** The file name for the single file bundle for orphans */ private String singleFileBundleName; /** The flag indicating if we should use the directory mapper to define the resource bundles */ private boolean useDirMapperFactory = false; /** The set of directory to exclude from the directory mapper factory */ private Set<String> excludedDirMapperDirs; /** The jawr config */ private JawrConfig jawrConfig; /** The map of custom post processor */ private Map<String, String> customPostprocessors; /** The map of custom global pre processor */ private Map<String, String> customGlobalPreprocessors; /** The map of custom global post processor */ private Map<String, String> customGlobalPostprocessors; /** The flag indicating if we should skip the scan for the orphans */ private boolean scanForOrphans = true; /** * Build a ResourceBundlesHandler. Must be invoked after setting at least the ResourceHandler. * * @param jawrConfig the jawr config * @return the resource bundles handler * @throws DuplicateBundlePathException if two bundles are defined with the same path * @throws BundleDependencyException if an error exists in the dependency definition */ public ResourceBundlesHandler buildResourceBundlesHandler() throws DuplicateBundlePathException, BundleDependencyException { if (LOGGER.isInfoEnabled()) LOGGER.info("Building resources handler... "); // Ensure state is correct if (null == jawrConfig) throw new IllegalStateException( "Must set the JawrConfig for this factory before invoking buildResourceBundlesHandler(). 
"); if (null == resourceReaderHandler) throw new IllegalStateException( "Must set the resourceHandler for this factory before invoking buildResourceBundlesHandler(). "); if (useSingleResourceFactory && null == singleFileBundleName) throw new IllegalStateException( "Must set the singleFileBundleName when useSingleResourceFactory is set to true. Please check the documentation. "); // Initialize custom postprocessors before using the factory to build the postprocessing chains if (null != customPostprocessors) chainFactory.setCustomPostprocessors(customPostprocessors); // List of bundles List<JoinableResourceBundle> resourceBundles = new ArrayList<JoinableResourceBundle>(); boolean processBundle = !jawrConfig.getUseBundleMapping() || !resourceBundleHandler.isExistingMappingFile(); if (processBundle) { initResourceBundles(resourceBundles); } else { initResourceBundlesFromFullMapping(resourceBundles); } // Build the postprocessor for bundles ResourceBundlePostProcessor processor = null; if (null == this.globalPostProcessorKeys){ processor = this.chainFactory.buildDefaultProcessorChain(); }else{ processor = this.chainFactory .buildPostProcessorChain(globalPostProcessorKeys); } // Build the postprocessor to use on resources before adding them to the bundle. ResourceBundlePostProcessor unitProcessor = null; if (null == this.unitPostProcessorKeys){ unitProcessor = this.chainFactory.buildDefaultUnitProcessorChain(); }else{ unitProcessor = this.chainFactory .buildPostProcessorChain(unitPostProcessorKeys); } // Build the postprocessor for bundles ResourceBundlePostProcessor compositeBundleProcessor = null; if (null == this.globalCompositePostProcessorKeys){ compositeBundleProcessor = this.chainFactory.buildDefaultCompositeProcessorChain(); }else{ compositeBundleProcessor = this.chainFactory .buildPostProcessorChain(globalCompositePostProcessorKeys); } // Build the postprocessor to use on resources before adding them to the bundle. ResourceBundlePostProcessor compositeUnitProcessor = null; if (null == this.unitCompositePostProcessorKeys){ compositeUnitProcessor = this.chainFactory.buildDefaultUnitCompositeProcessorChain(); }else{ compositeUnitProcessor = this.chainFactory .buildPostProcessorChain(unitCompositePostProcessorKeys); } // Build the resource type global preprocessor to use on resources. // Initialize custom preprocessors before using the factory to build the preprocessing chains if (null != customGlobalPreprocessors) resourceTypePreprocessorChainFactory.setCustomGlobalProcessors(customGlobalPreprocessors); GlobalProcessor<GlobalPreprocessingContext> resourceTypePreprocessor = null; if (null == this.resourceTypePreprocessorKeys) resourceTypePreprocessor = this.resourceTypePreprocessorChainFactory.buildDefaultProcessorChain(); else resourceTypePreprocessor = this.resourceTypePreprocessorChainFactory .buildProcessorChain(resourceTypePreprocessorKeys); // Build the resource type global postprocessor to use on resources. 
// Initialize custom postprocessors before using the factory to build the postprocessing chains if (null != customGlobalPostprocessors) resourceTypePreprocessorChainFactory.setCustomGlobalProcessors(customGlobalPostprocessors); GlobalProcessor<GlobalPostProcessingContext> resourceTypePostprocessor = null; if (null == this.resourceTypePostprocessorKeys) resourceTypePostprocessor = this.resourceTypePostprocessorChainFactory.buildDefaultProcessorChain(); else resourceTypePostprocessor = this.resourceTypePostprocessorChainFactory .buildProcessorChain(resourceTypePostprocessorKeys); // Build the handler ResourceBundlesHandler collector = new ResourceBundlesHandlerImpl( resourceBundles, resourceReaderHandler, resourceBundleHandler, jawrConfig, processor, unitProcessor, compositeBundleProcessor, compositeUnitProcessor, resourceTypePreprocessor, resourceTypePostprocessor); // Use the cached proxy if specified when debug mode is off. if (useInMemoryCache && !jawrConfig.isDebugModeOn()) collector = new CachedResourceBundlesHandler(collector); collector.initAllBundles(); return collector; } /** * Initialize the resource bundles from the mapping file */ private void initResourceBundlesFromFullMapping(List<JoinableResourceBundle> resourceBundles) { if (LOGGER.isInfoEnabled()){ LOGGER.info("Building bundles from the full bundle mapping. The bundles will not be processed."); } Properties mappingProperties = resourceBundleHandler.getJawrBundleMapping(); FullMappingPropertiesBasedBundlesHandlerFactory factory = new FullMappingPropertiesBasedBundlesHandlerFactory(resourceType, resourceReaderHandler, jawrConfig.getGeneratorRegistry(), chainFactory); resourceBundles.addAll(factory.getResourceBundles(mappingProperties)); } /** * Initialize the resource bundles * * @param resourceBundles the resource bundles * @throws DuplicateBundlePathException if two bundles are defined with the same path * @throws BundleDependencyException if an error exists in the dependency definition */ private void initResourceBundles(List<JoinableResourceBundle> resourceBundles) throws DuplicateBundlePathException, BundleDependencyException { // Create custom defined bundles bundleDefinitionsWithDependencies = new HashSet<ResourceBundleDefinition>(); if (null != bundleDefinitions) { if (LOGGER.isInfoEnabled()) LOGGER.info("Adding custom bundle definitions. "); for (Iterator<ResourceBundleDefinition> it = bundleDefinitions.iterator(); it.hasNext();) { ResourceBundleDefinition def = it .next(); // If this is a composite bundle if (def.isComposite()) { List<JoinableResourceBundle> childBundles = new ArrayList<JoinableResourceBundle>(); for (Iterator<ResourceBundleDefinition> childIterator = def.getChildren().iterator(); childIterator .hasNext();) { ResourceBundleDefinition child = childIterator .next(); childBundles.add(buildResourcebundle(child)); } resourceBundles.add(buildCompositeResourcebundle(def, childBundles)); } else resourceBundles.add(buildResourcebundle(def)); } } // Use the dirmapper if specified if (useDirMapperFactory) { if (LOGGER.isInfoEnabled()) LOGGER.info("Using ResourceBundleDirMapper. 
"); ResourceBundleDirMapper dirFactory = new ResourceBundleDirMapper( baseDir, resourceReaderHandler, resourceBundles, fileExtension, excludedDirMapperDirs); Map<String, String> mappings = dirFactory.getBundleMapping(); for (Iterator<Entry<String, String>> it = mappings.entrySet().iterator(); it.hasNext();) { Entry<String, String> entry = it.next(); resourceBundles.add(buildDirMappedResourceBundle(entry.getKey(), entry.getValue())); } } if (this.scanForOrphans) { // Add all orphan bundles OrphanResourceBundlesMapper orphanFactory = new OrphanResourceBundlesMapper( baseDir, resourceReaderHandler, jawrConfig.getGeneratorRegistry(), resourceBundles, fileExtension); List<String> orphans = orphanFactory.getOrphansList(); // Orphans may be added separately or as one single resource bundle. if (useSingleResourceFactory) { // Add extension to the filename if (!singleFileBundleName.endsWith(fileExtension)) singleFileBundleName += fileExtension; if (LOGGER.isInfoEnabled()) LOGGER .info("Building bundle of orphan resources with the name: " + singleFileBundleName); resourceBundles.add(buildOrphansResourceBundle( singleFileBundleName, orphans)); } else { if (LOGGER.isInfoEnabled()) LOGGER.info("Creating mappings for orphan resources. "); for (Iterator<String> it = orphans.iterator(); it.hasNext();) { resourceBundles.add(buildOrphanResourceBundle(it.next())); } } } else if (LOGGER.isDebugEnabled()) { LOGGER.debug("Skipping orphan file auto processing. "); if ("".equals(jawrConfig.getServletMapping())) LOGGER .debug("Note that there is no specified mapping for Jawr " + "(it has been seet to serve *.js or *.css requests). " + "The orphan files will become unreachable through the server."); } // Initialize bundle dependencies for (Iterator<ResourceBundleDefinition> iterator = bundleDefinitionsWithDependencies.iterator(); iterator.hasNext();) { ResourceBundleDefinition definition = iterator.next(); JoinableResourceBundle bundle = getBundleFromName(definition.getBundleName(), resourceBundles); if(bundle != null){ bundle.setDependencies(getBundleDependencies(definition, resourceBundles)); } } } /** * Returns a bundle from its name * @param name the bundle name * @param bundles the list of bundle * @return a bundle from its name */ private JoinableResourceBundle getBundleFromName(String name, List<JoinableResourceBundle> bundles){ JoinableResourceBundle bundle = null; for (Iterator<JoinableResourceBundle> iterator = bundles.iterator(); iterator.hasNext();) { JoinableResourceBundle aBundle = iterator.next(); if(aBundle.getName().equals(name)){ bundle = aBundle; break; } } return bundle; } /** * Build a Composite resource bundle using a ResourceBundleDefinition * * @param definition the bundle definition * @param childBundles the list of child bundles * @return a Composite resource bundle */ private JoinableResourceBundle buildCompositeResourcebundle( ResourceBundleDefinition definition, List<JoinableResourceBundle> childBundles) { if (LOGGER.isDebugEnabled()) LOGGER.debug("Init composite bundle with id:" + definition.getBundleId()); validateBundleId(definition); InclusionPattern include = new InclusionPattern(definition.isGlobal(), definition.getInclusionOrder(), definition.isDebugOnly(), definition.isDebugNever()); CompositeResourceBundle composite = new CompositeResourceBundle( definition.getBundleId(), definition.getBundleName(), childBundles, include, resourceReaderHandler, fileExtension, jawrConfig); if (null != definition.getBundlePostProcessorKeys()) composite.setBundlePostProcessor(chainFactory 
.buildPostProcessorChain(definition .getBundlePostProcessorKeys())); if (null != definition.getUnitaryPostProcessorKeys()) composite.setUnitaryPostProcessor(chainFactory .buildPostProcessorChain(definition .getUnitaryPostProcessorKeys())); if (null != definition.getIeConditionalExpression()) composite.setExplorerConditionalExpression(definition .getIeConditionalExpression()); if (null != definition.getAlternateProductionURL()) composite.setAlternateProductionURL(definition .getAlternateProductionURL()); if (null != definition.getVariants()) composite.setVariants(definition .getVariants()); if (null != definition.getDependencies() && !definition.getDependencies().isEmpty()) bundleDefinitionsWithDependencies.add(definition); return composite; } /** * Build a JoinableResourceBundle using a ResourceBundleDefinition * * @param definition the resource bundle definition * @return a JoinableResourceBundle * @throws BundleDependencyException if an error exists in the dependency definition */ private JoinableResourceBundle buildResourcebundle( ResourceBundleDefinition definition) throws BundleDependencyException { if (LOGGER.isDebugEnabled()) LOGGER.debug("Init bundle with id:" + definition.getBundleId()); validateBundleId(definition); InclusionPattern include = new InclusionPattern(definition.isGlobal(), definition.getInclusionOrder(), definition.isDebugOnly(), definition.isDebugNever()); JoinableResourceBundleImpl newBundle = new JoinableResourceBundleImpl( definition.getBundleId(), definition.getBundleName(), fileExtension, include, definition.getMappings(), resourceReaderHandler, jawrConfig.getGeneratorRegistry()); if (null != definition.getBundlePostProcessorKeys()) newBundle.setBundlePostProcessor(chainFactory .buildPostProcessorChain(definition .getBundlePostProcessorKeys())); if (null != definition.getUnitaryPostProcessorKeys()) newBundle.setUnitaryPostProcessor(chainFactory .buildPostProcessorChain(definition .getUnitaryPostProcessorKeys())); if (null != definition.getIeConditionalExpression()) newBundle.setExplorerConditionalExpression(definition .getIeConditionalExpression()); if (null != definition.getVariants()) newBundle.setVariants(definition .getVariants()); if (null != definition.getAlternateProductionURL()) newBundle.setAlternateProductionURL(definition .getAlternateProductionURL()); if (null != definition.getDependencies() && !definition.getDependencies().isEmpty()){ bundleDefinitionsWithDependencies.add(definition); } return newBundle; } /** * Validates the bundle ID * @param definition the bundle ID * @throws a BundlingProcessException if the bundle ID is not valid */ private void validateBundleId(ResourceBundleDefinition definition) { String bundleId = definition.getBundleId(); if(bundleId != null){ if(!bundleId.endsWith(fileExtension)){ throw new BundlingProcessException("The extension of the bundle "+definition.getBundleName()+" - "+bundleId+" doesn't match the allowed extension : '"+fileExtension+"'. Please update your bundle definition."); }else if(bundleId.startsWith(JawrConstant.WEB_INF_DIR_PREFIX) || bundleId.startsWith(JawrConstant.META_INF_DIR_PREFIX)){ throw new BundlingProcessException("For the bundle "+definition.getBundleName()+", the bundle id '"+bundleId+"' is not allowed because it starts with \"/WEB-INF/\". 
Please update your bundle definition."); } } } /** * Returns the bundle dependencies from the resource bundle definition * * @param definition the resource definition * @param bundles the list of bundles * * @throws BundleDependencyException if an error exists in the dependency definition */ private List<JoinableResourceBundle> getBundleDependencies(ResourceBundleDefinition definition, List<JoinableResourceBundle> bundles) throws BundleDependencyException { List<JoinableResourceBundle> dependencies = new ArrayList<JoinableResourceBundle>(); List<String> processedBundles = new ArrayList<String>(); if(definition.isGlobal() && definition.getDependencies() != null && !definition.getDependencies().isEmpty()){ throw new BundleDependencyException(definition.getBundleName(), "The dependencies property is not allowed for global bundles. Please use the order property " + "to define the import order."); } initBundleDependencies(definition.getBundleName(), definition, dependencies, processedBundles, bundles); return dependencies; } /** * Initialize the bundle dependencies * * @param rootBundleDefinition the name of the bundle, whose is initalized * @param definition the current resource bundle definition * @param bundleDependencies the bundle dependencies * @param processedBundles the list of bundles already processed during the dependency resolution * @param bundles the list of reference bundles * * @throws BundleDependencyException if an error exists in the dependency definition */ private void initBundleDependencies(String rootBundleDefinition, ResourceBundleDefinition definition, List<JoinableResourceBundle> bundleDependencies, List<String> processedBundles, List<JoinableResourceBundle> bundles) throws BundleDependencyException { List<String> bundleDefDependencies = definition.getDependencies(); if(definition.isGlobal()){ if(LOGGER.isInfoEnabled()){ LOGGER.info("The global bundle '"+definition.getBundleName()+"' belongs to the dependencies of '"+rootBundleDefinition+"'." + "As it's a global bundle, it will not be defined as part of the dependencies."); } return; } if (bundleDefDependencies != null && !bundleDefDependencies.isEmpty()) { if (processedBundles.contains(definition.getBundleName())) { throw new BundleDependencyException(rootBundleDefinition, "There is a circular dependency. The bundle in conflict is '"+definition.getBundleName()+"'"); } else { processedBundles.add(definition.getBundleName()); for (Iterator<String> iterator = bundleDefDependencies.iterator(); iterator .hasNext();) { String dependency = iterator.next(); for (Iterator<ResourceBundleDefinition> itDep = bundleDefinitions.iterator(); itDep .hasNext();) { ResourceBundleDefinition dependencyBundle = itDep.next(); String dependencyBundleName = dependencyBundle.getBundleName(); if (dependencyBundleName.equals(dependency)) { if (!bundleDependencies.contains(dependencyBundleName)){ if(!processedBundles.contains(dependencyBundleName)) { initBundleDependencies(rootBundleDefinition, dependencyBundle, bundleDependencies, processedBundles, bundles); bundleDependencies.add(getBundleFromName(dependencyBundleName, bundles)); }else{ throw new BundleDependencyException(rootBundleDefinition, "There is a circular dependency. 
The bundle in conflict is '"+dependencyBundleName+"'"); } } else { if(LOGGER.isInfoEnabled()){ LOGGER.info("The bundle '" + dependencyBundle.getBundleId() + "' occurs multiple time in the dependencies hierarchy of the bundle '"+rootBundleDefinition+"'."); } } } } } } } } /** * Build a bundle based on a mapping returned by the ResourceBundleDirMapperFactory. * * @param bundleId the bundle Id * @param pathMapping the path mapping * @return a bundle based on a mapping returned by the ResourceBundleDirMapperFactory */ private JoinableResourceBundle buildDirMappedResourceBundle( String bundleId, String pathMapping) { List<String> path = Collections.singletonList(pathMapping); JoinableResourceBundle newBundle = new JoinableResourceBundleImpl( bundleId, generateBundleNameFromBundleId(bundleId), fileExtension, new InclusionPattern(), path, resourceReaderHandler, jawrConfig.getGeneratorRegistry()); return newBundle; } /** * Generates the bundle ID from the bundle name * * @param bundleId the bundle name * @return the generated bundle ID */ private String generateBundleNameFromBundleId(String bundleId) { String bundleName = bundleId; if(bundleName.startsWith("/")){ bundleName = bundleName.substring(1); } int idxExtension = FileNameUtils.indexOfExtension(bundleName); if(idxExtension != -1){ bundleName = bundleName.substring(0, idxExtension); } return bundleName.replaceAll("(/|\\.)", "_"); } /** * Builds a single bundle containing all the paths specified. Useful to make a single bundle out of every resource that is orphan after processing * config definitions. * * @param bundleId the bundle Id * @param orphanPaths the orphan paths * @return a single bundle containing all the paths specified */ private JoinableResourceBundle buildOrphansResourceBundle( String bundleId, List<String> orphanPaths) { JoinableResourceBundle newBundle = new JoinableResourceBundleImpl( bundleId, generateBundleNameFromBundleId(bundleId), fileExtension, new InclusionPattern(), orphanPaths, resourceReaderHandler, jawrConfig.getGeneratorRegistry()); return newBundle; } /** * Build a non-global, single-file resource bundle for orphans. * * @param orphanPath the path * @return a non-global, single-file resource bundle for orphans. */ private JoinableResourceBundle buildOrphanResourceBundle(String orphanPath) { String mapping = orphanPath; List<String> paths = Collections.singletonList(mapping); JoinableResourceBundle newBundle = new JoinableResourceBundleImpl( orphanPath, generateBundleNameFromBundleId(orphanPath), fileExtension, new InclusionPattern(), paths, resourceReaderHandler, jawrConfig.getGeneratorRegistry()); return newBundle; } /** * Set the type of bundle (js or css) to use for this factory. * * @param resourceType the resource type */ public void setBundlesType(String resourceType) { // Set the extension for resources and bundles this.resourceType = resourceType; this.fileExtension = "." + resourceType.toLowerCase(); this.resourceTypePreprocessorChainFactory = new BasicGlobalPreprocessorChainFactory(); this.resourceTypePostprocessorChainFactory = new BasicGlobalPostprocessorChainFactory(); // Create the chain factory. if ("js".equals(resourceType)) this.chainFactory = new JSPostProcessorChainFactory(); else this.chainFactory = new CSSPostProcessorChainFactory(); } /** * Set the custom bundle definitions to use. 
* * @param bundleDefinitions the set of bundle definitions */ public void setBundleDefinitions(Set<ResourceBundleDefinition> bundleDefinitions) { this.bundleDefinitions = bundleDefinitions; } /** * Set the base dir from which to fetch the resources. * * @param baseDir the base directory to set */ public void setBaseDir(String baseDir) { this.baseDir = PathNormalizer.asDirPath(baseDir); } /** * Set the keys to pass to the postprocessor factory upon processors creation. If none specified, the default version is used. * * @param globalPostProcessorKeys String Comma separated list of processor keys. */ public void setGlobalPostProcessorKeys(String globalPostProcessorKeys) { this.globalPostProcessorKeys = globalPostProcessorKeys; } /** * Set the keys to pass to the postprocessor factory upon unitary processors creation. If none specified, the default version is used. * * @param unitPostProcessorKeys String Comma separated list of processor keys. */ public void setUnitPostProcessorKeys(String unitPostProcessorKeys) { this.unitPostProcessorKeys = unitPostProcessorKeys; } /** * Sets the postprocessor keys for composite bundle * @param globalCompositePostProcessorKeys Comma separated list of processor keys. */ public void setGlobalCompositePostProcessorKeys( String globalCompositePostProcessorKeys) { this.globalCompositePostProcessorKeys = globalCompositePostProcessorKeys; } /** * Sets the unitary postprocessor keys for composite bundle * @param globalCompositePostProcessorKeys Comma separated list of processor keys. */ public void setUnitCompositePostProcessorKeys( String unitCompositePostProcessorKeys) { this.unitCompositePostProcessorKeys = unitCompositePostProcessorKeys; } /** * Set the keys to pass to the preprocessor factory upon global preprocessors creation. If none specified, the default version is used. * * @param resourceTypePreprocessorKeys String Comma separated list of preprocessor keys. */ public void setResourceTypePreprocessorKeys(String resourceTypePreprocessorKeys) { this.resourceTypePreprocessorKeys = resourceTypePreprocessorKeys; } /** * Set the keys to pass to the postprocessor factory upon global postprocessors creation. If none specified, the default version is used. * * @param resourceTypePostprocessorKeys String Comma separated list of processor keys. */ public void setResourceTypePostprocessorKeys(String resourceTypePostprocessorKeys) { this.resourceTypePostprocessorKeys = resourceTypePostprocessorKeys; } /** * Set the resource handler to use for file access. * * @param rsHandler */ public void setResourceReaderHandler(ResourceReaderHandler rsHandler) { this.resourceReaderHandler = rsHandler; } /** * Set the resource bundle handler to use for file access. * * @param rsBundleHandler */ public void setResourceBundleHandler(ResourceBundleHandler rsBundleHandler) { this.resourceBundleHandler = rsBundleHandler; } /** * Set wether resoures not specifically mapped to any bundle should be joined together in a single bundle, or served separately. * * @param useSingleResourceFactory boolean If true, bundles are joined together. In that case, the singleFileBundleName must be set as well. */ public void setUseSingleResourceFactory(boolean useSingleResourceFactory) { this.useSingleResourceFactory = useSingleResourceFactory; } /** * Set the name for the joint orphans bundle. Must be set when useSingleResourceFactory is true. 
* * @param singleFileBundleName */ public void setSingleFileBundleName(String singleFileBundleName) { if (null != singleFileBundleName) this.singleFileBundleName = PathNormalizer .normalizePath(singleFileBundleName); } /** * If true, the mapper factory that creates bundles from all directories under baseDir will be used. * * @param useDirMapperFactory */ public void setUseDirMapperFactory(boolean useDirMapperFactory) { this.useDirMapperFactory = useDirMapperFactory; } /** * Set wether bundles will be cached in memory instead of being always read from the filesystem. * * @param useInMemoryCache */ public void setUseInMemoryCache(boolean useInMemoryCache) { this.useInMemoryCache = useInMemoryCache; } /** * Sets the paths to exclude when using the dirMapper. * * @param excludedDirMapperDirs */ public void setExludedDirMapperDirs(Set<String> exludedDirMapperDirs) { if (null != exludedDirMapperDirs) this.excludedDirMapperDirs = PathNormalizer .normalizePaths(exludedDirMapperDirs); } /** * Sets the Jawr configuration * @param jawrConfig the configuration to set */ public void setJawrConfig(JawrConfig jawrConfig) { this.jawrConfig = jawrConfig; } /** * Sets the map of custom post processor * @param customPostprocessors the map to set */ public void setCustomPostprocessors(Map<String, String> customPostprocessors) { this.customPostprocessors = customPostprocessors; } /** * Sets the map of custom global preprocessor * @param customGlobalPreprocessors the map to set */ public void setCustomGlobalPreprocessors(Map<String, String> customGlobalPreprocessors) { this.customGlobalPreprocessors = customGlobalPreprocessors; } /** * Sets the map of custom global preprocessor * @param customGlobalPreprocessors the map to set */ public void setCustomGlobalPostprocessors(Map<String, String> customGlobalPostprocessors) { this.customGlobalPostprocessors = customGlobalPostprocessors; } /** * Sets the flag indicating if we should scan or not for the orphan resources * @param scanForOrphans the flag to set */ public void setScanForOrphans(boolean scanForOrphans) { this.scanForOrphans = scanForOrphans; } }
src/main/java/net/jawr/web/resource/bundle/factory/BundlesHandlerFactory.java
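The BundlesHandlerFactory in the new_contents above is configured purely through setters before buildResourceBundlesHandler() is invoked. A minimal wiring sketch based only on those setters, assuming the JawrConfig and the two resource handlers are created elsewhere by Jawr's initializer code:

import net.jawr.web.config.JawrConfig;
import net.jawr.web.resource.bundle.factory.BundlesHandlerFactory;
import net.jawr.web.resource.bundle.handler.ResourceBundlesHandler;
import net.jawr.web.resource.handler.bundle.ResourceBundleHandler;
import net.jawr.web.resource.handler.reader.ResourceReaderHandler;

public class BundlesHandlerWiringSketch {

    // Mirrors the preconditions checked in buildResourceBundlesHandler():
    // the JawrConfig and ResourceReaderHandler must be set before building.
    public static ResourceBundlesHandler build(JawrConfig jawrConfig,
                                               ResourceReaderHandler readerHandler,
                                               ResourceBundleHandler bundleHandler) throws Exception {
        BundlesHandlerFactory factory = new BundlesHandlerFactory();
        factory.setJawrConfig(jawrConfig);
        factory.setResourceReaderHandler(readerHandler);
        factory.setResourceBundleHandler(bundleHandler);
        factory.setBundlesType("js");   // selects the JS postprocessor chain and the ".js" extension
        factory.setBaseDir("/js");      // root directory scanned for resources
        return factory.buildResourceBundlesHandler();
    }
}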
/** * Copyright 2007-2011 Jordi Hernndez Sells, Ibrahim Chaehoi * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package net.jawr.web.resource.bundle.factory; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import net.jawr.web.JawrConstant; import net.jawr.web.config.JawrConfig; import net.jawr.web.exception.BundleDependencyException; import net.jawr.web.exception.BundlingProcessException; import net.jawr.web.exception.DuplicateBundlePathException; import net.jawr.web.resource.FileNameUtils; import net.jawr.web.resource.bundle.CompositeResourceBundle; import net.jawr.web.resource.bundle.InclusionPattern; import net.jawr.web.resource.bundle.JoinableResourceBundle; import net.jawr.web.resource.bundle.JoinableResourceBundleImpl; import net.jawr.web.resource.bundle.factory.global.postprocessor.BasicGlobalPostprocessorChainFactory; import net.jawr.web.resource.bundle.factory.global.postprocessor.GlobalPostProcessingContext; import net.jawr.web.resource.bundle.factory.global.postprocessor.GlobalPostprocessorChainFactory; import net.jawr.web.resource.bundle.factory.global.preprocessor.BasicGlobalPreprocessorChainFactory; import net.jawr.web.resource.bundle.factory.global.preprocessor.GlobalPreprocessingContext; import net.jawr.web.resource.bundle.factory.global.preprocessor.GlobalPreprocessorChainFactory; import net.jawr.web.resource.bundle.factory.mapper.OrphanResourceBundlesMapper; import net.jawr.web.resource.bundle.factory.mapper.ResourceBundleDirMapper; import net.jawr.web.resource.bundle.factory.postprocessor.CSSPostProcessorChainFactory; import net.jawr.web.resource.bundle.factory.postprocessor.JSPostProcessorChainFactory; import net.jawr.web.resource.bundle.factory.postprocessor.PostProcessorChainFactory; import net.jawr.web.resource.bundle.factory.util.PathNormalizer; import net.jawr.web.resource.bundle.factory.util.ResourceBundleDefinition; import net.jawr.web.resource.bundle.global.processor.GlobalProcessor; import net.jawr.web.resource.bundle.handler.CachedResourceBundlesHandler; import net.jawr.web.resource.bundle.handler.ResourceBundlesHandler; import net.jawr.web.resource.bundle.handler.ResourceBundlesHandlerImpl; import net.jawr.web.resource.bundle.postprocess.ResourceBundlePostProcessor; import net.jawr.web.resource.handler.bundle.ResourceBundleHandler; import net.jawr.web.resource.handler.reader.ResourceReaderHandler; import org.apache.log4j.Logger; /** * Factory to create a ResourceBundlesHandler as per configuration options set by the user. 
* * @author Jordi Hernndez Sells * @author Ibrahim Chaehoi * */ public class BundlesHandlerFactory { /** The logger */ private static final Logger LOGGER = Logger .getLogger(BundlesHandlerFactory.class); /** The flag indicating if we should use the in memory cache */ private boolean useInMemoryCache = true; /** The root directory for the resources */ private String baseDir = ""; /** The resource type */ private String resourceType; /** The file extension */ private String fileExtension; /** The keys of the global post processors */ private String globalPostProcessorKeys; /** The keys of the unitary post processors */ private String unitPostProcessorKeys; /** The keys of the global post composite processors */ private String globalCompositePostProcessorKeys; /** The keys of the unitary post composite processors */ private String unitCompositePostProcessorKeys; /** The keys of the resource type preprocessors */ private String resourceTypePreprocessorKeys; /** The keys of the resource type postprocessors */ private String resourceTypePostprocessorKeys; /** The set of bundle definitions */ private Set<ResourceBundleDefinition> bundleDefinitions; /** The set of bundle definitions with dependencies */ private Set<ResourceBundleDefinition> bundleDefinitionsWithDependencies; /** The resource handler */ private ResourceReaderHandler resourceReaderHandler; /** The resource bundle handler */ private ResourceBundleHandler resourceBundleHandler; /** The post processor chain factory */ private PostProcessorChainFactory chainFactory; /** The global preprocessor chain factory */ private GlobalPreprocessorChainFactory resourceTypePreprocessorChainFactory; /** The global postprocessor chain factory */ private GlobalPostprocessorChainFactory resourceTypePostprocessorChainFactory; /** The flag indicating if we should use a single resource factory for the orphans resource of the base directory */ private boolean useSingleResourceFactory = false; /** The file name for the single file bundle for orphans */ private String singleFileBundleName; /** The flag indicating if we should use the directory mapper to define the resource bundles */ private boolean useDirMapperFactory = false; /** The set of directory to exclude from the directory mapper factory */ private Set<String> excludedDirMapperDirs; /** The jawr config */ private JawrConfig jawrConfig; /** The map of custom post processor */ private Map<String, String> customPostprocessors; /** The map of custom global pre processor */ private Map<String, String> customGlobalPreprocessors; /** The map of custom global post processor */ private Map<String, String> customGlobalPostprocessors; /** The flag indicating if we should skip the scan for the orphans */ private boolean scanForOrphans = true; /** * Build a ResourceBundlesHandler. Must be invoked after setting at least the ResourceHandler. * * @param jawrConfig the jawr config * @return the resource bundles handler * @throws DuplicateBundlePathException if two bundles are defined with the same path * @throws BundleDependencyException if an error exists in the dependency definition */ public ResourceBundlesHandler buildResourceBundlesHandler() throws DuplicateBundlePathException, BundleDependencyException { if (LOGGER.isInfoEnabled()) LOGGER.info("Building resources handler... "); // Ensure state is correct if (null == jawrConfig) throw new IllegalStateException( "Must set the JawrConfig for this factory before invoking buildResourceBundlesHandler(). 
"); if (null == resourceReaderHandler) throw new IllegalStateException( "Must set the resourceHandler for this factory before invoking buildResourceBundlesHandler(). "); if (useSingleResourceFactory && null == singleFileBundleName) throw new IllegalStateException( "Must set the singleFileBundleName when useSingleResourceFactory is set to true. Please check the documentation. "); // Initialize custom postprocessors before using the factory to build the postprocessing chains if (null != customPostprocessors) chainFactory.setCustomPostprocessors(customPostprocessors); // List of bundles List<JoinableResourceBundle> resourceBundles = new ArrayList<JoinableResourceBundle>(); boolean processBundle = !jawrConfig.getUseBundleMapping() || !resourceBundleHandler.isExistingMappingFile(); if (processBundle) { initResourceBundles(resourceBundles); } else { initResourceBundlesFromFullMapping(resourceBundles); } // Build the postprocessor for bundles ResourceBundlePostProcessor processor = null; if (null == this.globalPostProcessorKeys){ processor = this.chainFactory.buildDefaultProcessorChain(); }else{ processor = this.chainFactory .buildPostProcessorChain(globalPostProcessorKeys); } // Build the postprocessor to use on resources before adding them to the bundle. ResourceBundlePostProcessor unitProcessor = null; if (null == this.unitPostProcessorKeys){ unitProcessor = this.chainFactory.buildDefaultUnitProcessorChain(); }else{ unitProcessor = this.chainFactory .buildPostProcessorChain(unitPostProcessorKeys); } // Build the postprocessor for bundles ResourceBundlePostProcessor compositeBundleProcessor = null; if (null == this.globalCompositePostProcessorKeys){ compositeBundleProcessor = this.chainFactory.buildDefaultCompositeProcessorChain(); }else{ compositeBundleProcessor = this.chainFactory .buildPostProcessorChain(globalCompositePostProcessorKeys); } // Build the postprocessor to use on resources before adding them to the bundle. ResourceBundlePostProcessor compositeUnitProcessor = null; if (null == this.unitCompositePostProcessorKeys){ compositeUnitProcessor = this.chainFactory.buildDefaultUnitCompositeProcessorChain(); }else{ compositeUnitProcessor = this.chainFactory .buildPostProcessorChain(unitCompositePostProcessorKeys); } // Build the resource type global preprocessor to use on resources. // Initialize custom preprocessors before using the factory to build the preprocessing chains if (null != customGlobalPreprocessors) resourceTypePreprocessorChainFactory.setCustomGlobalProcessors(customGlobalPreprocessors); GlobalProcessor<GlobalPreprocessingContext> resourceTypePreprocessor = null; if (null == this.resourceTypePreprocessorKeys) resourceTypePreprocessor = this.resourceTypePreprocessorChainFactory.buildDefaultProcessorChain(); else resourceTypePreprocessor = this.resourceTypePreprocessorChainFactory .buildProcessorChain(resourceTypePreprocessorKeys); // Build the resource type global postprocessor to use on resources. 
// Initialize custom postprocessors before using the factory to build the postprocessing chains if (null != customGlobalPostprocessors) resourceTypePreprocessorChainFactory.setCustomGlobalProcessors(customGlobalPostprocessors); GlobalProcessor<GlobalPostProcessingContext> resourceTypePostprocessor = null; if (null == this.resourceTypePostprocessorKeys) resourceTypePostprocessor = this.resourceTypePostprocessorChainFactory.buildDefaultProcessorChain(); else resourceTypePostprocessor = this.resourceTypePostprocessorChainFactory .buildProcessorChain(resourceTypePostprocessorKeys); // Build the handler ResourceBundlesHandler collector = new ResourceBundlesHandlerImpl( resourceBundles, resourceReaderHandler, resourceBundleHandler, jawrConfig, processor, unitProcessor, compositeBundleProcessor, compositeUnitProcessor, resourceTypePreprocessor, resourceTypePostprocessor); // Use the cached proxy if specified when debug mode is off. if (useInMemoryCache && !jawrConfig.isDebugModeOn()) collector = new CachedResourceBundlesHandler(collector); collector.initAllBundles(); return collector; } /** * Initialize the resource bundles from the mapping file */ private void initResourceBundlesFromFullMapping(List<JoinableResourceBundle> resourceBundles) { if (LOGGER.isInfoEnabled()){ LOGGER.info("Building bundles from the full bundle mapping. The bundles will not be processed."); } Properties mappingProperties = resourceBundleHandler.getJawrBundleMapping(); FullMappingPropertiesBasedBundlesHandlerFactory factory = new FullMappingPropertiesBasedBundlesHandlerFactory(resourceType, resourceReaderHandler, jawrConfig.getGeneratorRegistry(), chainFactory); resourceBundles.addAll(factory.getResourceBundles(mappingProperties)); } /** * Initialize the resource bundles * * @param resourceBundles the resource bundles * @throws DuplicateBundlePathException if two bundles are defined with the same path * @throws BundleDependencyException if an error exists in the dependency definition */ private void initResourceBundles(List<JoinableResourceBundle> resourceBundles) throws DuplicateBundlePathException, BundleDependencyException { // Create custom defined bundles bundleDefinitionsWithDependencies = new HashSet<ResourceBundleDefinition>(); if (null != bundleDefinitions) { if (LOGGER.isInfoEnabled()) LOGGER.info("Adding custom bundle definitions. "); for (Iterator<ResourceBundleDefinition> it = bundleDefinitions.iterator(); it.hasNext();) { ResourceBundleDefinition def = it .next(); // If this is a composite bundle if (def.isComposite()) { List<JoinableResourceBundle> childBundles = new ArrayList<JoinableResourceBundle>(); for (Iterator<ResourceBundleDefinition> childIterator = def.getChildren().iterator(); childIterator .hasNext();) { ResourceBundleDefinition child = childIterator .next(); childBundles.add(buildResourcebundle(child)); } resourceBundles.add(buildCompositeResourcebundle(def, childBundles)); } else resourceBundles.add(buildResourcebundle(def)); } } // Use the dirmapper if specified if (useDirMapperFactory) { if (LOGGER.isInfoEnabled()) LOGGER.info("Using ResourceBundleDirMapper. 
"); ResourceBundleDirMapper dirFactory = new ResourceBundleDirMapper( baseDir, resourceReaderHandler, resourceBundles, fileExtension, excludedDirMapperDirs); Map<String, String> mappings = dirFactory.getBundleMapping(); for (Iterator<Entry<String, String>> it = mappings.entrySet().iterator(); it.hasNext();) { Entry<String, String> entry = it.next(); resourceBundles.add(buildDirMappedResourceBundle(entry.getKey(), entry.getValue())); } } if (this.scanForOrphans) { // Add all orphan bundles OrphanResourceBundlesMapper orphanFactory = new OrphanResourceBundlesMapper( baseDir, resourceReaderHandler, jawrConfig.getGeneratorRegistry(), resourceBundles, fileExtension); List<String> orphans = orphanFactory.getOrphansList(); // Orphans may be added separately or as one single resource bundle. if (useSingleResourceFactory) { // Add extension to the filename if (!singleFileBundleName.endsWith(fileExtension)) singleFileBundleName += fileExtension; if (LOGGER.isInfoEnabled()) LOGGER .info("Building bundle of orphan resources with the name: " + singleFileBundleName); resourceBundles.add(buildOrphansResourceBundle( singleFileBundleName, orphans)); } else { if (LOGGER.isInfoEnabled()) LOGGER.info("Creating mappings for orphan resources. "); for (Iterator<String> it = orphans.iterator(); it.hasNext();) { resourceBundles.add(buildOrphanResourceBundle(it.next())); } } } else if (LOGGER.isDebugEnabled()) { LOGGER.debug("Skipping orphan file auto processing. "); if ("".equals(jawrConfig.getServletMapping())) LOGGER .debug("Note that there is no specified mapping for Jawr " + "(it has been seet to serve *.js or *.css requests). " + "The orphan files will become unreachable through the server."); } // Initialize bundle dependencies for (Iterator<ResourceBundleDefinition> iterator = bundleDefinitionsWithDependencies.iterator(); iterator.hasNext();) { ResourceBundleDefinition definition = iterator.next(); JoinableResourceBundle bundle = getBundleFromName(definition.getBundleName(), resourceBundles); if(bundle != null){ bundle.setDependencies(getBundleDependencies(definition, resourceBundles)); } } } /** * Returns a bundle from its name * @param name the bundle name * @param bundles the list of bundle * @return a bundle from its name */ private JoinableResourceBundle getBundleFromName(String name, List<JoinableResourceBundle> bundles){ JoinableResourceBundle bundle = null; for (Iterator<JoinableResourceBundle> iterator = bundles.iterator(); iterator.hasNext();) { JoinableResourceBundle aBundle = iterator.next(); if(aBundle.getName().equals(name)){ bundle = aBundle; break; } } return bundle; } /** * Build a Composite resource bundle using a ResourceBundleDefinition * * @param definition the bundle definition * @param childBundles the list of child bundles * @return a Composite resource bundle */ private JoinableResourceBundle buildCompositeResourcebundle( ResourceBundleDefinition definition, List<JoinableResourceBundle> childBundles) { if (LOGGER.isDebugEnabled()) LOGGER.debug("Init composite bundle with id:" + definition.getBundleId()); validateBundleId(definition); InclusionPattern include = new InclusionPattern(definition.isGlobal(), definition.getInclusionOrder(), definition.isDebugOnly(), definition.isDebugNever()); CompositeResourceBundle composite = new CompositeResourceBundle( definition.getBundleId(), definition.getBundleName(), childBundles, include, resourceReaderHandler, fileExtension, jawrConfig); if (null != definition.getBundlePostProcessorKeys()) composite.setBundlePostProcessor(chainFactory 
.buildPostProcessorChain(definition .getBundlePostProcessorKeys())); if (null != definition.getUnitaryPostProcessorKeys()) composite.setUnitaryPostProcessor(chainFactory .buildPostProcessorChain(definition .getUnitaryPostProcessorKeys())); if (null != definition.getIeConditionalExpression()) composite.setExplorerConditionalExpression(definition .getIeConditionalExpression()); if (null != definition.getAlternateProductionURL()) composite.setAlternateProductionURL(definition .getAlternateProductionURL()); if (null != definition.getVariants()) composite.setVariants(definition .getVariants()); if (null != definition.getDependencies() && !definition.getDependencies().isEmpty()) bundleDefinitionsWithDependencies.add(definition); return composite; } /** * Build a JoinableResourceBundle using a ResourceBundleDefinition * * @param definition the resource bundle definition * @return a JoinableResourceBundle * @throws BundleDependencyException if an error exists in the dependency definition */ private JoinableResourceBundle buildResourcebundle( ResourceBundleDefinition definition) throws BundleDependencyException { if (LOGGER.isDebugEnabled()) LOGGER.debug("Init bundle with id:" + definition.getBundleId()); validateBundleId(definition); InclusionPattern include = new InclusionPattern(definition.isGlobal(), definition.getInclusionOrder(), definition.isDebugOnly(), definition.isDebugNever()); JoinableResourceBundleImpl newBundle = new JoinableResourceBundleImpl( definition.getBundleId(), definition.getBundleName(), fileExtension, include, definition.getMappings(), resourceReaderHandler, jawrConfig.getGeneratorRegistry()); if (null != definition.getBundlePostProcessorKeys()) newBundle.setBundlePostProcessor(chainFactory .buildPostProcessorChain(definition .getBundlePostProcessorKeys())); if (null != definition.getUnitaryPostProcessorKeys()) newBundle.setUnitaryPostProcessor(chainFactory .buildPostProcessorChain(definition .getUnitaryPostProcessorKeys())); if (null != definition.getIeConditionalExpression()) newBundle.setExplorerConditionalExpression(definition .getIeConditionalExpression()); if (null != definition.getVariants()) newBundle.setVariants(definition .getVariants()); if (null != definition.getAlternateProductionURL()) newBundle.setAlternateProductionURL(definition .getAlternateProductionURL()); if (null != definition.getDependencies() && !definition.getDependencies().isEmpty()){ bundleDefinitionsWithDependencies.add(definition); } return newBundle; } /** * Validates the bundle ID * @param definition the bundle ID * @throws a BundlingProcessException if the bundle ID is not valid */ private void validateBundleId(ResourceBundleDefinition definition) { String bundleId = definition.getBundleId(); if(bundleId != null){ if(!bundleId.endsWith(fileExtension)){ throw new BundlingProcessException("The extension of the bundle "+definition.getBundleName()+" - "+bundleId+" doesn't match the allowed extension : '"+fileExtension+"'. Please update your bundle definition."); }else if(bundleId.startsWith(JawrConstant.WEB_INF_DIR_PREFIX) || bundleId.startsWith(JawrConstant.META_INF_DIR_PREFIX)){ throw new BundlingProcessException("For the bundle "+definition.getBundleName()+", the bundle id '"+bundleId+"' is not allowed because it starts with \"/WEB-INF/\". 
Please update your bundle definition."); } } } /** * Returns the bundle dependencies from the resource bundle definition * * @param definition the resource definition * @param bundles the list of bundles * * @throws BundleDependencyException if an error exists in the dependency definition */ private List<JoinableResourceBundle> getBundleDependencies(ResourceBundleDefinition definition, List<JoinableResourceBundle> bundles) throws BundleDependencyException { List<JoinableResourceBundle> dependencies = new ArrayList<JoinableResourceBundle>(); List<String> processedBundles = new ArrayList<String>(); if(definition.isGlobal() && definition.getDependencies() != null && !definition.getDependencies().isEmpty()){ throw new BundleDependencyException(definition.getBundleName(), "The dependencies property is not allowed for global bundles. Please use the order property " + "to define the import order."); } initBundleDependencies(definition.getBundleName(), definition, dependencies, processedBundles, bundles); return dependencies; } /** * Initialize the bundle dependencies * * @param rootBundleDefinition the name of the bundle, whose is initalized * @param definition the current resource bundle definition * @param bundleDependencies the bundle dependencies * @param processedBundles the list of bundles already processed during the dependency resolution * @param bundles the list of reference bundles * * @throws BundleDependencyException if an error exists in the dependency definition */ private void initBundleDependencies(String rootBundleDefinition, ResourceBundleDefinition definition, List<JoinableResourceBundle> bundleDependencies, List<String> processedBundles, List<JoinableResourceBundle> bundles) throws BundleDependencyException { List<String> bundleDefDependencies = definition.getDependencies(); if(definition.isGlobal()){ if(LOGGER.isInfoEnabled()){ LOGGER.info("The global bundle '"+definition.getBundleName()+"' belongs to the dependencies of '"+rootBundleDefinition+"'." + "As it's a global bundle, it will not be defined as part of the dependencies."); } return; } if (bundleDefDependencies != null && !bundleDefDependencies.isEmpty()) { if (processedBundles.contains(definition.getBundleName())) { throw new BundleDependencyException(rootBundleDefinition, "There is a circular dependency. The bundle in conflict is '"+definition.getBundleName()+"'"); } else { processedBundles.add(definition.getBundleName()); for (Iterator<String> iterator = bundleDefDependencies.iterator(); iterator .hasNext();) { String dependency = iterator.next(); for (Iterator<ResourceBundleDefinition> itDep = bundleDefinitions.iterator(); itDep .hasNext();) { ResourceBundleDefinition dependencyBundle = itDep.next(); String dependencyBundleName = dependencyBundle.getBundleName(); if (dependencyBundleName.equals(dependency)) { if (!bundleDependencies.contains(dependencyBundleName)){ if(!processedBundles.contains(dependencyBundleName)) { initBundleDependencies(rootBundleDefinition, dependencyBundle, bundleDependencies, processedBundles, bundles); bundleDependencies.add(getBundleFromName(dependencyBundleName, bundles)); }else{ throw new BundleDependencyException(rootBundleDefinition, "There is a circular dependency. 
The bundle in conflict is '"+dependencyBundleName+"'"); } } else { if(LOGGER.isInfoEnabled()){ LOGGER.info("The bundle '" + dependencyBundle.getBundleId() + "' occurs multiple time in the dependencies hierarchy of the bundle '"+rootBundleDefinition+"'."); } } } } } } } } /** * Build a bundle based on a mapping returned by the ResourceBundleDirMapperFactory. * * @param bundleId the bundle Id * @param pathMapping the path mapping * @return a bundle based on a mapping returned by the ResourceBundleDirMapperFactory */ private JoinableResourceBundle buildDirMappedResourceBundle( String bundleId, String pathMapping) { List<String> path = Collections.singletonList(pathMapping); JoinableResourceBundle newBundle = new JoinableResourceBundleImpl( bundleId, generateBundleNameFromBundleId(bundleId), fileExtension, new InclusionPattern(), path, resourceReaderHandler, jawrConfig.getGeneratorRegistry()); return newBundle; } /** * Generates the bundle ID from the bundle name * * @param bundleId the bundle name * @return the generated bundle ID */ private String generateBundleNameFromBundleId(String bundleId) { String bundleName = bundleId; if(bundleName.startsWith("/")){ bundleName = bundleName.substring(1); } int idxExtension = FileNameUtils.indexOfExtension(bundleName); if(idxExtension != -1){ bundleName = bundleName.substring(0, idxExtension); } return bundleName.replaceAll("/", "_"); } /** * Builds a single bundle containing all the paths specified. Useful to make a single bundle out of every resource that is orphan after processing * config definitions. * * @param bundleId the bundle Id * @param orphanPaths the orphan paths * @return a single bundle containing all the paths specified */ private JoinableResourceBundle buildOrphansResourceBundle( String bundleId, List<String> orphanPaths) { JoinableResourceBundle newBundle = new JoinableResourceBundleImpl( bundleId, generateBundleNameFromBundleId(bundleId), fileExtension, new InclusionPattern(), orphanPaths, resourceReaderHandler, jawrConfig.getGeneratorRegistry()); return newBundle; } /** * Build a non-global, single-file resource bundle for orphans. * * @param orphanPath the path * @return a non-global, single-file resource bundle for orphans. */ private JoinableResourceBundle buildOrphanResourceBundle(String orphanPath) { String mapping = orphanPath;// .startsWith("/") ? orphanPath.substring(0) : orphanPath; List<String> paths = Collections.singletonList(mapping); JoinableResourceBundle newBundle = new JoinableResourceBundleImpl( orphanPath, generateBundleNameFromBundleId(orphanPath), fileExtension, new InclusionPattern(), paths, resourceReaderHandler, jawrConfig.getGeneratorRegistry()); return newBundle; } /** * Set the type of bundle (js or css) to use for this factory. * * @param resourceType the resource type */ public void setBundlesType(String resourceType) { // Set the extension for resources and bundles this.resourceType = resourceType; this.fileExtension = "." + resourceType.toLowerCase(); this.resourceTypePreprocessorChainFactory = new BasicGlobalPreprocessorChainFactory(); this.resourceTypePostprocessorChainFactory = new BasicGlobalPostprocessorChainFactory(); // Create the chain factory. if ("js".equals(resourceType)) this.chainFactory = new JSPostProcessorChainFactory(); else this.chainFactory = new CSSPostProcessorChainFactory(); } /** * Set the custom bundle definitions to use. 
* * @param bundleDefinitions the set of bundle definitions */ public void setBundleDefinitions(Set<ResourceBundleDefinition> bundleDefinitions) { this.bundleDefinitions = bundleDefinitions; } /** * Set the base dir from which to fetch the resources. * * @param baseDir the base directory to set */ public void setBaseDir(String baseDir) { this.baseDir = PathNormalizer.asDirPath(baseDir); } /** * Set the keys to pass to the postprocessor factory upon processors creation. If none specified, the default version is used. * * @param globalPostProcessorKeys String Comma separated list of processor keys. */ public void setGlobalPostProcessorKeys(String globalPostProcessorKeys) { this.globalPostProcessorKeys = globalPostProcessorKeys; } /** * Set the keys to pass to the postprocessor factory upon unitary processors creation. If none specified, the default version is used. * * @param unitPostProcessorKeys String Comma separated list of processor keys. */ public void setUnitPostProcessorKeys(String unitPostProcessorKeys) { this.unitPostProcessorKeys = unitPostProcessorKeys; } /** * Sets the postprocessor keys for composite bundle * @param globalCompositePostProcessorKeys Comma separated list of processor keys. */ public void setGlobalCompositePostProcessorKeys( String globalCompositePostProcessorKeys) { this.globalCompositePostProcessorKeys = globalCompositePostProcessorKeys; } /** * Sets the unitary postprocessor keys for composite bundle * @param globalCompositePostProcessorKeys Comma separated list of processor keys. */ public void setUnitCompositePostProcessorKeys( String unitCompositePostProcessorKeys) { this.unitCompositePostProcessorKeys = unitCompositePostProcessorKeys; } /** * Set the keys to pass to the preprocessor factory upon global preprocessors creation. If none specified, the default version is used. * * @param resourceTypePreprocessorKeys String Comma separated list of preprocessor keys. */ public void setResourceTypePreprocessorKeys(String resourceTypePreprocessorKeys) { this.resourceTypePreprocessorKeys = resourceTypePreprocessorKeys; } /** * Set the keys to pass to the postprocessor factory upon global postprocessors creation. If none specified, the default version is used. * * @param resourceTypePostprocessorKeys String Comma separated list of processor keys. */ public void setResourceTypePostprocessorKeys(String resourceTypePostprocessorKeys) { this.resourceTypePostprocessorKeys = resourceTypePostprocessorKeys; } /** * Set the resource handler to use for file access. * * @param rsHandler */ public void setResourceReaderHandler(ResourceReaderHandler rsHandler) { this.resourceReaderHandler = rsHandler; } /** * Set the resource bundle handler to use for file access. * * @param rsBundleHandler */ public void setResourceBundleHandler(ResourceBundleHandler rsBundleHandler) { this.resourceBundleHandler = rsBundleHandler; } /** * Set wether resoures not specifically mapped to any bundle should be joined together in a single bundle, or served separately. * * @param useSingleResourceFactory boolean If true, bundles are joined together. In that case, the singleFileBundleName must be set as well. */ public void setUseSingleResourceFactory(boolean useSingleResourceFactory) { this.useSingleResourceFactory = useSingleResourceFactory; } /** * Set the name for the joint orphans bundle. Must be set when useSingleResourceFactory is true. 
* * @param singleFileBundleName */ public void setSingleFileBundleName(String singleFileBundleName) { if (null != singleFileBundleName) this.singleFileBundleName = PathNormalizer .normalizePath(singleFileBundleName); } /** * If true, the mapper factory that creates bundles from all directories under baseDir will be used. * * @param useDirMapperFactory */ public void setUseDirMapperFactory(boolean useDirMapperFactory) { this.useDirMapperFactory = useDirMapperFactory; } /** * Set wether bundles will be cached in memory instead of being always read from the filesystem. * * @param useInMemoryCache */ public void setUseInMemoryCache(boolean useInMemoryCache) { this.useInMemoryCache = useInMemoryCache; } /** * Sets the paths to exclude when using the dirMapper. * * @param excludedDirMapperDirs */ public void setExludedDirMapperDirs(Set<String> exludedDirMapperDirs) { if (null != exludedDirMapperDirs) this.excludedDirMapperDirs = PathNormalizer .normalizePaths(exludedDirMapperDirs); } /** * Sets the Jawr configuration * @param jawrConfig the configuration to set */ public void setJawrConfig(JawrConfig jawrConfig) { this.jawrConfig = jawrConfig; } /** * Sets the map of custom post processor * @param customPostprocessors the map to set */ public void setCustomPostprocessors(Map<String, String> customPostprocessors) { this.customPostprocessors = customPostprocessors; } /** * Sets the map of custom global preprocessor * @param customGlobalPreprocessors the map to set */ public void setCustomGlobalPreprocessors(Map<String, String> customGlobalPreprocessors) { this.customGlobalPreprocessors = customGlobalPreprocessors; } /** * Sets the map of custom global preprocessor * @param customGlobalPreprocessors the map to set */ public void setCustomGlobalPostprocessors(Map<String, String> customGlobalPostprocessors) { this.customGlobalPostprocessors = customGlobalPostprocessors; } /** * Sets the flag indicating if we should scan or not for the orphan resources * @param scanForOrphans the flag to set */ public void setScanForOrphans(boolean scanForOrphans) { this.scanForOrphans = scanForOrphans; } }
Issue #178 : Issue with use.bundling.mapping for preprocessed orphan bundles containing dots in the name
src/main/java/net/jawr/web/resource/bundle/factory/BundlesHandlerFactory.java
Issue #178 : Issue with use.bundling.mapping for preprocessed orphan bundles containing dots in the name
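The record above revolves around how Jawr derives a bundle name from a bundle id: generateBundleNameFromBundleId strips a leading slash, removes the file extension and replaces '/' with '_', which is exactly where ids containing extra dots (the subject of Issue #178) get interesting. Below is a minimal standalone sketch of that mapping; the lastIndexOf('.') lookup is an assumption standing in for Jawr's FileNameUtils.indexOfExtension, so treat it as an illustration rather than the library's implementation.

public final class BundleNameSketch {
    // Standalone approximation of generateBundleNameFromBundleId; the real method
    // delegates extension detection to FileNameUtils.indexOfExtension.
    static String nameFromId(String bundleId) {
        String bundleName = bundleId;
        if (bundleName.startsWith("/")) {
            bundleName = bundleName.substring(1);               // drop the leading slash
        }
        int idxExtension = bundleName.lastIndexOf('.');          // stand-in for indexOfExtension
        if (idxExtension != -1) {
            bundleName = bundleName.substring(0, idxExtension);  // strip the ".js" / ".css" suffix
        }
        return bundleName.replaceAll("/", "_");                  // flatten the path into a single name
    }

    public static void main(String[] args) {
        // A mapping with dots inside the file name, the case the issue is about:
        System.out.println(nameFromId("/js/lib/jquery.min.js")); // prints js_lib_jquery.min
    }
}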
Java
apache-2.0
5974945e03f19ca75ba5a514b3e24c784df6aa39
0
igorakkerman/jlib
/* * jlib - Open Source Java Library * * www.jlib.org * * * Copyright 2005-2013 Igor Akkerman * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jlib.core.valueholder; import org.jlib.core.exception.ValueNotAccessibleException; /** * Skeletal implementation of a not initialized {@link ModifiableValueHolder}. * * @param <Value> * type of the value * * @author Igor Akkerman */ public abstract class UninitializedValueHolder<Value> implements ModifiableValueHolder<Value> { /** * Creates a new {@link UninitializedValueHolder}. */ protected UninitializedValueHolder() { super(); } /** * Always throws a {@link ValueNotAccessibleException}. * * @return never * * @throws ValueNotAccessibleException * always */ @Override public Value getValue() throws ValueNotAccessibleException { throw new ValueNotAccessibleException(); } }
jlib-core/src/main/java/org/jlib/core/valueholder/UninitializedValueHolder.java
/* * jlib - Open Source Java Library * * www.jlib.org * * * Copyright 2005-2013 Igor Akkerman * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jlib.core.valueholder; import org.jlib.core.exception.ValueNotAccessibleException; /** * Skeletal implementation of a not initialized {@link ModifiableValueHolder}. * * @param <Value> * type of the value * * @author Igor Akkerman */ public abstract class UninitializedValueHolder<Value> implements ModifiableValueHolder<Value> { /** * Creates a new {@link UninitializedValueHolder}. */ public UninitializedValueHolder() { super(); } /** * Always throws a {@link ValueNotAccessibleException}. * * @return never * * @throws ValueNotAccessibleException * always */ @Override public Value getValue() throws ValueNotAccessibleException { throw new ValueNotAccessibleException(); } }
public constructor in abstract class made protected
jlib-core/src/main/java/org/jlib/core/valueholder/UninitializedValueHolder.java
public constructor in abstract class made protected
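The only difference between the old and new contents in this jlib record is the constructor modifier of the abstract UninitializedValueHolder (public before, protected after). A small self-contained sketch of the same pattern follows, using hypothetical class names that are not part of jlib:

// Hypothetical illustration of the commit's pattern: an abstract class cannot be
// instantiated directly, so a protected constructor documents that it exists only
// for subclasses to call.
abstract class AbstractHolder<V> {
    protected AbstractHolder() {
        // no state to set up; the access modifier is the point
    }
    abstract V get();
}

class FixedHolder<V> extends AbstractHolder<V> {
    private final V value;
    FixedHolder(V value) {
        super();              // a subclass still reaches the protected constructor
        this.value = value;
    }
    @Override
    V get() { return value; }
}

public class ProtectedConstructorDemo {
    public static void main(String[] args) {
        AbstractHolder<String> holder = new FixedHolder<>("ok");
        System.out.println(holder.get());  // prints "ok"
        // new AbstractHolder<String>() would not compile: the class is abstract.
    }
}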
Java
bsd-3-clause
023d787116a35cd5275be77d0c96dbbb82c9c033
0
UNFPAInnovation/GetIn_Mobile,UNFPAInnovation/GetIn_Mobile
/** * Copyright (c) 2013, Sana * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Sana nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL Sana BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.sana.android.content.core; import org.sana.api.IConcept; import org.sana.core.Concept; import android.os.Parcel; import android.os.Parcelable; /** * Parcelable implementation of {@link org.sana.core.Concept}. * * @author Sana Development * */ public class ConceptParcel extends Concept implements Parcelable{ public static final String TAG = ConceptParcel.class.getSimpleName(); /** * Creates an uninitialized instance. */ public ConceptParcel(){} public ConceptParcel(Parcel in){ ModelParcel.readFromParcel(this, in); setName(in.readString()); setConstraints(in.readString()); setDescription(in.readString()); setDatatype(in.readString()); setDisplayName(in.readString()); setMediatype(in.readString()); } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { ModelParcel.writeToParcel(this, dest); dest.writeString(getName()); dest.writeString(getConstraints()); dest.writeString(getDescription()); dest.writeString(getDatatype()); dest.writeString(getDisplayName()); dest.writeString(getMediatype()); } public static final Parcelable.Creator<ConceptParcel> CREATOR = new Parcelable.Creator<ConceptParcel>() { @Override public ConceptParcel createFromParcel(Parcel source) { return new ConceptParcel(source); } @Override public ConceptParcel[] newArray(int size) { ConceptParcel[] array = new ConceptParcel[size]; for(int i=0; i < size;i++){ array[i] = new ConceptParcel(); } return array; } }; /** * Initialize a new Parcelable Concept from an object adhering to * the IConcept interface. 
* * @param obj The object to copy * @return A new instance */ public static ConceptParcel get(IConcept obj){ ConceptParcel parcel = new ConceptParcel(); parcel.setUuid(obj.getUuid()); parcel.setCreated(obj.getCreated()); parcel.setModified(obj.getModified()); parcel.setName(obj.getName()); parcel.setConstraints(obj.getConstraints()); parcel.setDescription(obj.getDescription()); parcel.setDatatype(obj.getDatatype()); parcel.setDisplayName(obj.getDisplayName()); parcel.setMediatype(obj.getMediatype()); return parcel; } }
api-android/src/main/java/org/sana/android/content/core/ConceptParcel.java
/** * Copyright (c) 2013, Sana * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Sana nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL Sana BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.sana.android.content.core; import java.text.ParseException; import org.sana.core.Concept; import org.sana.util.DateUtil; import android.os.Parcel; import android.os.Parcelable; /** * Parcelable implementation of {@link org.sana.Concept}. * * @author Sana Development * */ public class ConceptParcel extends Concept implements Parcelable{ public static final String TAG = ConceptParcel.class.getSimpleName(); /** * Creates an uninitialized instance. */ public ConceptParcel(){} public ConceptParcel(Parcel in){ setUuid(in.readString()); try { setCreated(DateUtil.parseDate(in.readString())); setModified(DateUtil.parseDate(in.readString())); } catch (ParseException e) { e.printStackTrace(); throw new IllegalArgumentException(e); } //TODO Complete reading fields from the Parcel } /* (non-Javadoc) * @see android.os.Parcelable#describeContents() */ @Override public int describeContents() { return 0; } /* (non-Javadoc) * @see android.os.Parcelable#writeToParcel(android.os.Parcel, int) */ @Override public void writeToParcel(Parcel dest, int flags) { // TODO Auto-generated method stub } public static final Parcelable.Creator<ConceptParcel> CREATOR = new Parcelable.Creator<ConceptParcel>() { @Override public ConceptParcel createFromParcel(Parcel source) { // TODO Auto-generated method stub return null; } @Override public ConceptParcel[] newArray(int size) { // TODO Auto-generated method stub return null; } }; }
Update Parcelable implementation of Concept
api-android/src/main/java/org/sana/android/content/core/ConceptParcel.java
Update Parcelable implementation of Concept
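What the ConceptParcel commit actually adds is the write/read symmetry that Parcelable requires: writeToParcel and the Parcel constructor must handle the same fields in the same order (the shared model fields via ModelParcel, then name, constraints, description, datatype, displayName, mediatype). Since android.os.Parcel cannot run on a plain JVM, here is a rough analogue of that ordering contract using java.io streams; the field subset is hypothetical and this is not the Sana API.

import java.io.*;

public class OrderingContractDemo {
    public static void main(String[] args) throws IOException {
        // Write a few fields in a fixed order, as writeToParcel does...
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            out.writeUTF("concept-uuid-123");   // uuid
            out.writeUTF("Temperature");        // name
            out.writeUTF("numeric");            // datatype
        }
        // ...and read them back in exactly the same order, as the
        // ConceptParcel(Parcel) constructor does with its readString() calls.
        try (DataInputStream in = new DataInputStream(
                 new ByteArrayInputStream(buffer.toByteArray()))) {
            String uuid = in.readUTF();
            String name = in.readUTF();
            String datatype = in.readUTF();
            System.out.println(uuid + " / " + name + " / " + datatype);
        }
    }
}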
Java
bsd-3-clause
022d0d764d5772fc3d36ddc48bdf5225ce87b122
0
Clunker5/tregmine-2.0,Clunker5/tregmine-2.0
package info.tregmine.commands; import java.util.Queue; import static org.bukkit.ChatColor.*; import org.bukkit.entity.Player; import org.bukkit.Server; import org.bukkit.scheduler.BukkitScheduler; import info.tregmine.Tregmine; import info.tregmine.api.TregminePlayer; import info.tregmine.api.Rank; import info.tregmine.database.DAOException; import info.tregmine.database.IContext; import info.tregmine.database.IPlayerDAO; import info.tregmine.database.IMentorLogDAO; public class MentorCommand extends AbstractCommand { public MentorCommand(Tregmine tregmine) { super(tregmine, "mentor"); } @Override public boolean handlePlayer(TregminePlayer player, String[] args) { String action = "queue"; if (args.length > 0) { action = args[0]; } if ("queue".equalsIgnoreCase(action)) { if (!player.canMentor()) { player.sendMessage(RED + "You have not been granted mentoring abilities."); return true; } if (player.getStudent() != null) { player.sendMessage(RED + "You can only mentor one " + "student at any given time."); return true; } Queue<TregminePlayer> students = tregmine.getStudentQueue(); if (students.size() > 0) { TregminePlayer student = students.poll(); startMentoring(tregmine, student, player); return true; } Queue<TregminePlayer> mentors = tregmine.getMentorQueue(); mentors.offer(player); player.sendMessage(GREEN + "You are now part of the mentor queue. " + "You are number " + mentors.size() + ". Type /mentor cancel " + "to opt out."); } else if ("cancel".equalsIgnoreCase(action)) { if (player.getRank() == Rank.TOURIST) { TregminePlayer mentor = player.getMentor(); try (IContext ctx = tregmine.createContext()) { IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO(); int mentorLogId = mentorLogDAO.getMentorLogId(player, mentor); mentorLogDAO.updateMentorLogEvent(mentorLogId, IMentorLogDAO.MentoringEvent.CANCELLED); } catch (DAOException e) { throw new RuntimeException(e); } player.setMentor(null); mentor.setStudent(null); mentor.sendMessage(player.getChatName() + RED + " cancelled " + "mentoring with you."); player.sendMessage(GREEN + "Mentoring cancelled. Attempting to " + "find you a new mentor."); findMentor(tregmine, player); } else { Queue<TregminePlayer> mentors = tregmine.getMentorQueue(); if (!mentors.contains(player)) { player.sendMessage(RED + "You are not part of the mentor queue. " + "If you have already been assigned a student, you cannot " + "abort the mentoring."); return true; } mentors.remove(player); player.sendMessage(GREEN + "You are no longer part of the mentor queue."); } } else if ("complete".equalsIgnoreCase(action)) { if (!player.canMentor()) { player.sendMessage(RED + "You have not been granted mentoring abilities."); return true; } TregminePlayer student = player.getStudent(); if (student == null) { player.sendMessage(RED + "You are not mentoring anyone right now."); return true; } int timeRemaining = Math.max(60*5 - student.getPlayTime() - student.getTimeOnline(), 0); if (timeRemaining > 0) { player.sendMessage(RED + student.getChatName() + RED + " has " + timeRemaining + " seconds of mentoring left."); return true; } player.sendMessage(GREEN + "Mentoring of " + student.getChatName() + GREEN + " has now finished!"); player.giveExp(100); student.sendMessage(GREEN + "Congratulations! You have now achieved " + "settler status. 
We hope you'll enjoy your stay on Tregmine!"); Tregmine.LOGGER.info("[MENTOR] " + student.getChatName() + " was " + "promoted to settler by " + player.getChatName() + "."); try (IContext ctx = tregmine.createContext()) { student.setRank(Rank.SETTLER); student.setMentor(null); player.setStudent(null); IPlayerDAO playerDAO = ctx.getPlayerDAO(); playerDAO.updatePlayer(student); playerDAO.updatePlayerInfo(student); IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO(); int mentorLogId = mentorLogDAO.getMentorLogId(student, player); mentorLogDAO.updateMentorLogEvent(mentorLogId, IMentorLogDAO.MentoringEvent.COMPLETED); } catch (DAOException e) { throw new RuntimeException(e); } } else { return false; } return true; } public static void findMentor(Tregmine plugin, TregminePlayer student) { if(student.getRank() != Rank.UNVERIFIED && student.getRank() != Rank.TOURIST){ return; } Queue<TregminePlayer> mentors = plugin.getMentorQueue(); TregminePlayer mentor = mentors.poll(); if (mentor != null) { startMentoring(plugin, student, mentor); } else { student.sendMessage(YELLOW + "You will now be assigned " + "a mentor to show you around, as soon as one becomes available."); Queue<TregminePlayer> students = plugin.getStudentQueue(); students.offer(student); for (TregminePlayer p : plugin.getOnlinePlayers()) { if (!p.canMentor()) { continue; } p.sendMessage(student.getChatName() + YELLOW + " needs a mentor! Type /mentor to " + "offer your services!"); } } } public static void startMentoring(Tregmine tregmine, TregminePlayer student, TregminePlayer mentor) { student.setMentor(mentor); mentor.setStudent(student); try (IContext ctx = tregmine.createContext()) { IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO(); int mentorLogId = mentorLogDAO.getMentorLogId(student, mentor); Tregmine.LOGGER.info("Mentor log id: " + mentorLogId); if (mentorLogId == 0) { mentorLogDAO.insertMentorLog(student, mentor); } else { mentorLogDAO.updateMentorLogResume(mentorLogId); } } catch (DAOException e) { throw new RuntimeException(e); } Tregmine.LOGGER.info("[MENTOR] " + mentor.getChatName() + " is " + "mentoring " + student.getChatName()); // Instructions for students student.sendMessage(mentor.getChatName() + GREEN + " has been assigned as your mentor!"); student.sendMessage(YELLOW + "He or she will show you " + "around, answer any questions, and help you find a place " + "to build."); student.sendMessage(YELLOW + "If your mentor turns out to be unhelpful, " + "type " + RED + "/mentor cancel" + YELLOW + " to stop and wait " + "for a new mentor to become available."); // Instructions for mentor mentor.sendMessage(GREEN + "You have been assigned to " + "mentor " + student.getChatName() + GREEN + "."); mentor.sendMessage(YELLOW + "Please do this: "); mentor.sendMessage(YELLOW + "1. Explain basic rules (" + RED + "Do not force your student to read the rules, or take a test " + YELLOW + ")"); mentor.sendMessage(YELLOW + "2. Demonstrate basic commands"); mentor.sendMessage(YELLOW + "3. Show him or her around"); mentor.sendMessage(YELLOW + "4. Help him or her to find a lot " + "and start building. If you own a zone, you may sell " + "a lot, but keep in mind that it might be a good idea " + "to let other players make offers too. 
Your students will " + "also be able to build anywhere as long as they are within a " + "50 block radius of you."); mentor.sendMessage(YELLOW + "Scamming new players will not be "+ "tolerated."); mentor.sendMessage(YELLOW + "Mentoring takes at least 15 minutes, and " + "after that time has passed you can upgrade the tourist to " + "settler rank by doing " + GREEN + "/mentor complete" + YELLOW + "."); mentor.sendMessage(YELLOW + "Please start by teleporting to " + student.getChatName() + YELLOW + ", or by summoning him or her!"); } }
src/info/tregmine/commands/MentorCommand.java
package info.tregmine.commands; import java.util.Queue; import static org.bukkit.ChatColor.*; import org.bukkit.entity.Player; import org.bukkit.Server; import org.bukkit.scheduler.BukkitScheduler; import info.tregmine.Tregmine; import info.tregmine.api.TregminePlayer; import info.tregmine.api.Rank; import info.tregmine.database.DAOException; import info.tregmine.database.IContext; import info.tregmine.database.IPlayerDAO; import info.tregmine.database.IMentorLogDAO; public class MentorCommand extends AbstractCommand { public MentorCommand(Tregmine tregmine) { super(tregmine, "mentor"); } @Override public boolean handlePlayer(TregminePlayer player, String[] args) { String action = "queue"; if (args.length > 0) { action = args[0]; } if ("queue".equalsIgnoreCase(action)) { if (!player.canMentor()) { player.sendMessage(RED + "You have not been granted mentoring abilities."); return true; } if (player.getStudent() != null) { player.sendMessage(RED + "You can only mentor one " + "student at any given time."); return true; } Queue<TregminePlayer> students = tregmine.getStudentQueue(); if (students.size() > 0) { TregminePlayer student = students.poll(); startMentoring(tregmine, student, player); return true; } Queue<TregminePlayer> mentors = tregmine.getMentorQueue(); mentors.offer(player); player.sendMessage(GREEN + "You are now part of the mentor queue. " + "You are number " + mentors.size() + ". Type /mentor cancel " + "to opt out."); } else if ("cancel".equalsIgnoreCase(action)) { if (player.getRank() == Rank.TOURIST) { TregminePlayer mentor = player.getMentor(); try (IContext ctx = tregmine.createContext()) { IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO(); int mentorLogId = mentorLogDAO.getMentorLogId(player, mentor); mentorLogDAO.updateMentorLogEvent(mentorLogId, IMentorLogDAO.MentoringEvent.CANCELLED); } catch (DAOException e) { throw new RuntimeException(e); } player.setMentor(null); mentor.setStudent(null); mentor.sendMessage(player.getChatName() + RED + " cancelled " + "mentoring with you."); player.sendMessage(GREEN + "Mentoring cancelled. Attempting to " + "find you a new mentor."); findMentor(tregmine, player); } else { Queue<TregminePlayer> mentors = tregmine.getMentorQueue(); if (!mentors.contains(player)) { player.sendMessage(RED + "You are not part of the mentor queue. " + "If you have already been assigned a student, you cannot " + "abort the mentoring."); return true; } mentors.remove(player); player.sendMessage(GREEN + "You are no longer part of the mentor queue."); } } else if ("complete".equalsIgnoreCase(action)) { if (!player.canMentor()) { player.sendMessage(RED + "You have not been granted mentoring abilities."); return true; } TregminePlayer student = player.getStudent(); if (student == null) { player.sendMessage(RED + "You are not mentoring anyone right now."); return true; } int timeRemaining = Math.max(60*5 - student.getPlayTime() - student.getTimeOnline(), 0); if (timeRemaining > 0) { player.sendMessage(RED + student.getChatName() + RED + " has " + timeRemaining + " seconds of mentoring left."); return true; } player.sendMessage(GREEN + "Mentoring of " + student.getChatName() + GREEN + " has now finished!"); player.giveExp(100); student.sendMessage(GREEN + "Congratulations! You have now achieved " + "settler status. 
We hope you'll enjoy your stay on Tregmine!"); Tregmine.LOGGER.info("[MENTOR] " + student.getChatName() + " was " + "promoted to settler by " + player.getChatName() + "."); try (IContext ctx = tregmine.createContext()) { student.setRank(Rank.SETTLER); student.setMentor(null); player.setStudent(null); IPlayerDAO playerDAO = ctx.getPlayerDAO(); playerDAO.updatePlayer(student); playerDAO.updatePlayerInfo(student); IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO(); int mentorLogId = mentorLogDAO.getMentorLogId(student, player); mentorLogDAO.updateMentorLogEvent(mentorLogId, IMentorLogDAO.MentoringEvent.COMPLETED); } catch (DAOException e) { throw new RuntimeException(e); } } else { return false; } return true; } public static void findMentor(Tregmine plugin, TregminePlayer student) { Queue<TregminePlayer> mentors = plugin.getMentorQueue(); TregminePlayer mentor = mentors.poll(); if (mentor != null) { startMentoring(plugin, student, mentor); } else { student.sendMessage(YELLOW + "You will now be assigned " + "a mentor to show you around, as soon as one becomes available."); Queue<TregminePlayer> students = plugin.getStudentQueue(); students.offer(student); for (TregminePlayer p : plugin.getOnlinePlayers()) { if (!p.canMentor()) { continue; } p.sendMessage(student.getChatName() + YELLOW + " needs a mentor! Type /mentor to " + "offer your services!"); } } } public static void startMentoring(Tregmine tregmine, TregminePlayer student, TregminePlayer mentor) { student.setMentor(mentor); mentor.setStudent(student); try (IContext ctx = tregmine.createContext()) { IMentorLogDAO mentorLogDAO = ctx.getMentorLogDAO(); int mentorLogId = mentorLogDAO.getMentorLogId(student, mentor); Tregmine.LOGGER.info("Mentor log id: " + mentorLogId); if (mentorLogId == 0) { mentorLogDAO.insertMentorLog(student, mentor); } else { mentorLogDAO.updateMentorLogResume(mentorLogId); } } catch (DAOException e) { throw new RuntimeException(e); } Tregmine.LOGGER.info("[MENTOR] " + mentor.getChatName() + " is " + "mentoring " + student.getChatName()); // Instructions for students student.sendMessage(mentor.getChatName() + GREEN + " has been assigned as your mentor!"); student.sendMessage(YELLOW + "He or she will show you " + "around, answer any questions, and help you find a place " + "to build."); student.sendMessage(YELLOW + "If your mentor turns out to be unhelpful, " + "type " + RED + "/mentor cancel" + YELLOW + " to stop and wait " + "for a new mentor to become available."); // Instructions for mentor mentor.sendMessage(GREEN + "You have been assigned to " + "mentor " + student.getChatName() + GREEN + "."); mentor.sendMessage(YELLOW + "Please do this: "); mentor.sendMessage(YELLOW + "1. Explain basic rules (" + RED + "Do not force your student to read the rules, or take a test " + YELLOW + ")"); mentor.sendMessage(YELLOW + "2. Demonstrate basic commands"); mentor.sendMessage(YELLOW + "3. Show him or her around"); mentor.sendMessage(YELLOW + "4. Help him or her to find a lot " + "and start building. If you own a zone, you may sell " + "a lot, but keep in mind that it might be a good idea " + "to let other players make offers too. 
Your students will " + "also be able to build anywhere as long as they are within a " + "50 block radius of you."); mentor.sendMessage(YELLOW + "Scamming new players will not be "+ "tolerated."); mentor.sendMessage(YELLOW + "Mentoring takes at least 15 minutes, and " + "after that time has passed you can upgrade the tourist to " + "settler rank by doing " + GREEN + "/mentor complete" + YELLOW + "."); mentor.sendMessage(YELLOW + "Please start by teleporting to " + student.getChatName() + YELLOW + ", or by summoning him or her!"); } }
Fixed a bug
src/info/tregmine/commands/MentorCommand.java
Fixed a bug
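The terse "Fixed a bug" message in this Tregmine record corresponds to one visible change between the old and new contents: findMentor now returns early unless the player's rank is UNVERIFIED or TOURIST, so higher-ranked players are never queued for a mentor. A stripped-down sketch of that guard follows, with a hypothetical Rank enum standing in for info.tregmine.api.Rank:

public class MentorGuardSketch {
    // Hypothetical subset of ranks; the real enum lives in info.tregmine.api.Rank.
    enum Rank { UNVERIFIED, TOURIST, SETTLER }

    // Mirrors the early return added at the top of MentorCommand.findMentor:
    // only unverified players and tourists should ever enter the student queue.
    static boolean needsMentor(Rank rank) {
        if (rank != Rank.UNVERIFIED && rank != Rank.TOURIST) {
            return false;
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(needsMentor(Rank.TOURIST));  // true  -> gets queued
        System.out.println(needsMentor(Rank.SETTLER));  // false -> skipped
    }
}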
Java
bsd-3-clause
f7120df4d2b555f075ce0f5f228478ea0beacead
0
edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon
/* * $Id: TestRepositoryNodeImpl.java,v 1.61 2010-02-23 04:58:15 pgust Exp $ */ /* Copyright (c) 2000-2007 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.repository; import java.io.*; import java.net.*; import java.util.*; import org.lockss.test.*; import org.lockss.app.*; import org.lockss.util.*; import org.lockss.daemon.*; import org.lockss.plugin.*; import org.lockss.protocol.*; /** * This is the test class for org.lockss.repository.RepositoryNodeImpl */ public class TestRepositoryNodeImpl extends LockssTestCase { static final String TREE_SIZE_PROPERTY = RepositoryNodeImpl.TREE_SIZE_PROPERTY; static final String CHILD_COUNT_PROPERTY = RepositoryNodeImpl.CHILD_COUNT_PROPERTY; private MockLockssDaemon theDaemon; private MyLockssRepositoryImpl repo; private String tempDirPath; MockArchivalUnit mau; private MockIdentityManager idmgr; Properties props; public void setUp() throws Exception { super.setUp(); tempDirPath = getTempDir().getAbsolutePath() + File.separator; props = new Properties(); props.setProperty(LockssRepositoryImpl.PARAM_CACHE_LOCATION, tempDirPath); ConfigurationUtil.setCurrentConfigFromProps(props); mau = new MockArchivalUnit(); theDaemon = getMockLockssDaemon(); // Create the identity manager... idmgr = new MockIdentityManager(); theDaemon.setIdentityManager(idmgr); idmgr.initService(theDaemon); repo = (MyLockssRepositoryImpl)MyLockssRepositoryImpl.createNewLockssRepository(mau); theDaemon.setAuManager(LockssDaemon.LOCKSS_REPOSITORY, mau, repo); repo.initService(theDaemon); repo.startService(); } public void tearDown() throws Exception { TimeBase.setReal(); repo.stopService(); theDaemon.stopDaemon(); super.tearDown(); } // RepositoryNodeImpl relies on nonexistent dir.listFiles() returning // null, not empty list. 
public void testFileAssumptions() throws Exception { // empty dir returns empty list File dir1 = getTempDir(); assertNotNull(null, dir1.listFiles()); assertEquals(new File[0], dir1.listFiles()); // nonexistent dir returns null File dir2 = new File(dir1, "bacds"); assertNull(null, dir2.listFiles()); // dir list of non-dir returns null File file1 = File.createTempFile("xxx", ".tmp", dir1); assertTrue(file1.exists()); assertNull(null, file1.listFiles()); } public void testGetNodeUrl() { RepositoryNode node = new RepositoryNodeImpl("testUrl", "testDir", null); assertEquals("testUrl", node.getNodeUrl()); node = new RepositoryNodeImpl("testUrl/test.txt", "testUrl/test.txt", null); assertEquals("testUrl/test.txt", node.getNodeUrl()); } public void testFileLocation() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File testFile = new File(tempDirPath); assertTrue(testFile.exists()); testFile = new File(tempDirPath + "/#content/current"); assertTrue(testFile.exists()); testFile = new File(tempDirPath + "/#content/current.props"); assertTrue(testFile.exists()); testFile = new File(tempDirPath + "/#node_props"); assertFalse(testFile.exists()); testFile = new File(tempDirPath + "/#agreement"); assertFalse(testFile.exists()); } public void testUpdateAgreementCreatesFile() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File testFile = new File(tempDirPath, "#agreement"); assertFalse(testFile.exists()); // Agreeing IDs. 
PeerIdentity[] agreeingPeers = { new MockPeerIdentity("TCP:[192.168.0.1]:9723"), new MockPeerIdentity("TCP:[192.168.0.2]:9723") }; leaf.signalAgreement(ListUtil.fromArray(agreeingPeers)); assertTrue(testFile.exists()); } public void testUpdateAndLoadAgreement() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); PeerIdentity testid_1 = new MockPeerIdentity("TCP:[192.168.0.1]:9723"); PeerIdentity testid_2 = new MockPeerIdentity("TCP:[192.168.0.2]:9723"); PeerIdentity testid_3 = new MockPeerIdentity("TCP:[192.168.0.3]:9723"); PeerIdentity testid_4 = new MockPeerIdentity("TCP:[192.168.0.4]:9723"); idmgr.addPeerIdentity(testid_1.getIdString(), testid_1); idmgr.addPeerIdentity(testid_2.getIdString(), testid_2); idmgr.addPeerIdentity(testid_3.getIdString(), testid_3); idmgr.addPeerIdentity(testid_4.getIdString(), testid_4); leaf.signalAgreement(ListUtil.list(testid_1, testid_3)); assertEquals(2, ((RepositoryNodeImpl)leaf).loadAgreementHistory().size()); assertTrue(leaf.hasAgreement(testid_1)); assertFalse(leaf.hasAgreement(testid_2)); assertTrue(leaf.hasAgreement(testid_3)); assertFalse(leaf.hasAgreement(testid_4)); leaf.signalAgreement(ListUtil.list(testid_1, testid_2, testid_3, testid_4)); assertEquals(4, ((RepositoryNodeImpl)leaf).loadAgreementHistory().size()); assertTrue(leaf.hasAgreement(testid_1)); assertTrue(leaf.hasAgreement(testid_2)); assertTrue(leaf.hasAgreement(testid_3)); assertTrue(leaf.hasAgreement(testid_4)); } public void testVersionFileLocation() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File testFile = new File(tempDirPath + "/#content/1"); assertFalse(testFile.exists()); testFile = new File(tempDirPath + "/#content/1.props"); assertFalse(testFile.exists()); leaf.makeNewVersion(); OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream("test stream 2"); StreamUtil.copy(is, os); is.close(); os.close(); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); testFile = new File(tempDirPath + "/#content/1"); assertTrue(testFile.exists()); testFile = new File(tempDirPath + "/#content/1.props"); assertTrue(testFile.exists()); } public void testInactiveFileLocation() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File curFile = new File(tempDirPath + "/#content/current"); File curPropsFile = new File(tempDirPath + "/#content/current.props"); File inactFile = new File(tempDirPath + "/#content/inactive"); File inactPropsFile = new File(tempDirPath + "/#content/inactive.props"); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); leaf.deactivateContent(); assertFalse(curFile.exists()); assertFalse(curPropsFile.exists()); assertTrue(inactFile.exists()); 
assertTrue(inactPropsFile.exists()); //reactivate leaf.restoreLastVersion(); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); leaf.deactivateContent(); assertFalse(curFile.exists()); assertFalse(curPropsFile.exists()); assertTrue(inactFile.exists()); assertTrue(inactPropsFile.exists()); // make new version leaf.makeNewVersion(); OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream("test stream 2"); StreamUtil.copy(is, os); is.close(); os.close(); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); } public void testDeleteFileLocation() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File curFile = new File(tempDirPath + "/#content/current"); File curPropsFile = new File(tempDirPath + "/#content/current.props"); File inactFile = new File(tempDirPath + "/#content/inactive"); File inactPropsFile = new File(tempDirPath + "/#content/inactive.props"); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); leaf.markAsDeleted(); assertFalse(curFile.exists()); assertFalse(curPropsFile.exists()); assertTrue(inactFile.exists()); assertTrue(inactPropsFile.exists()); //reactivate leaf.restoreLastVersion(); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); leaf.markAsDeleted(); assertFalse(curFile.exists()); assertFalse(curPropsFile.exists()); assertTrue(inactFile.exists()); assertTrue(inactPropsFile.exists()); // make new version leaf.makeNewVersion(); OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream("test stream 2"); StreamUtil.copy(is, os); is.close(); os.close(); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); } public void testListEntriesNonexistentDir() throws Exception { RepositoryNode node = new RepositoryNodeImpl("foo-no-url", "foo-no-dir", null); try { node.listChildren(null, false); fail("listChildren() is nonexistent dir should throw"); } catch (LockssRepository.RepositoryStateException e) { } } public void testListEntries() throws Exception { createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); createLeaf("http://www.example.com/testDir/branch1/leaf2", "test stream", null); createLeaf("http://www.example.com/testDir/branch2/leaf3", "test stream", null); createLeaf("http://www.example.com/testDir/branch2", "test stream", null); createLeaf("http://www.example.com/testDir/leaf4", "test stream", null); // root branch RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); Iterator childIt = dirEntry.listChildren(null, false); ArrayList childL = new ArrayList(3); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } String[] expectedA = new String[] { "http://www.example.com/testDir/branch1", 
"http://www.example.com/testDir/branch2", "http://www.example.com/testDir/leaf4" }; assertIsomorphic(expectedA, childL); // sub-branch dirEntry = repo.getNode("http://www.example.com/testDir/branch1"); childL.clear(); childIt = dirEntry.listChildren(null, false); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { "http://www.example.com/testDir/branch1/leaf1", "http://www.example.com/testDir/branch1/leaf2", }; assertIsomorphic(expectedA, childL); // sub-branch with content dirEntry = repo.getNode("http://www.example.com/testDir/branch2"); childL.clear(); childIt = dirEntry.listChildren(null, false); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { "http://www.example.com/testDir/branch2/leaf3", }; assertIsomorphic(expectedA, childL); // leaf node dirEntry = repo.getNode("http://www.example.com/testDir/branch1/leaf1"); childL.clear(); childIt = dirEntry.listChildren(null, false); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { }; assertIsomorphic(expectedA, childL); } String normalizeName(RepositoryNodeImpl node, String name) { return node.normalize(new File(name)).getPath(); } public void testNormalizeUrlEncodingCase() throws Exception { if (!PlatformUtil.getInstance().isCaseSensitiveFileSystem()) { log.debug("Skipping testNormalizeUrlEncodingCase: file system is not case sensitive."); return; } RepositoryNodeImpl node = new RepositoryNodeImpl("foo", "bar", null); // nothing to normalize File file = new File("foo/bar/baz"); assertSame(file, node.normalize(file)); file = new File("foo/bar/ba%ABz"); assertSame(file, node.normalize(file)); // unnormalized in parent dir name is left alone file = new File("ba%abz/bar"); assertSame(file, node.normalize(file)); file = new File("foo/ba%abz/bar"); assertSame(file, node.normalize(file)); // should be normalized assertEquals("ba%ABz", normalizeName(node, "ba%aBz")); assertEquals("/ba%ABz", normalizeName(node, "/ba%aBz")); assertEquals("foo/bar/ba%ABz", normalizeName(node, "foo/bar/ba%aBz")); assertEquals("foo/bar/ba%ABz", normalizeName(node, "foo/bar/ba%Abz")); assertEquals("foo/bar/ba%ABz", normalizeName(node, "foo/bar/ba%abz")); assertEquals("foo/bar/ba%abz/ba%ABz", normalizeName(node, "foo/bar/ba%abz/ba%abz")); } public void testNormalizeTrailingQuestion() throws Exception { RepositoryNodeImpl node = new RepositoryNodeImpl("foo", "bar", null); // nothing to normalize File file = new File("foo/bar/baz"); assertSame(file, node.normalize(file)); file = new File("foo/bar/ba?z"); assertSame(file, node.normalize(file)); // unnormalized in parent dir name is left alone file = new File("ba?/bar"); assertSame(file, node.normalize(file)); // should be normalized assertEquals("baz", normalizeName(node, "baz?")); assertEquals(new File("/ba").getPath(), normalizeName(node, "/ba?")); assertEquals(new File("foo/bar/bar").getPath(), normalizeName(node, "foo/bar/bar?")); assertEquals(new File("foo/ba?r/bar").getPath(), normalizeName(node, "foo/ba?r/bar?")); assertEquals(new File("foo/bar?/bar").getPath(), normalizeName(node, "foo/bar?/bar?")); // disable trailing ? 
normalization ConfigurationUtil.addFromArgs(UrlUtil.PARAM_NORMALIZE_EMPTY_QUERY, "false"); assertEquals("baz?", normalizeName(node, "baz?")); } List getChildNames(String nodeName) throws MalformedURLException { RepositoryNode dirEntry = repo.getNode(nodeName); ArrayList res = new ArrayList(); for (Iterator childIt = dirEntry.listChildren(null, false); childIt.hasNext(); ) { RepositoryNode node = (RepositoryNode)childIt.next(); res.add(node.getNodeUrl()); } return res; } public void testFixUnnormalized_Rename() throws Exception { if (!PlatformUtil.getInstance().isCaseSensitiveFileSystem()) { log.debug("Skipping testFixUnnormalized_Rename: file system is not case sensitive."); return; } repo.setDontNormalize(true); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); createLeaf("http://www.example.com/testDir/branch%3c1/leaf%2C1", "test stream", null); createLeaf("http://www.example.com/testDir/branch%3c1/leaf%2c2", "test stream", null); createLeaf("http://www.example.com/testDir/branch2/leaf3", "test stream", null); createLeaf("http://www.example.com/testDir/branch2", "test stream", null); createLeaf("http://www.example.com/testDir/leaf4", "test stream", null); String[] expectedA = new String[] { "http://www.example.com/testDir/branch%3c1", "http://www.example.com/testDir/branch2", "http://www.example.com/testDir/leaf4" }; assertIsomorphic(expectedA, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); String[] expectedB = new String[] { "http://www.example.com/testDir/branch%3C1", "http://www.example.com/testDir/branch2", "http://www.example.com/testDir/leaf4" }; assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); String[] expectedC = new String[] { "http://www.example.com/testDir/branch%3C1/leaf%2C1", "http://www.example.com/testDir/branch%3C1/leaf%2c2", }; assertIsomorphic(expectedC, getChildNames(("http://www.example.com/testDir/branch%3C1"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); String[] expectedD = new String[] { "http://www.example.com/testDir/branch%3C1/leaf%2C1", "http://www.example.com/testDir/branch%3C1/leaf%2C2", }; assertIsomorphic(expectedD, getChildNames(("http://www.example.com/testDir/branch%3C1"))); } public void testFixUnnormalizedMultiple_Delete() throws Exception { if (!PlatformUtil.getInstance().isCaseSensitiveFileSystem()) { log.debug("Skipping testFixUnnormalizedMultiple_Delete: file system is not case sensitive."); return; } repo.setDontNormalize(true); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); createLeaf("http://www.example.com/testDir/leaf%2C1", "test stream", null); createLeaf("http://www.example.com/testDir/leaf%2c1", "test stream", null); createLeaf("http://www.example.com/testDir/leaf3", "test stream", null); String[] expectedA = new String[] { "http://www.example.com/testDir/leaf%2C1", "http://www.example.com/testDir/leaf%2c1", "http://www.example.com/testDir/leaf3", }; assertIsomorphic(expectedA, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); String[] expectedB = new String[] { 
"http://www.example.com/testDir/leaf%2C1", "http://www.example.com/testDir/leaf3", }; assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); } public void testFixUnnormalizedMultiple_DeleteMultiple() throws Exception { if (!PlatformUtil.getInstance().isCaseSensitiveFileSystem()) { log.debug("Skipping testFixUnnormalizedMultiple_DeleteMultiple: file system is not case sensitive."); return; } repo.setDontNormalize(true); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); createLeaf("http://www.example.com/testDir/leaf%CA%3E", "test stream", null); createLeaf("http://www.example.com/testDir/leaf%cA%3E", "test stream", null); createLeaf("http://www.example.com/testDir/leaf%ca%3E", "test stream", null); createLeaf("http://www.example.com/testDir/leaf%ca%3e", "test stream", null); createLeaf("http://www.example.com/testDir/leaf3", "test stream", null); String[] expectedA = new String[] { "http://www.example.com/testDir/leaf%CA%3E", "http://www.example.com/testDir/leaf%cA%3E", "http://www.example.com/testDir/leaf%ca%3E", "http://www.example.com/testDir/leaf%ca%3e", "http://www.example.com/testDir/leaf3", }; assertIsomorphic(expectedA, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); String[] expectedB = new String[] { "http://www.example.com/testDir/leaf%CA%3E", "http://www.example.com/testDir/leaf3", }; assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); } public void testFixUnnormalized_DontFixParent() throws Exception { if (!PlatformUtil.getInstance().isCaseSensitiveFileSystem()) { log.debug("Skipping testFixUnnormalized_DontFixParent: file system is not case sensitive."); return; } repo.setDontNormalize(true); createLeaf("http://www.example.com/testDir/branch%3c1/leaf%2C1", "test stream", null); createLeaf("http://www.example.com/testDir/branch%3c1/leaf%2c2", "test stream", null); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); String[] expectedA = new String[] { "http://www.example.com/testDir/branch%3c1/leaf%2C1", "http://www.example.com/testDir/branch%3c1/leaf%2C2", }; assertIsomorphic(expectedA, getChildNames(("http://www.example.com/testDir/branch%3c1"))); } public void testEntrySort() throws Exception { createLeaf("http://www.example.com/testDir/branch2/leaf1", null, null); createLeaf("http://www.example.com/testDir/leaf4", null, null); createLeaf("http://www.example.com/testDir/branch1/leaf1", null, null); createLeaf("http://www.example.com/testDir/leaf3", null, null); RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); Iterator childIt = dirEntry.listChildren(null, false); ArrayList childL = new ArrayList(4); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } String[] expectedA = new String[] { "http://www.example.com/testDir/branch1", "http://www.example.com/testDir/branch2", "http://www.example.com/testDir/leaf3", "http://www.example.com/testDir/leaf4" }; assertIsomorphic(expectedA, childL); } public void testIllegalOperations() throws Exception { RepositoryNode leaf = 
repo.createNewNode("http://www.example.com/testDir/test.cache"); assertFalse(leaf.hasContent()); try { leaf.getCurrentVersion(); fail("Cannot get current version if no content."); } catch (UnsupportedOperationException uoe) { } try { leaf.getContentSize(); fail("Cannot get content size if no content."); } catch (UnsupportedOperationException uoe) { } try { leaf.getNodeContents(); fail("Cannot get RepositoryNodeContents if no content."); } catch (UnsupportedOperationException uoe) { } try { leaf.sealNewVersion(); fail("Cannot seal version if not open."); } catch (UnsupportedOperationException uoe) { } leaf.makeNewVersion(); try { leaf.sealNewVersion(); fail("Cannot seal version if getNewOutputStream() uncalled."); } catch (UnsupportedOperationException uoe) { } leaf.makeNewVersion(); try { leaf.deactivateContent(); fail("Cannot deactivate if currently open for writing."); } catch (UnsupportedOperationException uoe) { } writeToLeaf(leaf, "test stream"); try { leaf.sealNewVersion(); fail("Cannot seal version if setNewProperties() uncalled."); } catch (UnsupportedOperationException uoe) { } leaf.makeNewVersion(); writeToLeaf(leaf, "test stream"); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertEquals(1, leaf.getCurrentVersion()); assertTrue(leaf.hasContent()); } public void testVersionTimeout() throws Exception { TimeBase.setSimulated(); RepositoryNode leaf = repo.createNewNode("http://www.example.com/testDir/test.cache"); RepositoryNode leaf2 = repo.getNode("http://www.example.com/testDir/test.cache"); leaf.makeNewVersion(); try { leaf2.makeNewVersion(); fail("Can't make new version while version open."); } catch (UnsupportedOperationException e) { } TimeBase.step(RepositoryNodeImpl.DEFAULT_VERSION_TIMEOUT/2); try { leaf2.makeNewVersion(); fail("Can't make new version while version not timed out."); } catch (UnsupportedOperationException e) { } TimeBase.step(RepositoryNodeImpl.DEFAULT_VERSION_TIMEOUT/2); leaf2.makeNewVersion(); } public void testMakeNewCache() throws Exception { RepositoryNode leaf = repo.createNewNode("http://www.example.com/testDir/test.cache"); assertFalse(leaf.hasContent()); try { leaf.getCurrentVersion(); fail("Cannot get current version if no content."); } catch (UnsupportedOperationException uoe) { } leaf.makeNewVersion(); writeToLeaf(leaf, "test stream"); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertTrue(leaf.hasContent()); assertEquals(1, leaf.getCurrentVersion()); } public void testMakeNodeLocation() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl) repo.createNewNode("http://www.example.com/testDir"); String nodeLoc = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); nodeLoc = LockssRepositoryImpl.mapUrlToFileLocation(nodeLoc, "http://www.example.com/testDir"); File testFile = new File(nodeLoc); assertFalse(testFile.exists()); leaf.createNodeLocation(); assertTrue(testFile.exists()); assertTrue(testFile.isDirectory()); } public void testMakeNewVersion() throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream 1", props); assertEquals(1, leaf.getCurrentVersion()); props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.sealNewVersion(); assertEquals(2, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream 2", resultStr); 
props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); } static final int DEL_NODE_DIR = 1; static final int DEL_CONTENT_DIR = 2; static final int DEL_CONTENT_FILE = 3; static final int DEL_PROPS_FILE = 4; public void testDisappearingFile(int whichFile, boolean tryRead) throws Exception { String url = "http://www.example.com/foo.html"; RepositoryNodeImpl leaf = (RepositoryNodeImpl)repo.createNewNode(url); String nodeLoc = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); nodeLoc = LockssRepositoryImpl.mapUrlToFileLocation(nodeLoc, url); File testFile; switch (whichFile) { case DEL_NODE_DIR: testFile = new File(nodeLoc); break; case DEL_CONTENT_DIR: testFile = new File(nodeLoc, "#content"); break; case DEL_CONTENT_FILE: testFile = new File(nodeLoc, "#content/current"); break; case DEL_PROPS_FILE: testFile = new File(nodeLoc, "#content/current.props"); break; default: throw new UnsupportedOperationException(); } assertFalse(testFile.exists()); Properties props1 = PropUtil.fromArgs("key1", "value 1"); createContentVersion(leaf, "test content 11111", props1); assertEquals(1, leaf.getCurrentVersion()); assertTrue(testFile.exists()); switch (whichFile) { case DEL_NODE_DIR: case DEL_CONTENT_DIR: assertTrue(FileUtil.delTree(testFile)); break; case DEL_CONTENT_FILE: case DEL_PROPS_FILE: assertTrue(testFile.delete()); break; } assertFalse(testFile.exists()); Properties props2 = PropUtil.fromArgs("key2", "value 2"); RepositoryNode leaf2 = repo.createNewNode(url); assertSame(leaf, leaf2); assertTrue(leaf.hasContent()); if (tryRead) { try { getLeafContent(leaf); } catch (LockssRepository.RepositoryStateException e) { // expected } } leaf2.makeNewVersion(); writeToLeaf(leaf, "test content 22222"); leaf.setNewProperties(props2); leaf.sealNewVersion(); assertTrue(testFile.exists()); int expver = 2; // if we tried to read while node or content dir was missing, version // number will have been reset. 
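// (only a missing node dir or content dir triggers that reset; a missing content or props file alone does not)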
if (tryRead) { switch (whichFile) { case DEL_NODE_DIR: case DEL_CONTENT_DIR: expver = 1; } } assertEquals(expver, leaf.getCurrentVersion()); assertEquals("test content 22222", getLeafContent(leaf)); assertEquals("value 2", leaf.getNodeContents().getProperties().get("key2")); } public void testDisappearingNodeDir() throws Exception { testDisappearingFile(DEL_NODE_DIR, false); } public void testDisappearingContentDir() throws Exception { testDisappearingFile(DEL_CONTENT_DIR, false); } public void testDisappearingContentFile() throws Exception { testDisappearingFile(DEL_CONTENT_FILE, false); } public void testDisappearingPropsFile() throws Exception { testDisappearingFile(DEL_PROPS_FILE, false); } public void testDisappearingNodeDirWithRead() throws Exception { testDisappearingFile(DEL_NODE_DIR, true); } public void testDisappearingContentDirWithRead() throws Exception { testDisappearingFile(DEL_CONTENT_DIR, true); } public void testDisappearingContentFileWithRead() throws Exception { testDisappearingFile(DEL_CONTENT_FILE, true); } public void testDisappearingPropsFileWithRead() throws Exception { testDisappearingFile(DEL_PROPS_FILE, true); } public void testMakeNewVersionWithoutClosingStream() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream 1", new Properties()); leaf.makeNewVersion(); leaf.setNewProperties(new Properties()); OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream("test stream 2"); StreamUtil.copy(is, os); is.close(); // don't close outputstream leaf.sealNewVersion(); assertEquals(2, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream 2", resultStr); } public void testMakeNewIdenticalVersionDefault() throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", props)); assertEquals(1, leaf.getCurrentVersion()); // set the file extension leaf.dateValue = 123321; props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream"); leaf.sealNewVersion(); assertEquals(1, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); // make sure proper files exist tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/test.cache"); File testFileDir = new File(tempDirPath + "/#content"); File[] files = testFileDir.listFiles(); assertEquals(2, files.length); File testFile = new File(testFileDir, "current"); assertTrue(testFile.exists()); testFile = new File(testFileDir, "current.props"); assertTrue(testFile.exists()); // testFile = new File(testFileDir, "1.props-123321"); // assertFalse(testFile.exists()); } public void testMakeNewIdenticalVersionOldWay() throws Exception { props.setProperty(RepositoryNodeImpl.PARAM_KEEP_ALL_PROPS_FOR_DUPE_FILE, "true"); ConfigurationUtil.setCurrentConfigFromProps(props); Properties props = new Properties(); props.setProperty("test 1", "value 1"); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", 
props)); assertEquals(1, leaf.getCurrentVersion()); // set the file extension leaf.dateValue = 123321; props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream"); leaf.sealNewVersion(); assertEquals(1, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); // make sure proper files exist tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/test.cache"); File testFileDir = new File(tempDirPath + "/#content"); File[] files = testFileDir.listFiles(); assertEquals(3, files.length); File testFile = new File(testFileDir, "current"); assertTrue(testFile.exists()); testFile = new File(testFileDir, "current.props"); assertTrue(testFile.exists()); testFile = new File(testFileDir, "1.props-123321"); assertTrue(testFile.exists()); } public void testMakeNewIdenticalVersionNewWay() throws Exception { props.setProperty(RepositoryNodeImpl.PARAM_KEEP_ALL_PROPS_FOR_DUPE_FILE, "false"); ConfigurationUtil.setCurrentConfigFromProps(props); Properties props = new Properties(); props.setProperty("test 1", "value 1"); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", props)); assertEquals(1, leaf.getCurrentVersion()); // set the file extension leaf.dateValue = 123321; props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream"); leaf.sealNewVersion(); assertEquals(1, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); // make sure proper files exist tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/test.cache"); File testFileDir = new File(tempDirPath + "/#content"); File[] files = testFileDir.listFiles(); assertEquals(2, files.length); File testFile = new File(testFileDir, "current"); assertTrue(testFile.exists()); testFile = new File(testFileDir, "current.props"); assertTrue(testFile.exists()); // testFile = new File(testFileDir, "1.props-123321"); // assertFalse(testFile.exists()); } public void testIdenticalVersionFixesVersionError() throws Exception { Properties props = new Properties(); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", props)); assertEquals(1, leaf.getCurrentVersion()); props = new Properties(); leaf.makeNewVersion(); leaf.setNewProperties(props); // set to error state leaf.currentVersion = 0; writeToLeaf(leaf, "test stream"); assertEquals(0, leaf.currentVersion); leaf.sealNewVersion(); // fixes error state, even though identical assertEquals(1, leaf.getCurrentVersion()); } public void testMakeNewVersionFixesVersionError() throws Exception { Properties props = new Properties(); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", props)); assertEquals(1, leaf.getCurrentVersion()); 
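// descriptive comment (added): corrupt the in-memory version counter, then write a real new version; sealNewVersion() should repair it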
props = new Properties(); leaf.makeNewVersion(); // set to error state leaf.currentVersion = -1; leaf.setNewProperties(props); writeToLeaf(leaf, "test stream2"); leaf.sealNewVersion(); // fixes error state assertEquals(1, leaf.getCurrentVersion()); } public void testGetInputStream() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream", null); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); } public void testGetProperties() throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream", props); RepositoryNode.RepositoryNodeContents contents = leaf.getNodeContents(); props = contents.getProperties(); // close stream to allow the file to be renamed later // XXX 'getProperties()' creates an input stream, and 'release()' just // sets it to null. The rename still fails in Windows unless the stream // is closed first. contents.getInputStream().close(); contents.release(); assertEquals("value 1", props.getProperty("test 1")); leaf.makeNewVersion(); props = new Properties(); props.setProperty("test 1", "value 2"); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.sealNewVersion(); props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); } RepositoryNode createNodeWithCorruptProps(String url) throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); RepositoryNode leaf = createLeaf(url, "test stream", props); RepositoryNodeImpl leafImpl = (RepositoryNodeImpl)leaf; File propsFile = new File(leafImpl.getContentDir(), RepositoryNodeImpl.CURRENT_PROPS_FILENAME); // Write a Malformed unicode escape that will cause Properties.load() // to throw OutputStream os = new BufferedOutputStream(new FileOutputStream(propsFile, true)); os.write("\\uxxxxfoo=bar".getBytes()); os.close(); return leaf; } public void testCorruptProperties1() throws Exception { RepositoryNode leaf = createNodeWithCorruptProps("http://www.example.com/testDir/test.cache"); assertFalse(leaf.hasContent()); assertTrue(leaf.isDeleted()); leaf.makeNewVersion(); writeToLeaf(leaf, "test stream"); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertTrue(leaf.hasContent()); assertFalse(leaf.isDeleted()); } public void testCorruptProperties2() throws Exception { String stem = "http://www.example.com/testDir"; RepositoryNode leaf = createNodeWithCorruptProps(stem + "/test.cache"); RepositoryNode leaf2 = createLeaf(stem + "/foo", "test stream", props); RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); Iterator childIt = dirEntry.listChildren(null, false); assertEquals(ListUtil.list(leaf2), ListUtil.fromIterator(childIt)); } static String cntnt(int ix) { return "content " + ix + "ABCDEFGHIJKLMNOPQRSTUVWXYZ".substring(0, ix); } static int lngth(int ix) { return cntnt(ix).length(); } public void testGetNodeVersion() throws Exception { int max = 5; String url = "http://www.example.com/versionedcontent.txt"; String key = "key"; String val = "grrl"; Properties props = new Properties(); RepositoryNode leaf = repo.createNewNode(url); // create several versions for (int ix = 1; ix <= max; ix++) { props.setProperty(key, val+ix); createContentVersion(leaf, cntnt(ix), props); } // getNodeVersion(current) should return the main node assertEquals(leaf, leaf.getNodeVersion(leaf.getCurrentVersion())); // loop 
through other versions checking version, content, props for (int ix = 1; ix < max; ix++) { RepositoryNodeVersion nodeVer = leaf.getNodeVersion(ix); log.debug("ver: " + nodeVer.getVersion() + ", content: " + getLeafContent(nodeVer)); assertEquals(ix, nodeVer.getVersion()); assertEquals(cntnt(ix), getLeafContent(nodeVer)); assertEquals(lngth(ix), nodeVer.getContentSize()); props = nodeVer.getNodeContents().getProperties(); assertEquals(val+ix, props.getProperty(key)); } } public void testGetNodeVersions() throws Exception { int max = 5; String url = "http://www.example.com/versionedcontent.txt"; String key = "key"; String val = "grrl"; Properties props = new Properties(); RepositoryNode leaf = repo.createNewNode(url); // create several versions for (int ix = 1; ix <= max; ix++) { props.setProperty(key, val+ix); createContentVersion(leaf, cntnt(ix), props); } // check expected current version number assertEquals(max, leaf.getCurrentVersion()); assertEquals(max, leaf.getVersion()); // checking version, content, props of current version assertEquals(cntnt(max), getLeafContent(leaf)); assertEquals(lngth(max), leaf.getContentSize()); props = leaf.getNodeContents().getProperties(); assertEquals(val+max, props.getProperty(key)); // ask for all older versions RepositoryNodeVersion[] vers = leaf.getNodeVersions(); assertEquals(max, vers.length); // loop through them checking version, content, props for (int ix = 0; ix < max-1; ix++) { int exp = max - ix; RepositoryNodeVersion nodeVer = vers[ix]; log.debug("ver: " + nodeVer.getVersion() + ", content: " + getLeafContent(nodeVer)); assertEquals(exp, nodeVer.getVersion()); assertEquals(cntnt(exp), getLeafContent(nodeVer)); assertEquals(lngth(exp), nodeVer.getContentSize()); props = nodeVer.getNodeContents().getProperties(); assertEquals(val+exp, props.getProperty(key)); } // now ask for and check a subset of the older versions assertTrue("max must be at least 4 for this test", max >= 4); int numver = max - 2; vers = leaf.getNodeVersions(numver); assertEquals(numver, vers.length); for (int ix = 0; ix < numver-1; ix++) { int exp = max - ix; RepositoryNodeVersion nodeVer = vers[ix]; log.debug("ver: " + nodeVer.getVersion() + ", content: " + getLeafContent(nodeVer)); assertEquals(exp, nodeVer.getVersion()); assertEquals(cntnt(exp), getLeafContent(nodeVer)); assertEquals(lngth(exp), nodeVer.getContentSize()); props = nodeVer.getNodeContents().getProperties(); assertEquals(val+exp, props.getProperty(key)); } } public void testIllegalVersionOperations() throws Exception { RepositoryNode.RepositoryNodeContents rnc; RepositoryNodeVersion nv; RepositoryNode leaf = repo.createNewNode("http://www.example.com/testDir/test.cache"); try { nv = leaf.getNodeVersion(7); fail("No content, shouldn't be able to get versioned node: " + nv); } catch (UnsupportedOperationException e) { } // create first version Properties props = new Properties(); props.setProperty("key", "val1"); createContentVersion(leaf, cntnt(1), props); // We're allowed to get a RepositoryNodeVersion when the version // doesn't exist ... 
nv = leaf.getNodeVersion(7); // but all operations on it should throw try { nv.getContentSize(); fail("No version; shouldn't get content size"); } catch (UnsupportedOperationException e) { } try { rnc = nv.getNodeContents(); fail("No version; shouldn't get RepositoryNodeContents"); } catch (UnsupportedOperationException e) { } } public void testDirContent() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream", null); assertTrue(leaf.hasContent()); RepositoryNode dir = repo.getNode("http://www.example.com/testDir"); dir.makeNewVersion(); writeToLeaf(dir, "test stream"); dir.setNewProperties(new Properties()); dir.sealNewVersion(); assertTrue(dir.hasContent()); dir = createLeaf("http://www.example.com/testDir/test.cache/new.test", "test stream", null); assertTrue(dir.hasContent()); } public void testNodeSize() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream", null); assertTrue(leaf.hasContent()); assertEquals(11, (int)leaf.getContentSize()); } public void testTreeSize() throws Exception { createLeaf("http://www.example.com/testDir", "test", null); createLeaf("http://www.example.com/testDir/test1", "test1", null); createLeaf("http://www.example.com/testDir/test2", "test2", null); createLeaf("http://www.example.com/testDir/test3/branch1", "test33", null); createLeaf("http://www.example.com/testDir/test3/branch2", "test33", null); RepositoryNode leaf = repo.getNode("http://www.example.com/testDir"); assertEquals(-1, leaf.getTreeContentSize(null, false)); assertEquals(26, leaf.getTreeContentSize(null, true)); assertEquals(26, leaf.getTreeContentSize(null, false)); leaf = repo.getNode("http://www.example.com/testDir/test1"); assertEquals(5, leaf.getTreeContentSize(null, true)); leaf = repo.getNode("http://www.example.com/testDir/test3"); assertEquals(12, leaf.getTreeContentSize(null, true)); CachedUrlSetSpec cuss = new RangeCachedUrlSetSpec("http://www.example.com/testDir/test3", "/branch1", "/branch1"); assertEquals(6, leaf.getTreeContentSize(cuss, true)); } public void testDetermineParentNode() throws Exception { repo.createNewNode("http://www.example.com"); repo.createNewNode("http://www.example.com/test"); assertNotNull(repo.getNode("http://www.example.com/test")); RepositoryNodeImpl node = (RepositoryNodeImpl)repo.createNewNode( "http://www.example.com/test/branch"); assertEquals("http://www.example.com/test/branch", node.getNodeUrl()); node = node.determineParentNode(); assertEquals("http://www.example.com/test", node.getNodeUrl()); node = node.determineParentNode(); assertEquals("http://www.example.com", node.getNodeUrl()); node = node.determineParentNode(); assertEquals(AuUrl.PROTOCOL, node.getNodeUrl()); node = node.determineParentNode(); assertEquals(AuUrl.PROTOCOL, node.getNodeUrl()); } public void testCacheInvalidation() throws Exception { RepositoryNodeImpl root = (RepositoryNodeImpl)createLeaf("http://www.example.com", "test", null); RepositoryNodeImpl branch = (RepositoryNodeImpl)createLeaf("http://www.example.com/branch", "test", null); RepositoryNodeImpl branch2 = (RepositoryNodeImpl)createLeaf("http://www.example.com/branch/branch2", "test", null); // This one has directory level with no node prop file, to check that // cache invalidation traverses them correctly RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/branch/branch2/a/b/c/leaf", "test", null); assertNull(branch.nodeProps.getProperty(TREE_SIZE_PROPERTY)); 
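// descriptive comment (added): no cached tree-size values should exist before invalidation is forced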
assertNull(leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY)); // force invalidation to happen branch.nodeProps.setProperty(TREE_SIZE_PROPERTY, "789"); branch.invalidateCachedValues(true); // should now be explicitly marked invalid assertEquals(RepositoryNodeImpl.INVALID, branch.nodeProps.getProperty(TREE_SIZE_PROPERTY)); assertEquals(RepositoryNodeImpl.INVALID, branch.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); // fake prop set at root to check invalidation stops properly root.nodeProps.setProperty(TREE_SIZE_PROPERTY, "789"); root.nodeProps.setProperty(CHILD_COUNT_PROPERTY, "3"); // don't set branch so the invalidate stops there branch2.nodeProps.setProperty(TREE_SIZE_PROPERTY, "456"); branch2.nodeProps.setProperty(CHILD_COUNT_PROPERTY, "1"); leaf.nodeProps.setProperty(TREE_SIZE_PROPERTY, "123"); leaf.nodeProps.setProperty(CHILD_COUNT_PROPERTY, "0"); leaf.invalidateCachedValues(true); // shouldn't be set here anymore assertFalse(isPropValid(leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertFalse(isPropValid(leaf.nodeProps.getProperty(CHILD_COUNT_PROPERTY))); // or here (requires recursing up through dirs that have no node props // file) assertFalse(isPropValid(branch2.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertFalse(isPropValid(branch2.nodeProps.getProperty(CHILD_COUNT_PROPERTY))); // still invalid, recursion should have stopped here assertFalse(isPropValid(branch.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertFalse(isPropValid(branch.nodeProps.getProperty(CHILD_COUNT_PROPERTY))); // so these were not cleared assertTrue(isPropValid(root.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertTrue(isPropValid(root.nodeProps.getProperty(CHILD_COUNT_PROPERTY))); assertEquals("789", root.nodeProps.getProperty(TREE_SIZE_PROPERTY)); assertEquals("3", root.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); } boolean isPropValid(String val) { return RepositoryNodeImpl.isPropValid(val); } boolean isPropInvalid(String val) { return RepositoryNodeImpl.isPropInvalid(val); } public void testTreeSizeCaching() throws Exception { createLeaf("http://www.example.com/testDir", "test", null); RepositoryNodeImpl leaf = (RepositoryNodeImpl)repo.getNode("http://www.example.com/testDir"); assertNull(leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY)); assertEquals(4, leaf.getTreeContentSize(null, true)); assertEquals("4", leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY)); leaf.markAsDeleted(); assertTrue(isPropInvalid(leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertEquals(0, leaf.getTreeContentSize(null, true)); assertEquals("0", leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY)); } public void testChildCount() throws Exception { createLeaf("http://www.example.com/testDir", "test", null); RepositoryNodeImpl leaf = (RepositoryNodeImpl)repo.getNode("http://www.example.com/testDir"); assertNull(leaf.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); assertEquals(0, leaf.getChildCount()); assertEquals("0", leaf.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); createLeaf("http://www.example.com/testDir/test1", "test1", null); createLeaf("http://www.example.com/testDir/test2", "test2", null); assertEquals(2, leaf.getChildCount()); assertEquals("2", leaf.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); } public void testDeactivate() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); assertTrue(leaf.hasContent()); assertFalse(leaf.isContentInactive()); assertEquals(1, leaf.getCurrentVersion()); 
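// descriptive comment (added): no inactive-content marker should be present while the node still has live content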
assertNull(leaf.nodeProps.getProperty(RepositoryNodeImpl.INACTIVE_CONTENT_PROPERTY)); leaf.deactivateContent(); assertFalse(leaf.hasContent()); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); assertEquals("true", leaf.nodeProps.getProperty(RepositoryNodeImpl.INACTIVE_CONTENT_PROPERTY)); } public void testDelete() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); assertTrue(leaf.hasContent()); assertFalse(leaf.isDeleted()); assertEquals(1, leaf.getCurrentVersion()); assertNull(leaf.nodeProps.getProperty(RepositoryNodeImpl.DELETION_PROPERTY)); leaf.markAsDeleted(); assertFalse(leaf.hasContent()); assertTrue(leaf.isDeleted()); assertEquals(RepositoryNodeImpl.DELETED_VERSION, leaf.getCurrentVersion()); assertEquals("true", leaf.nodeProps.getProperty(RepositoryNodeImpl.DELETION_PROPERTY)); } public void testUnDelete() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); leaf.markAsDeleted(); assertTrue(leaf.isDeleted()); assertEquals(RepositoryNodeImpl.DELETED_VERSION, leaf.getCurrentVersion()); leaf.markAsNotDeleted(); assertFalse(leaf.isContentInactive()); assertFalse(leaf.isDeleted()); assertEquals(1, leaf.getCurrentVersion()); // back to null, not 'false' assertNull(leaf.nodeProps.getProperty(RepositoryNodeImpl.DELETION_PROPERTY)); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); } public void testRestoreLastVersion() throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); RepositoryNode leaf = createLeaf("http://www.example.com/test1", "test stream 1", props); assertEquals(1, leaf.getCurrentVersion()); props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.sealNewVersion(); assertEquals(2, leaf.getCurrentVersion()); leaf.restoreLastVersion(); assertEquals(1, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream 1", resultStr); props = leaf.getNodeContents().getProperties(); assertEquals("value 1", props.getProperty("test 1")); } public void testReactivateViaRestore() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); leaf.deactivateContent(); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); leaf.restoreLastVersion(); assertFalse(leaf.isContentInactive()); assertEquals(1, leaf.getCurrentVersion()); // back to null, not 'false' assertNull(leaf.nodeProps.getProperty(RepositoryNodeImpl.INACTIVE_CONTENT_PROPERTY)); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); } public void testReactivateViaNewVersion() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); leaf.deactivateContent(); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); Properties props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.sealNewVersion(); assertFalse(leaf.isContentInactive()); assertEquals(2, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); 
assertEquals("test stream 2", resultStr); File lastProps = new File(leaf.contentDir, "1.props"); assertTrue(lastProps.exists()); InputStream is = new BufferedInputStream(new FileInputStream(lastProps)); props.load(is); is.close(); // make sure the 'was inactive' property hasn't been lost assertEquals("true", props.getProperty(RepositoryNodeImpl.NODE_WAS_INACTIVE_PROPERTY)); } public void testAbandonReactivateViaNewVersion() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/test1", "test stream", null); leaf.deactivateContent(); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); Properties props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.abandonNewVersion(); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); } public void testIsLeaf() throws Exception { createLeaf("http://www.example.com/testDir/test1", "test stream", null); createLeaf("http://www.example.com/testDir/branch1", "test stream", null); createLeaf("http://www.example.com/testDir/branch1/test4", "test stream", null); RepositoryNode leaf = repo.getNode("http://www.example.com/testDir/test1"); assertTrue(leaf.isLeaf()); leaf = repo.getNode("http://www.example.com/testDir/branch1"); assertFalse(leaf.isLeaf()); } public void testListInactiveNodes() throws Exception { createLeaf("http://www.example.com/testDir/test1", "test stream", null); createLeaf("http://www.example.com/testDir/test2", "test stream", null); createLeaf("http://www.example.com/testDir/test3", "test stream", null); createLeaf("http://www.example.com/testDir/branch1", "test stream", null); createLeaf("http://www.example.com/testDir/branch1/test4", "test stream", null); createLeaf("http://www.example.com/testDir/branch2", "test stream", null); createLeaf("http://www.example.com/testDir/branch2/test5", "test stream", null); RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); Iterator childIt = dirEntry.listChildren(null, false); ArrayList childL = new ArrayList(3); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } String[] expectedA = new String[] { "http://www.example.com/testDir/branch1", "http://www.example.com/testDir/branch2", "http://www.example.com/testDir/test1", "http://www.example.com/testDir/test2", "http://www.example.com/testDir/test3" }; assertIsomorphic(expectedA, childL); RepositoryNode leaf = repo.getNode("http://www.example.com/testDir/test2"); leaf.deactivateContent(); // this next shouldn't be excluded since it isn't a leaf node leaf = repo.getNode("http://www.example.com/testDir/branch1"); leaf.deactivateContent(); // this next should be excluded because it's deleted leaf = repo.getNode("http://www.example.com/testDir/branch2"); leaf.markAsDeleted(); childIt = dirEntry.listChildren(null, false); childL = new ArrayList(2); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { "http://www.example.com/testDir/branch1", "http://www.example.com/testDir/test1", "http://www.example.com/testDir/test3" }; assertIsomorphic("Excluding inactive nodes failed.", expectedA, childL); childIt = dirEntry.listChildren(null, true); childL = new ArrayList(3); while (childIt.hasNext()) { RepositoryNode node = 
(RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { "http://www.example.com/testDir/branch1", "http://www.example.com/testDir/test1", "http://www.example.com/testDir/test2", "http://www.example.com/testDir/test3" }; assertIsomorphic("Including inactive nodes failed.", expectedA, childL); } public void testDeleteInnerNode() throws Exception { createLeaf("http://www.example.com/testDir/test1", "test stream", null); createLeaf("http://www.example.com/testDir/test2", "test stream", null); RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); assertFalse(dirEntry.isDeleted()); dirEntry.markAsDeleted(); assertTrue(dirEntry.isDeleted()); dirEntry.markAsNotDeleted(); assertFalse(dirEntry.isDeleted()); } public void testGetFileStrings() throws Exception { RepositoryNodeImpl node = (RepositoryNodeImpl)repo.createNewNode( "http://www.example.com/test.url"); node.initNodeRoot(); String contentStr = FileUtil.sysDepPath(node.nodeLocation + "/#content"); assertEquals(contentStr, node.getContentDir().toString()); contentStr = contentStr + File.separator; String expectedStr = contentStr + "123"; assertEquals(expectedStr, node.getVersionedCacheFile(123).getAbsolutePath()); expectedStr = contentStr + "123.props"; assertEquals(expectedStr, node.getVersionedPropsFile(123).getAbsolutePath()); expectedStr = contentStr + "inactive"; assertEquals(expectedStr, node.getInactiveCacheFile().getAbsolutePath()); expectedStr = contentStr + "inactive.props"; assertEquals(expectedStr, node.getInactivePropsFile().getAbsolutePath()); } public void testCheckNodeConsistency() throws Exception { // check returns proper values for errors MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)repo.createNewNode("http://www.example.com/testDir")); leaf.makeNewVersion(); // should abort and return true since version open leaf.failRootConsist = true; assertTrue(leaf.checkNodeConsistency()); // finish write leaf.setNewProperties(new Properties()); writeToLeaf(leaf, "test stream"); leaf.sealNewVersion(); // should return false if node root fails assertFalse(leaf.checkNodeConsistency()); leaf.failRootConsist = false; assertTrue(leaf.checkNodeConsistency()); // check returns false if content fails leaf.failContentConsist = true; assertFalse(leaf.checkNodeConsistency()); leaf.failContentConsist = false; assertTrue(leaf.checkNodeConsistency()); // check returns false if current info load fails leaf.failEnsureCurrentLoaded = true; assertFalse(leaf.checkNodeConsistency()); leaf.failEnsureCurrentLoaded = false; assertTrue(leaf.checkNodeConsistency()); } public void testCheckNodeRootConsistency() throws Exception { MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)repo.createNewNode("http://www.example.com/testDir")); leaf.createNodeLocation(); assertTrue(leaf.nodeRootFile.exists()); // returns true when normal assertTrue(leaf.checkNodeRootConsistency()); leaf.nodeRootFile.delete(); assertFalse(leaf.nodeRootFile.exists()); // creates dir, returns true when missing assertTrue(leaf.checkNodeRootConsistency()); assertTrue(leaf.nodeRootFile.exists()); assertTrue(leaf.nodeRootFile.isDirectory()); // fail node props load leaf.getChildCount(); assertTrue(leaf.nodePropsFile.exists()); File renameFile = new File(leaf.nodePropsFile.getAbsolutePath()+ RepositoryNodeImpl.FAULTY_FILE_EXTENSION); assertFalse(renameFile.exists()); leaf.failPropsLoad = true; assertTrue(leaf.checkNodeRootConsistency()); assertFalse(leaf.nodePropsFile.exists()); 
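// descriptive comment (added): the unloadable props file should have been renamed with the faulty-file extension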
assertTrue(renameFile.exists()); } public void testCheckContentConsistency() throws Exception { MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf("http://www.example.com/testDir", "test stream", null)); leaf.ensureCurrentInfoLoaded(); // should return false if content dir fails MyMockRepositoryNode.failEnsureDirExists = true; assertFalse(leaf.checkContentConsistency()); MyMockRepositoryNode.failEnsureDirExists = false; assertTrue(leaf.checkContentConsistency()); // should return false if content file absent File renameFile = new File(leaf.currentCacheFile.getAbsolutePath()+"RENAME"); assertTrue(PlatformUtil.updateAtomically(leaf.currentCacheFile, renameFile)); assertFalse(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(renameFile, leaf.currentCacheFile); assertTrue(leaf.checkContentConsistency()); // should return false if content props absent PlatformUtil.updateAtomically(leaf.currentPropsFile, renameFile); assertFalse(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(renameFile, leaf.currentPropsFile); assertTrue(leaf.checkContentConsistency()); // should return false if inactive and files missing leaf.currentVersion = RepositoryNodeImpl.INACTIVE_VERSION; assertFalse(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(leaf.currentPropsFile, leaf.getInactivePropsFile()); assertFalse(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(leaf.currentCacheFile, leaf.getInactiveCacheFile()); assertTrue(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(leaf.getInactivePropsFile(), leaf.currentPropsFile); assertFalse(leaf.checkContentConsistency()); // finish restoring PlatformUtil.updateAtomically(leaf.getInactiveCacheFile(), leaf.currentCacheFile); leaf.currentVersion = 1; assertTrue(leaf.checkContentConsistency()); // remove residual files // - create files FileOutputStream fos = new FileOutputStream(leaf.tempCacheFile); StringInputStream sis = new StringInputStream("test stream"); StreamUtil.copy(sis, fos); fos.close(); sis.close(); fos = new FileOutputStream(leaf.tempPropsFile); sis = new StringInputStream("test stream"); StreamUtil.copy(sis, fos); fos.close(); sis.close(); // should be removed assertTrue(leaf.tempCacheFile.exists()); assertTrue(leaf.tempPropsFile.exists()); assertTrue(leaf.checkContentConsistency()); assertFalse(leaf.tempCacheFile.exists()); assertFalse(leaf.tempPropsFile.exists()); } public void testEnsureDirExists() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com", null, null); File testDir = new File(tempDirPath, "testDir"); // should return true if dir exists testDir.mkdir(); assertTrue(testDir.exists()); assertTrue(testDir.isDirectory()); assertTrue(leaf.ensureDirExists(testDir)); // should create dir, return true if not exists testDir.delete(); assertFalse(testDir.exists()); assertTrue(leaf.ensureDirExists(testDir)); assertTrue(testDir.exists()); assertTrue(testDir.isDirectory()); // should rename file, create dir, return true if file exists // -create file testDir.delete(); FileOutputStream fos = new FileOutputStream(testDir); StringInputStream sis = new StringInputStream("test stream"); StreamUtil.copy(sis, fos); fos.close(); sis.close(); assertTrue(testDir.exists()); assertTrue(testDir.isFile()); // rename via 'ensureDirExists()' File renameFile = new File(tempDirPath, "testDir"+ RepositoryNodeImpl.FAULTY_FILE_EXTENSION); assertFalse(renameFile.exists()); assertTrue(leaf.ensureDirExists(testDir)); 
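// descriptive comment (added): the conflicting plain file should have been moved aside so a directory could be created in its place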
assertTrue(testDir.isDirectory()); assertEquals("test stream", StringUtil.fromFile(renameFile)); } public void testCheckFileExists() throws Exception { // return false if doesn't exist File testFile = new File(tempDirPath, "testFile"); assertFalse(testFile.exists()); assertFalse(RepositoryNodeImpl.checkFileExists(testFile, "test file")); // rename if dir (to make room for file creation), then return false testFile.mkdir(); File renameDir = new File(tempDirPath, "testFile"+ RepositoryNodeImpl.FAULTY_FILE_EXTENSION); assertTrue(testFile.exists()); assertTrue(testFile.isDirectory()); assertFalse(renameDir.exists()); assertFalse(RepositoryNodeImpl.checkFileExists(testFile, "test file")); assertFalse(testFile.exists()); assertTrue(renameDir.exists()); assertTrue(renameDir.isDirectory()); // return true if exists FileOutputStream fos = new FileOutputStream(testFile); StringInputStream sis = new StringInputStream("test stream"); StreamUtil.copy(sis, fos); fos.close(); sis.close(); assertTrue(testFile.exists()); assertTrue(testFile.isFile()); assertTrue(RepositoryNodeImpl.checkFileExists(testFile, "test file")); assertEquals("test stream", StringUtil.fromFile(testFile)); } public void testCheckChildCountCacheAccuracy() throws Exception { createLeaf("http://www.example.com/testDir/branch2", "test stream", null); createLeaf("http://www.example.com/testDir/branch3", "test stream", null); RepositoryNodeImpl dirEntry = (RepositoryNodeImpl)repo.getNode("http://www.example.com/testDir"); assertEquals(2, dirEntry.getChildCount()); assertEquals("2", dirEntry.nodeProps.getProperty(RepositoryNodeImpl.CHILD_COUNT_PROPERTY)); // check that no change to valid count cache dirEntry.checkChildCountCacheAccuracy(); assertEquals("2", dirEntry.nodeProps.getProperty(RepositoryNodeImpl.CHILD_COUNT_PROPERTY)); // check that invalid cache removed dirEntry.nodeProps.setProperty(RepositoryNodeImpl.CHILD_COUNT_PROPERTY, "3"); dirEntry.checkChildCountCacheAccuracy(); assertEquals(RepositoryNodeImpl.INVALID, dirEntry.nodeProps.getProperty(RepositoryNodeImpl.CHILD_COUNT_PROPERTY)); } private RepositoryNode createLeaf(String url, String content, Properties props) throws Exception { return createLeaf(repo, url, content, props); } public static RepositoryNode createLeaf(LockssRepository repo, String url, String content, Properties props) throws Exception { RepositoryNode leaf = repo.createNewNode(url); createContentVersion(leaf, content, props); return leaf; } public static void createContentVersion(RepositoryNode leaf, String content, Properties props) throws Exception { leaf.makeNewVersion(); writeToLeaf(leaf, content); if (props==null) { props = new Properties(); } leaf.setNewProperties(props); leaf.sealNewVersion(); } public static void writeToLeaf(RepositoryNode leaf, String content) throws Exception { if (content==null) { content = ""; } OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream(content); StreamUtil.copy(is, os); os.close(); is.close(); } public static String getLeafContent(RepositoryNodeVersion leaf) throws IOException { return getRNCContent(leaf.getNodeContents()); } public static String getRNCContent(RepositoryNode.RepositoryNodeContents rnc) throws IOException { InputStream is = rnc.getInputStream(); OutputStream baos = new ByteArrayOutputStream(20); StreamUtil.copy(is, baos); is.close(); String resultStr = baos.toString(); baos.close(); return resultStr; } public static void main(String[] argv) { String[] testCaseList = { TestRepositoryNodeImpl.class.getName()}; 
junit.swingui.TestRunner.main(testCaseList); } // this class overrides 'getDatedVersionedPropsFile()' so I can // manipulate the file names for testing. Also allows 'loadNodeProps() // to fail on demand static class MyMockRepositoryNode extends RepositoryNodeImpl { long dateValue; boolean failPropsLoad = false; boolean failRootConsist = false; boolean failContentConsist = false; boolean failEnsureCurrentLoaded = false; static boolean failEnsureDirExists = false; MyMockRepositoryNode(RepositoryNodeImpl nodeImpl) { super(nodeImpl.url, nodeImpl.nodeLocation, nodeImpl.repository); } File getDatedVersionedPropsFile(int version, long date) { StringBuffer buffer = new StringBuffer(); buffer.append(version); buffer.append(PROPS_EXTENSION); buffer.append("-"); buffer.append(dateValue); return new File(getContentDir(), buffer.toString()); } void loadNodeProps(boolean okIfNotThere) { if (failPropsLoad) { throw new LockssRepository.RepositoryStateException("Couldn't load properties file."); } else { super.loadNodeProps(okIfNotThere); } } boolean checkNodeRootConsistency() { if (failRootConsist) { return false; } else { return super.checkNodeRootConsistency(); } } boolean checkContentConsistency() { if (failContentConsist) { return false; } else { return super.checkContentConsistency(); } } void ensureCurrentInfoLoaded() { if (failEnsureCurrentLoaded) { throw new LockssRepository.RepositoryStateException("Couldn't load current info."); } else { super.ensureCurrentInfoLoaded(); } } boolean ensureDirExists(File dirFile) { if (failEnsureDirExists) { return false; } else { return super.ensureDirExists(dirFile); } } } static class MyLockssRepositoryImpl extends LockssRepositoryImpl { boolean dontNormalize = false; void setDontNormalize(boolean val) { dontNormalize = val; } MyLockssRepositoryImpl(String rootPath) { super(rootPath); } public String canonicalizePath(String url) throws MalformedURLException { if (dontNormalize) return url; return super.canonicalizePath(url); } public static LockssRepository createNewLockssRepository(ArchivalUnit au) { String root = getRepositoryRoot(au); if (root == null) { throw new LockssRepository.RepositoryStateException("null root"); } String auDir = LockssRepositoryImpl.mapAuToFileLocation(root, au); log.debug("repo: " + auDir + ", au: " + au.getName()); // staticCacheLocation = extendCacheLocation(root); LockssRepositoryImpl repo = new MyLockssRepositoryImpl(auDir); Plugin plugin = au.getPlugin(); if (plugin != null) { LockssDaemon daemon = plugin.getDaemon(); if (daemon != null) { RepositoryManager mgr = daemon.getRepositoryManager(); if (mgr != null) { mgr.setRepositoryForPath(auDir, repo); } } } return repo; } } }
test/src/org/lockss/repository/TestRepositoryNodeImpl.java
/* * $Id: TestRepositoryNodeImpl.java,v 1.60 2009-02-05 05:09:46 tlipkis Exp $ */ /* Copyright (c) 2000-2007 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.repository; import java.io.*; import java.net.*; import java.util.*; import org.lockss.test.*; import org.lockss.app.*; import org.lockss.util.*; import org.lockss.daemon.*; import org.lockss.plugin.*; import org.lockss.protocol.*; /** * This is the test class for org.lockss.repository.RepositoryNodeImpl */ public class TestRepositoryNodeImpl extends LockssTestCase { static final String TREE_SIZE_PROPERTY = RepositoryNodeImpl.TREE_SIZE_PROPERTY; static final String CHILD_COUNT_PROPERTY = RepositoryNodeImpl.CHILD_COUNT_PROPERTY; private MockLockssDaemon theDaemon; private MyLockssRepositoryImpl repo; private String tempDirPath; MockArchivalUnit mau; private MockIdentityManager idmgr; Properties props; public void setUp() throws Exception { super.setUp(); tempDirPath = getTempDir().getAbsolutePath() + File.separator; props = new Properties(); props.setProperty(LockssRepositoryImpl.PARAM_CACHE_LOCATION, tempDirPath); ConfigurationUtil.setCurrentConfigFromProps(props); mau = new MockArchivalUnit(); theDaemon = getMockLockssDaemon(); // Create the identity manager... idmgr = new MockIdentityManager(); theDaemon.setIdentityManager(idmgr); idmgr.initService(theDaemon); repo = (MyLockssRepositoryImpl)MyLockssRepositoryImpl.createNewLockssRepository(mau); theDaemon.setAuManager(LockssDaemon.LOCKSS_REPOSITORY, mau, repo); repo.initService(theDaemon); repo.startService(); } public void tearDown() throws Exception { TimeBase.setReal(); repo.stopService(); theDaemon.stopDaemon(); super.tearDown(); } // RepositoryNodeImpl relies on nonexistent dir.listFiles() returning // null, not empty list. 
public void testFileAssumptions() throws Exception { // empty dir returns empty list File dir1 = getTempDir(); assertNotNull(null, dir1.listFiles()); assertEquals(new File[0], dir1.listFiles()); // nonexistent dir returns null File dir2 = new File(dir1, "bacds"); assertNull(null, dir2.listFiles()); // dir list of non-dir returns null File file1 = File.createTempFile("xxx", ".tmp", dir1); assertTrue(file1.exists()); assertNull(null, file1.listFiles()); } public void testGetNodeUrl() { RepositoryNode node = new RepositoryNodeImpl("testUrl", "testDir", null); assertEquals("testUrl", node.getNodeUrl()); node = new RepositoryNodeImpl("testUrl/test.txt", "testUrl/test.txt", null); assertEquals("testUrl/test.txt", node.getNodeUrl()); } public void testFileLocation() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File testFile = new File(tempDirPath); assertTrue(testFile.exists()); testFile = new File(tempDirPath + "/#content/current"); assertTrue(testFile.exists()); testFile = new File(tempDirPath + "/#content/current.props"); assertTrue(testFile.exists()); testFile = new File(tempDirPath + "/#node_props"); assertFalse(testFile.exists()); testFile = new File(tempDirPath + "/#agreement"); assertFalse(testFile.exists()); } public void testUpdateAgreementCreatesFile() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File testFile = new File(tempDirPath, "#agreement"); assertFalse(testFile.exists()); // Agreeing IDs. 
PeerIdentity[] agreeingPeers = { new MockPeerIdentity("TCP:[192.168.0.1]:9723"), new MockPeerIdentity("TCP:[192.168.0.2]:9723") }; leaf.signalAgreement(ListUtil.fromArray(agreeingPeers)); assertTrue(testFile.exists()); } public void testUpdateAndLoadAgreement() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); PeerIdentity testid_1 = new MockPeerIdentity("TCP:[192.168.0.1]:9723"); PeerIdentity testid_2 = new MockPeerIdentity("TCP:[192.168.0.2]:9723"); PeerIdentity testid_3 = new MockPeerIdentity("TCP:[192.168.0.3]:9723"); PeerIdentity testid_4 = new MockPeerIdentity("TCP:[192.168.0.4]:9723"); idmgr.addPeerIdentity(testid_1.getIdString(), testid_1); idmgr.addPeerIdentity(testid_2.getIdString(), testid_2); idmgr.addPeerIdentity(testid_3.getIdString(), testid_3); idmgr.addPeerIdentity(testid_4.getIdString(), testid_4); leaf.signalAgreement(ListUtil.list(testid_1, testid_3)); assertEquals(2, ((RepositoryNodeImpl)leaf).loadAgreementHistory().size()); assertTrue(leaf.hasAgreement(testid_1)); assertFalse(leaf.hasAgreement(testid_2)); assertTrue(leaf.hasAgreement(testid_3)); assertFalse(leaf.hasAgreement(testid_4)); leaf.signalAgreement(ListUtil.list(testid_1, testid_2, testid_3, testid_4)); assertEquals(4, ((RepositoryNodeImpl)leaf).loadAgreementHistory().size()); assertTrue(leaf.hasAgreement(testid_1)); assertTrue(leaf.hasAgreement(testid_2)); assertTrue(leaf.hasAgreement(testid_3)); assertTrue(leaf.hasAgreement(testid_4)); } public void testVersionFileLocation() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File testFile = new File(tempDirPath + "/#content/1"); assertFalse(testFile.exists()); testFile = new File(tempDirPath + "/#content/1.props"); assertFalse(testFile.exists()); leaf.makeNewVersion(); OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream("test stream 2"); StreamUtil.copy(is, os); is.close(); os.close(); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); testFile = new File(tempDirPath + "/#content/1"); assertTrue(testFile.exists()); testFile = new File(tempDirPath + "/#content/1.props"); assertTrue(testFile.exists()); } public void testInactiveFileLocation() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File curFile = new File(tempDirPath + "/#content/current"); File curPropsFile = new File(tempDirPath + "/#content/current.props"); File inactFile = new File(tempDirPath + "/#content/inactive"); File inactPropsFile = new File(tempDirPath + "/#content/inactive.props"); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); leaf.deactivateContent(); assertFalse(curFile.exists()); assertFalse(curPropsFile.exists()); assertTrue(inactFile.exists()); 
assertTrue(inactPropsFile.exists()); //reactivate leaf.restoreLastVersion(); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); leaf.deactivateContent(); assertFalse(curFile.exists()); assertFalse(curPropsFile.exists()); assertTrue(inactFile.exists()); assertTrue(inactPropsFile.exists()); // make new version leaf.makeNewVersion(); OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream("test stream 2"); StreamUtil.copy(is, os); is.close(); os.close(); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); } public void testDeleteFileLocation() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/branch1/leaf1"); File curFile = new File(tempDirPath + "/#content/current"); File curPropsFile = new File(tempDirPath + "/#content/current.props"); File inactFile = new File(tempDirPath + "/#content/inactive"); File inactPropsFile = new File(tempDirPath + "/#content/inactive.props"); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); leaf.markAsDeleted(); assertFalse(curFile.exists()); assertFalse(curPropsFile.exists()); assertTrue(inactFile.exists()); assertTrue(inactPropsFile.exists()); //reactivate leaf.restoreLastVersion(); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); leaf.markAsDeleted(); assertFalse(curFile.exists()); assertFalse(curPropsFile.exists()); assertTrue(inactFile.exists()); assertTrue(inactPropsFile.exists()); // make new version leaf.makeNewVersion(); OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream("test stream 2"); StreamUtil.copy(is, os); is.close(); os.close(); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertTrue(curFile.exists()); assertTrue(curPropsFile.exists()); assertFalse(inactFile.exists()); assertFalse(inactPropsFile.exists()); } public void testListEntriesNonexistentDir() throws Exception { RepositoryNode node = new RepositoryNodeImpl("foo-no-url", "foo-no-dir", null); try { node.listChildren(null, false); fail("listChildren() is nonexistent dir should throw"); } catch (LockssRepository.RepositoryStateException e) { } } public void testListEntries() throws Exception { createLeaf("http://www.example.com/testDir/branch1/leaf1", "test stream", null); createLeaf("http://www.example.com/testDir/branch1/leaf2", "test stream", null); createLeaf("http://www.example.com/testDir/branch2/leaf3", "test stream", null); createLeaf("http://www.example.com/testDir/branch2", "test stream", null); createLeaf("http://www.example.com/testDir/leaf4", "test stream", null); // root branch RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); Iterator childIt = dirEntry.listChildren(null, false); ArrayList childL = new ArrayList(3); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } String[] expectedA = new String[] { "http://www.example.com/testDir/branch1", 
"http://www.example.com/testDir/branch2", "http://www.example.com/testDir/leaf4" }; assertIsomorphic(expectedA, childL); // sub-branch dirEntry = repo.getNode("http://www.example.com/testDir/branch1"); childL.clear(); childIt = dirEntry.listChildren(null, false); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { "http://www.example.com/testDir/branch1/leaf1", "http://www.example.com/testDir/branch1/leaf2", }; assertIsomorphic(expectedA, childL); // sub-branch with content dirEntry = repo.getNode("http://www.example.com/testDir/branch2"); childL.clear(); childIt = dirEntry.listChildren(null, false); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { "http://www.example.com/testDir/branch2/leaf3", }; assertIsomorphic(expectedA, childL); // leaf node dirEntry = repo.getNode("http://www.example.com/testDir/branch1/leaf1"); childL.clear(); childIt = dirEntry.listChildren(null, false); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { }; assertIsomorphic(expectedA, childL); } String normalizeName(RepositoryNodeImpl node, String name) { return node.normalize(new File(name)).getPath(); } public void testNormalizeUrlEncodingCase() throws Exception { RepositoryNodeImpl node = new RepositoryNodeImpl("foo", "bar", null); // nothing to normalize File file = new File("foo/bar/baz"); assertSame(file, node.normalize(file)); file = new File("foo/bar/ba%ABz"); assertSame(file, node.normalize(file)); // unnormalized in parent dir name is left alone file = new File("ba%abz/bar"); assertSame(file, node.normalize(file)); file = new File("foo/ba%abz/bar"); assertSame(file, node.normalize(file)); // should be normalized assertEquals("ba%ABz", normalizeName(node, "ba%aBz")); assertEquals("/ba%ABz", normalizeName(node, "/ba%aBz")); assertEquals("foo/bar/ba%ABz", normalizeName(node, "foo/bar/ba%aBz")); assertEquals("foo/bar/ba%ABz", normalizeName(node, "foo/bar/ba%Abz")); assertEquals("foo/bar/ba%ABz", normalizeName(node, "foo/bar/ba%abz")); assertEquals("foo/bar/ba%abz/ba%ABz", normalizeName(node, "foo/bar/ba%abz/ba%abz")); } public void testNormalizeTrailingQuestion() throws Exception { RepositoryNodeImpl node = new RepositoryNodeImpl("foo", "bar", null); // nothing to normalize File file = new File("foo/bar/baz"); assertSame(file, node.normalize(file)); file = new File("foo/bar/ba?z"); assertSame(file, node.normalize(file)); // unnormalized in parent dir name is left alone file = new File("ba?/bar"); assertSame(file, node.normalize(file)); // should be normalized assertEquals("baz", normalizeName(node, "baz?")); assertEquals("/ba", normalizeName(node, "/ba?")); assertEquals("foo/bar/bar", normalizeName(node, "foo/bar/bar?")); assertEquals("foo/ba?r/bar", normalizeName(node, "foo/ba?r/bar?")); assertEquals("foo/bar?/bar", normalizeName(node, "foo/bar?/bar?")); // disable trailing ? 
normalization ConfigurationUtil.addFromArgs(UrlUtil.PARAM_NORMALIZE_EMPTY_QUERY, "false"); assertEquals("baz?", normalizeName(node, "baz?")); } List getChildNames(String nodeName) throws MalformedURLException { RepositoryNode dirEntry = repo.getNode(nodeName); ArrayList res = new ArrayList(); for (Iterator childIt = dirEntry.listChildren(null, false); childIt.hasNext(); ) { RepositoryNode node = (RepositoryNode)childIt.next(); res.add(node.getNodeUrl()); } return res; } public void testFixUnnormalized_Rename() throws Exception { repo.setDontNormalize(true); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); createLeaf("http://www.example.com/testDir/branch%3c1/leaf%2C1", "test stream", null); createLeaf("http://www.example.com/testDir/branch%3c1/leaf%2c2", "test stream", null); createLeaf("http://www.example.com/testDir/branch2/leaf3", "test stream", null); createLeaf("http://www.example.com/testDir/branch2", "test stream", null); createLeaf("http://www.example.com/testDir/leaf4", "test stream", null); String[] expectedA = new String[] { "http://www.example.com/testDir/branch%3c1", "http://www.example.com/testDir/branch2", "http://www.example.com/testDir/leaf4" }; assertIsomorphic(expectedA, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); String[] expectedB = new String[] { "http://www.example.com/testDir/branch%3C1", "http://www.example.com/testDir/branch2", "http://www.example.com/testDir/leaf4" }; assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); String[] expectedC = new String[] { "http://www.example.com/testDir/branch%3C1/leaf%2C1", "http://www.example.com/testDir/branch%3C1/leaf%2c2", }; assertIsomorphic(expectedC, getChildNames(("http://www.example.com/testDir/branch%3C1"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); String[] expectedD = new String[] { "http://www.example.com/testDir/branch%3C1/leaf%2C1", "http://www.example.com/testDir/branch%3C1/leaf%2C2", }; assertIsomorphic(expectedD, getChildNames(("http://www.example.com/testDir/branch%3C1"))); } public void testFixUnnormalizedMultiple_Delete() throws Exception { repo.setDontNormalize(true); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); createLeaf("http://www.example.com/testDir/leaf%2C1", "test stream", null); createLeaf("http://www.example.com/testDir/leaf%2c1", "test stream", null); createLeaf("http://www.example.com/testDir/leaf3", "test stream", null); String[] expectedA = new String[] { "http://www.example.com/testDir/leaf%2C1", "http://www.example.com/testDir/leaf%2c1", "http://www.example.com/testDir/leaf3", }; assertIsomorphic(expectedA, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); String[] expectedB = new String[] { "http://www.example.com/testDir/leaf%2C1", "http://www.example.com/testDir/leaf3", }; assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); } public void 
testFixUnnormalizedMultiple_DeleteMultiple() throws Exception { repo.setDontNormalize(true); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); createLeaf("http://www.example.com/testDir/leaf%CA%3E", "test stream", null); createLeaf("http://www.example.com/testDir/leaf%cA%3E", "test stream", null); createLeaf("http://www.example.com/testDir/leaf%ca%3E", "test stream", null); createLeaf("http://www.example.com/testDir/leaf%ca%3e", "test stream", null); createLeaf("http://www.example.com/testDir/leaf3", "test stream", null); String[] expectedA = new String[] { "http://www.example.com/testDir/leaf%CA%3E", "http://www.example.com/testDir/leaf%cA%3E", "http://www.example.com/testDir/leaf%ca%3E", "http://www.example.com/testDir/leaf%ca%3e", "http://www.example.com/testDir/leaf3", }; assertIsomorphic(expectedA, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); String[] expectedB = new String[] { "http://www.example.com/testDir/leaf%CA%3E", "http://www.example.com/testDir/leaf3", }; assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "false"); assertIsomorphic(expectedB, getChildNames(("http://www.example.com/testDir"))); } public void testFixUnnormalized_DontFixParent() throws Exception { repo.setDontNormalize(true); createLeaf("http://www.example.com/testDir/branch%3c1/leaf%2C1", "test stream", null); createLeaf("http://www.example.com/testDir/branch%3c1/leaf%2c2", "test stream", null); ConfigurationUtil.addFromArgs(RepositoryNodeImpl.PARAM_FIX_UNNORMALIZED, "true"); String[] expectedA = new String[] { "http://www.example.com/testDir/branch%3c1/leaf%2C1", "http://www.example.com/testDir/branch%3c1/leaf%2C2", }; assertIsomorphic(expectedA, getChildNames(("http://www.example.com/testDir/branch%3c1"))); } public void testEntrySort() throws Exception { createLeaf("http://www.example.com/testDir/branch2/leaf1", null, null); createLeaf("http://www.example.com/testDir/leaf4", null, null); createLeaf("http://www.example.com/testDir/branch1/leaf1", null, null); createLeaf("http://www.example.com/testDir/leaf3", null, null); RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); Iterator childIt = dirEntry.listChildren(null, false); ArrayList childL = new ArrayList(4); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } String[] expectedA = new String[] { "http://www.example.com/testDir/branch1", "http://www.example.com/testDir/branch2", "http://www.example.com/testDir/leaf3", "http://www.example.com/testDir/leaf4" }; assertIsomorphic(expectedA, childL); } public void testIllegalOperations() throws Exception { RepositoryNode leaf = repo.createNewNode("http://www.example.com/testDir/test.cache"); assertFalse(leaf.hasContent()); try { leaf.getCurrentVersion(); fail("Cannot get current version if no content."); } catch (UnsupportedOperationException uoe) { } try { leaf.getContentSize(); fail("Cannot get content size if no content."); } catch (UnsupportedOperationException uoe) { } try { leaf.getNodeContents(); fail("Cannot get RepositoryNodeContents if no content."); } catch (UnsupportedOperationException uoe) { } try { leaf.sealNewVersion(); fail("Cannot seal version if not open."); } catch (UnsupportedOperationException uoe) { } leaf.makeNewVersion(); try { leaf.sealNewVersion(); fail("Cannot seal version if 
getNewOutputStream() uncalled."); } catch (UnsupportedOperationException uoe) { } leaf.makeNewVersion(); try { leaf.deactivateContent(); fail("Cannot deactivate if currently open for writing."); } catch (UnsupportedOperationException uoe) { } writeToLeaf(leaf, "test stream"); try { leaf.sealNewVersion(); fail("Cannot seal version if setNewProperties() uncalled."); } catch (UnsupportedOperationException uoe) { } leaf.makeNewVersion(); writeToLeaf(leaf, "test stream"); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertEquals(1, leaf.getCurrentVersion()); assertTrue(leaf.hasContent()); } public void testVersionTimeout() throws Exception { TimeBase.setSimulated(); RepositoryNode leaf = repo.createNewNode("http://www.example.com/testDir/test.cache"); RepositoryNode leaf2 = repo.getNode("http://www.example.com/testDir/test.cache"); leaf.makeNewVersion(); try { leaf2.makeNewVersion(); fail("Can't make new version while version open."); } catch (UnsupportedOperationException e) { } TimeBase.step(RepositoryNodeImpl.DEFAULT_VERSION_TIMEOUT/2); try { leaf2.makeNewVersion(); fail("Can't make new version while version not timed out."); } catch (UnsupportedOperationException e) { } TimeBase.step(RepositoryNodeImpl.DEFAULT_VERSION_TIMEOUT/2); leaf2.makeNewVersion(); } public void testMakeNewCache() throws Exception { RepositoryNode leaf = repo.createNewNode("http://www.example.com/testDir/test.cache"); assertFalse(leaf.hasContent()); try { leaf.getCurrentVersion(); fail("Cannot get current version if no content."); } catch (UnsupportedOperationException uoe) { } leaf.makeNewVersion(); writeToLeaf(leaf, "test stream"); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertTrue(leaf.hasContent()); assertEquals(1, leaf.getCurrentVersion()); } public void testMakeNodeLocation() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl) repo.createNewNode("http://www.example.com/testDir"); String nodeLoc = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); nodeLoc = LockssRepositoryImpl.mapUrlToFileLocation(nodeLoc, "http://www.example.com/testDir"); File testFile = new File(nodeLoc); assertFalse(testFile.exists()); leaf.createNodeLocation(); assertTrue(testFile.exists()); assertTrue(testFile.isDirectory()); } public void testMakeNewVersion() throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream 1", props); assertEquals(1, leaf.getCurrentVersion()); props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.sealNewVersion(); assertEquals(2, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream 2", resultStr); props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); } static final int DEL_NODE_DIR = 1; static final int DEL_CONTENT_DIR = 2; static final int DEL_CONTENT_FILE = 3; static final int DEL_PROPS_FILE = 4; public void testDisappearingFile(int whichFile, boolean tryRead) throws Exception { String url = "http://www.example.com/foo.html"; RepositoryNodeImpl leaf = (RepositoryNodeImpl)repo.createNewNode(url); String nodeLoc = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); nodeLoc = LockssRepositoryImpl.mapUrlToFileLocation(nodeLoc, url); File testFile; switch (whichFile) { case DEL_NODE_DIR: testFile = new File(nodeLoc); break; 
case DEL_CONTENT_DIR: testFile = new File(nodeLoc, "#content"); break; case DEL_CONTENT_FILE: testFile = new File(nodeLoc, "#content/current"); break; case DEL_PROPS_FILE: testFile = new File(nodeLoc, "#content/current.props"); break; default: throw new UnsupportedOperationException(); } assertFalse(testFile.exists()); Properties props1 = PropUtil.fromArgs("key1", "value 1"); createContentVersion(leaf, "test content 11111", props1); assertEquals(1, leaf.getCurrentVersion()); assertTrue(testFile.exists()); switch (whichFile) { case DEL_NODE_DIR: case DEL_CONTENT_DIR: assertTrue(FileUtil.delTree(testFile)); break; case DEL_CONTENT_FILE: case DEL_PROPS_FILE: assertTrue(testFile.delete()); break; } assertFalse(testFile.exists()); Properties props2 = PropUtil.fromArgs("key2", "value 2"); RepositoryNode leaf2 = repo.createNewNode(url); assertSame(leaf, leaf2); assertTrue(leaf.hasContent()); if (tryRead) { try { getLeafContent(leaf); } catch (LockssRepository.RepositoryStateException e) { // expected } } leaf2.makeNewVersion(); writeToLeaf(leaf, "test content 22222"); leaf.setNewProperties(props2); leaf.sealNewVersion(); assertTrue(testFile.exists()); int expver = 2; // if we tried to read while node or content dir was missing, version // number will have been reset. if (tryRead) { switch (whichFile) { case DEL_NODE_DIR: case DEL_CONTENT_DIR: expver = 1; } } assertEquals(expver, leaf.getCurrentVersion()); assertEquals("test content 22222", getLeafContent(leaf)); assertEquals("value 2", leaf.getNodeContents().getProperties().get("key2")); } public void testDisappearingNodeDir() throws Exception { testDisappearingFile(DEL_NODE_DIR, false); } public void testDisappearingContentDir() throws Exception { testDisappearingFile(DEL_CONTENT_DIR, false); } public void testDisappearingContentFile() throws Exception { testDisappearingFile(DEL_CONTENT_FILE, false); } public void testDisappearingPropsFile() throws Exception { testDisappearingFile(DEL_PROPS_FILE, false); } public void testDisappearingNodeDirWithRead() throws Exception { testDisappearingFile(DEL_NODE_DIR, true); } public void testDisappearingContentDirWithRead() throws Exception { testDisappearingFile(DEL_CONTENT_DIR, true); } public void testDisappearingContentFileWithRead() throws Exception { testDisappearingFile(DEL_CONTENT_FILE, true); } public void testDisappearingPropsFileWithRead() throws Exception { testDisappearingFile(DEL_PROPS_FILE, true); } public void testMakeNewVersionWithoutClosingStream() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream 1", new Properties()); leaf.makeNewVersion(); leaf.setNewProperties(new Properties()); OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream("test stream 2"); StreamUtil.copy(is, os); is.close(); // don't close outputstream leaf.sealNewVersion(); assertEquals(2, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream 2", resultStr); } public void testMakeNewIdenticalVersionDefault() throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", props)); assertEquals(1, leaf.getCurrentVersion()); // set the file extension leaf.dateValue = 123321; props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream"); 
leaf.sealNewVersion(); assertEquals(1, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); // make sure proper files exist tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/test.cache"); File testFileDir = new File(tempDirPath + "/#content"); File[] files = testFileDir.listFiles(); assertEquals(2, files.length); File testFile = new File(testFileDir, "current"); assertTrue(testFile.exists()); testFile = new File(testFileDir, "current.props"); assertTrue(testFile.exists()); // testFile = new File(testFileDir, "1.props-123321"); // assertFalse(testFile.exists()); } public void testMakeNewIdenticalVersionOldWay() throws Exception { props.setProperty(RepositoryNodeImpl.PARAM_KEEP_ALL_PROPS_FOR_DUPE_FILE, "true"); ConfigurationUtil.setCurrentConfigFromProps(props); Properties props = new Properties(); props.setProperty("test 1", "value 1"); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", props)); assertEquals(1, leaf.getCurrentVersion()); // set the file extension leaf.dateValue = 123321; props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream"); leaf.sealNewVersion(); assertEquals(1, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); // make sure proper files exist tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/test.cache"); File testFileDir = new File(tempDirPath + "/#content"); File[] files = testFileDir.listFiles(); assertEquals(3, files.length); File testFile = new File(testFileDir, "current"); assertTrue(testFile.exists()); testFile = new File(testFileDir, "current.props"); assertTrue(testFile.exists()); testFile = new File(testFileDir, "1.props-123321"); assertTrue(testFile.exists()); } public void testMakeNewIdenticalVersionNewWay() throws Exception { props.setProperty(RepositoryNodeImpl.PARAM_KEEP_ALL_PROPS_FOR_DUPE_FILE, "false"); ConfigurationUtil.setCurrentConfigFromProps(props); Properties props = new Properties(); props.setProperty("test 1", "value 1"); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", props)); assertEquals(1, leaf.getCurrentVersion()); // set the file extension leaf.dateValue = 123321; props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream"); leaf.sealNewVersion(); assertEquals(1, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); // make sure proper files exist tempDirPath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau); tempDirPath = LockssRepositoryImpl.mapUrlToFileLocation(tempDirPath, "http://www.example.com/testDir/test.cache"); File testFileDir = new 
File(tempDirPath + "/#content"); File[] files = testFileDir.listFiles(); assertEquals(2, files.length); File testFile = new File(testFileDir, "current"); assertTrue(testFile.exists()); testFile = new File(testFileDir, "current.props"); assertTrue(testFile.exists()); // testFile = new File(testFileDir, "1.props-123321"); // assertFalse(testFile.exists()); } public void testIdenticalVersionFixesVersionError() throws Exception { Properties props = new Properties(); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", props)); assertEquals(1, leaf.getCurrentVersion()); props = new Properties(); leaf.makeNewVersion(); leaf.setNewProperties(props); // set to error state leaf.currentVersion = 0; writeToLeaf(leaf, "test stream"); assertEquals(0, leaf.currentVersion); leaf.sealNewVersion(); // fixes error state, even though identical assertEquals(1, leaf.getCurrentVersion()); } public void testMakeNewVersionFixesVersionError() throws Exception { Properties props = new Properties(); MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf( "http://www.example.com/testDir/test.cache", "test stream", props)); assertEquals(1, leaf.getCurrentVersion()); props = new Properties(); leaf.makeNewVersion(); // set to error state leaf.currentVersion = -1; leaf.setNewProperties(props); writeToLeaf(leaf, "test stream2"); leaf.sealNewVersion(); // fixes error state assertEquals(1, leaf.getCurrentVersion()); } public void testGetInputStream() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream", null); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); } public void testGetProperties() throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream", props); RepositoryNode.RepositoryNodeContents contents = leaf.getNodeContents(); props = contents.getProperties(); // close stream to allow the file to be renamed later // XXX 'getProperties()' creates an input stream, and 'release()' just // sets it to null. The rename still fails in Windows unless the stream // is closed first. 
contents.getInputStream().close(); contents.release(); assertEquals("value 1", props.getProperty("test 1")); leaf.makeNewVersion(); props = new Properties(); props.setProperty("test 1", "value 2"); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.sealNewVersion(); props = leaf.getNodeContents().getProperties(); assertEquals("value 2", props.getProperty("test 1")); } RepositoryNode createNodeWithCorruptProps(String url) throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); RepositoryNode leaf = createLeaf(url, "test stream", props); RepositoryNodeImpl leafImpl = (RepositoryNodeImpl)leaf; File propsFile = new File(leafImpl.getContentDir(), RepositoryNodeImpl.CURRENT_PROPS_FILENAME); // Write a Malformed unicode escape that will cause Properties.load() // to throw OutputStream os = new BufferedOutputStream(new FileOutputStream(propsFile, true)); os.write("\\uxxxxfoo=bar".getBytes()); os.close(); return leaf; } public void testCorruptProperties1() throws Exception { RepositoryNode leaf = createNodeWithCorruptProps("http://www.example.com/testDir/test.cache"); assertFalse(leaf.hasContent()); assertTrue(leaf.isDeleted()); leaf.makeNewVersion(); writeToLeaf(leaf, "test stream"); leaf.setNewProperties(new Properties()); leaf.sealNewVersion(); assertTrue(leaf.hasContent()); assertFalse(leaf.isDeleted()); } public void testCorruptProperties2() throws Exception { String stem = "http://www.example.com/testDir"; RepositoryNode leaf = createNodeWithCorruptProps(stem + "/test.cache"); RepositoryNode leaf2 = createLeaf(stem + "/foo", "test stream", props); RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); Iterator childIt = dirEntry.listChildren(null, false); assertEquals(ListUtil.list(leaf2), ListUtil.fromIterator(childIt)); } static String cntnt(int ix) { return "content " + ix + "ABCDEFGHIJKLMNOPQRSTUVWXYZ".substring(0, ix); } static int lngth(int ix) { return cntnt(ix).length(); } public void testGetNodeVersion() throws Exception { int max = 5; String url = "http://www.example.com/versionedcontent.txt"; String key = "key"; String val = "grrl"; Properties props = new Properties(); RepositoryNode leaf = repo.createNewNode(url); // create several versions for (int ix = 1; ix <= max; ix++) { props.setProperty(key, val+ix); createContentVersion(leaf, cntnt(ix), props); } // getNodeVersion(current) should return the main node assertEquals(leaf, leaf.getNodeVersion(leaf.getCurrentVersion())); // loop through other versions checking version, content, props for (int ix = 1; ix < max; ix++) { RepositoryNodeVersion nodeVer = leaf.getNodeVersion(ix); log.debug("ver: " + nodeVer.getVersion() + ", content: " + getLeafContent(nodeVer)); assertEquals(ix, nodeVer.getVersion()); assertEquals(cntnt(ix), getLeafContent(nodeVer)); assertEquals(lngth(ix), nodeVer.getContentSize()); props = nodeVer.getNodeContents().getProperties(); assertEquals(val+ix, props.getProperty(key)); } } public void testGetNodeVersions() throws Exception { int max = 5; String url = "http://www.example.com/versionedcontent.txt"; String key = "key"; String val = "grrl"; Properties props = new Properties(); RepositoryNode leaf = repo.createNewNode(url); // create several versions for (int ix = 1; ix <= max; ix++) { props.setProperty(key, val+ix); createContentVersion(leaf, cntnt(ix), props); } // check expected current version number assertEquals(max, leaf.getCurrentVersion()); assertEquals(max, leaf.getVersion()); // checking version, content, props of current 
version assertEquals(cntnt(max), getLeafContent(leaf)); assertEquals(lngth(max), leaf.getContentSize()); props = leaf.getNodeContents().getProperties(); assertEquals(val+max, props.getProperty(key)); // ask for all older versions RepositoryNodeVersion[] vers = leaf.getNodeVersions(); assertEquals(max, vers.length); // loop through them checking version, content, props for (int ix = 0; ix < max-1; ix++) { int exp = max - ix; RepositoryNodeVersion nodeVer = vers[ix]; log.debug("ver: " + nodeVer.getVersion() + ", content: " + getLeafContent(nodeVer)); assertEquals(exp, nodeVer.getVersion()); assertEquals(cntnt(exp), getLeafContent(nodeVer)); assertEquals(lngth(exp), nodeVer.getContentSize()); props = nodeVer.getNodeContents().getProperties(); assertEquals(val+exp, props.getProperty(key)); } // now ask for and check a subset of the older versions assertTrue("max must be at least 4 for this test", max >= 4); int numver = max - 2; vers = leaf.getNodeVersions(numver); assertEquals(numver, vers.length); for (int ix = 0; ix < numver-1; ix++) { int exp = max - ix; RepositoryNodeVersion nodeVer = vers[ix]; log.debug("ver: " + nodeVer.getVersion() + ", content: " + getLeafContent(nodeVer)); assertEquals(exp, nodeVer.getVersion()); assertEquals(cntnt(exp), getLeafContent(nodeVer)); assertEquals(lngth(exp), nodeVer.getContentSize()); props = nodeVer.getNodeContents().getProperties(); assertEquals(val+exp, props.getProperty(key)); } } public void testIllegalVersionOperations() throws Exception { RepositoryNode.RepositoryNodeContents rnc; RepositoryNodeVersion nv; RepositoryNode leaf = repo.createNewNode("http://www.example.com/testDir/test.cache"); try { nv = leaf.getNodeVersion(7); fail("No content, shouldn't be able to get versioned node: " + nv); } catch (UnsupportedOperationException e) { } // create first version Properties props = new Properties(); props.setProperty("key", "val1"); createContentVersion(leaf, cntnt(1), props); // We're allowed to get a RepositoryNodeVersion when the version // doesn't exist ... 
nv = leaf.getNodeVersion(7); // but all operations on it should throw try { nv.getContentSize(); fail("No version; shouldn't get content size"); } catch (UnsupportedOperationException e) { } try { rnc = nv.getNodeContents(); fail("No version; shouldn't get RepositoryNodeContents"); } catch (UnsupportedOperationException e) { } } public void testDirContent() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream", null); assertTrue(leaf.hasContent()); RepositoryNode dir = repo.getNode("http://www.example.com/testDir"); dir.makeNewVersion(); writeToLeaf(dir, "test stream"); dir.setNewProperties(new Properties()); dir.sealNewVersion(); assertTrue(dir.hasContent()); dir = createLeaf("http://www.example.com/testDir/test.cache/new.test", "test stream", null); assertTrue(dir.hasContent()); } public void testNodeSize() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/testDir/test.cache", "test stream", null); assertTrue(leaf.hasContent()); assertEquals(11, (int)leaf.getContentSize()); } public void testTreeSize() throws Exception { createLeaf("http://www.example.com/testDir", "test", null); createLeaf("http://www.example.com/testDir/test1", "test1", null); createLeaf("http://www.example.com/testDir/test2", "test2", null); createLeaf("http://www.example.com/testDir/test3/branch1", "test33", null); createLeaf("http://www.example.com/testDir/test3/branch2", "test33", null); RepositoryNode leaf = repo.getNode("http://www.example.com/testDir"); assertEquals(-1, leaf.getTreeContentSize(null, false)); assertEquals(26, leaf.getTreeContentSize(null, true)); assertEquals(26, leaf.getTreeContentSize(null, false)); leaf = repo.getNode("http://www.example.com/testDir/test1"); assertEquals(5, leaf.getTreeContentSize(null, true)); leaf = repo.getNode("http://www.example.com/testDir/test3"); assertEquals(12, leaf.getTreeContentSize(null, true)); CachedUrlSetSpec cuss = new RangeCachedUrlSetSpec("http://www.example.com/testDir/test3", "/branch1", "/branch1"); assertEquals(6, leaf.getTreeContentSize(cuss, true)); } public void testDetermineParentNode() throws Exception { repo.createNewNode("http://www.example.com"); repo.createNewNode("http://www.example.com/test"); assertNotNull(repo.getNode("http://www.example.com/test")); RepositoryNodeImpl node = (RepositoryNodeImpl)repo.createNewNode( "http://www.example.com/test/branch"); assertEquals("http://www.example.com/test/branch", node.getNodeUrl()); node = node.determineParentNode(); assertEquals("http://www.example.com/test", node.getNodeUrl()); node = node.determineParentNode(); assertEquals("http://www.example.com", node.getNodeUrl()); node = node.determineParentNode(); assertEquals(AuUrl.PROTOCOL, node.getNodeUrl()); node = node.determineParentNode(); assertEquals(AuUrl.PROTOCOL, node.getNodeUrl()); } public void testCacheInvalidation() throws Exception { RepositoryNodeImpl root = (RepositoryNodeImpl)createLeaf("http://www.example.com", "test", null); RepositoryNodeImpl branch = (RepositoryNodeImpl)createLeaf("http://www.example.com/branch", "test", null); RepositoryNodeImpl branch2 = (RepositoryNodeImpl)createLeaf("http://www.example.com/branch/branch2", "test", null); // This one has directory level with no node prop file, to check that // cache invalidation traverses them correctly RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/branch/branch2/a/b/c/leaf", "test", null); assertNull(branch.nodeProps.getProperty(TREE_SIZE_PROPERTY)); 
assertNull(leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY)); // force invalidation to happen branch.nodeProps.setProperty(TREE_SIZE_PROPERTY, "789"); branch.invalidateCachedValues(true); // should now be explicitly marked invalid assertEquals(RepositoryNodeImpl.INVALID, branch.nodeProps.getProperty(TREE_SIZE_PROPERTY)); assertEquals(RepositoryNodeImpl.INVALID, branch.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); // fake prop set at root to check invalidation stops properly root.nodeProps.setProperty(TREE_SIZE_PROPERTY, "789"); root.nodeProps.setProperty(CHILD_COUNT_PROPERTY, "3"); // don't set branch so the invalidate stops there branch2.nodeProps.setProperty(TREE_SIZE_PROPERTY, "456"); branch2.nodeProps.setProperty(CHILD_COUNT_PROPERTY, "1"); leaf.nodeProps.setProperty(TREE_SIZE_PROPERTY, "123"); leaf.nodeProps.setProperty(CHILD_COUNT_PROPERTY, "0"); leaf.invalidateCachedValues(true); // shouldn't be set here anymore assertFalse(isPropValid(leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertFalse(isPropValid(leaf.nodeProps.getProperty(CHILD_COUNT_PROPERTY))); // or here (requires recursing up through dirs that have no node props // file) assertFalse(isPropValid(branch2.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertFalse(isPropValid(branch2.nodeProps.getProperty(CHILD_COUNT_PROPERTY))); // still invalid, recursion should have stopped here assertFalse(isPropValid(branch.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertFalse(isPropValid(branch.nodeProps.getProperty(CHILD_COUNT_PROPERTY))); // so these should not have been cleared assertTrue(isPropValid(root.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertTrue(isPropValid(root.nodeProps.getProperty(CHILD_COUNT_PROPERTY))); assertEquals("789", root.nodeProps.getProperty(TREE_SIZE_PROPERTY)); assertEquals("3", root.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); } boolean isPropValid(String val) { return RepositoryNodeImpl.isPropValid(val); } boolean isPropInvalid(String val) { return RepositoryNodeImpl.isPropInvalid(val); } public void testTreeSizeCaching() throws Exception { createLeaf("http://www.example.com/testDir", "test", null); RepositoryNodeImpl leaf = (RepositoryNodeImpl)repo.getNode("http://www.example.com/testDir"); assertNull(leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY)); assertEquals(4, leaf.getTreeContentSize(null, true)); assertEquals("4", leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY)); leaf.markAsDeleted(); assertTrue(isPropInvalid(leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY))); assertEquals(0, leaf.getTreeContentSize(null, true)); assertEquals("0", leaf.nodeProps.getProperty(TREE_SIZE_PROPERTY)); } public void testChildCount() throws Exception { createLeaf("http://www.example.com/testDir", "test", null); RepositoryNodeImpl leaf = (RepositoryNodeImpl)repo.getNode("http://www.example.com/testDir"); assertNull(leaf.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); assertEquals(0, leaf.getChildCount()); assertEquals("0", leaf.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); createLeaf("http://www.example.com/testDir/test1", "test1", null); createLeaf("http://www.example.com/testDir/test2", "test2", null); assertEquals(2, leaf.getChildCount()); assertEquals("2", leaf.nodeProps.getProperty(CHILD_COUNT_PROPERTY)); } public void testDeactivate() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); assertTrue(leaf.hasContent()); assertFalse(leaf.isContentInactive()); assertEquals(1, leaf.getCurrentVersion());
assertNull(leaf.nodeProps.getProperty(RepositoryNodeImpl.INACTIVE_CONTENT_PROPERTY)); leaf.deactivateContent(); assertFalse(leaf.hasContent()); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); assertEquals("true", leaf.nodeProps.getProperty(RepositoryNodeImpl.INACTIVE_CONTENT_PROPERTY)); } public void testDelete() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); assertTrue(leaf.hasContent()); assertFalse(leaf.isDeleted()); assertEquals(1, leaf.getCurrentVersion()); assertNull(leaf.nodeProps.getProperty(RepositoryNodeImpl.DELETION_PROPERTY)); leaf.markAsDeleted(); assertFalse(leaf.hasContent()); assertTrue(leaf.isDeleted()); assertEquals(RepositoryNodeImpl.DELETED_VERSION, leaf.getCurrentVersion()); assertEquals("true", leaf.nodeProps.getProperty(RepositoryNodeImpl.DELETION_PROPERTY)); } public void testUnDelete() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); leaf.markAsDeleted(); assertTrue(leaf.isDeleted()); assertEquals(RepositoryNodeImpl.DELETED_VERSION, leaf.getCurrentVersion()); leaf.markAsNotDeleted(); assertFalse(leaf.isContentInactive()); assertFalse(leaf.isDeleted()); assertEquals(1, leaf.getCurrentVersion()); // make to null, not 'false' assertNull(leaf.nodeProps.getProperty(RepositoryNodeImpl.DELETION_PROPERTY)); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); } public void testRestoreLastVersion() throws Exception { Properties props = new Properties(); props.setProperty("test 1", "value 1"); RepositoryNode leaf = createLeaf("http://www.example.com/test1", "test stream 1", props); assertEquals(1, leaf.getCurrentVersion()); props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.sealNewVersion(); assertEquals(2, leaf.getCurrentVersion()); leaf.restoreLastVersion(); assertEquals(1, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); assertEquals("test stream 1", resultStr); props = leaf.getNodeContents().getProperties(); assertEquals("value 1", props.getProperty("test 1")); } public void testReactivateViaRestore() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); leaf.deactivateContent(); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); leaf.restoreLastVersion(); assertFalse(leaf.isContentInactive()); assertEquals(1, leaf.getCurrentVersion()); // back to null, not 'false' assertNull(leaf.nodeProps.getProperty(RepositoryNodeImpl.INACTIVE_CONTENT_PROPERTY)); String resultStr = getLeafContent(leaf); assertEquals("test stream", resultStr); } public void testReactivateViaNewVersion() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com/test1", "test stream", null); leaf.deactivateContent(); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); Properties props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.sealNewVersion(); assertFalse(leaf.isContentInactive()); assertEquals(2, leaf.getCurrentVersion()); String resultStr = getLeafContent(leaf); 
assertEquals("test stream 2", resultStr); File lastProps = new File(leaf.contentDir, "1.props"); assertTrue(lastProps.exists()); InputStream is = new BufferedInputStream(new FileInputStream(lastProps)); props.load(is); is.close(); // make sure the 'was inactive' property hasn't been lost assertEquals("true", props.getProperty(RepositoryNodeImpl.NODE_WAS_INACTIVE_PROPERTY)); } public void testAbandonReactivateViaNewVersion() throws Exception { RepositoryNode leaf = createLeaf("http://www.example.com/test1", "test stream", null); leaf.deactivateContent(); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); Properties props = new Properties(); props.setProperty("test 1", "value 2"); leaf.makeNewVersion(); leaf.setNewProperties(props); writeToLeaf(leaf, "test stream 2"); leaf.abandonNewVersion(); assertTrue(leaf.isContentInactive()); assertEquals(RepositoryNodeImpl.INACTIVE_VERSION, leaf.getCurrentVersion()); } public void testIsLeaf() throws Exception { createLeaf("http://www.example.com/testDir/test1", "test stream", null); createLeaf("http://www.example.com/testDir/branch1", "test stream", null); createLeaf("http://www.example.com/testDir/branch1/test4", "test stream", null); RepositoryNode leaf = repo.getNode("http://www.example.com/testDir/test1"); assertTrue(leaf.isLeaf()); leaf = repo.getNode("http://www.example.com/testDir/branch1"); assertFalse(leaf.isLeaf()); } public void testListInactiveNodes() throws Exception { createLeaf("http://www.example.com/testDir/test1", "test stream", null); createLeaf("http://www.example.com/testDir/test2", "test stream", null); createLeaf("http://www.example.com/testDir/test3", "test stream", null); createLeaf("http://www.example.com/testDir/branch1", "test stream", null); createLeaf("http://www.example.com/testDir/branch1/test4", "test stream", null); createLeaf("http://www.example.com/testDir/branch2", "test stream", null); createLeaf("http://www.example.com/testDir/branch2/test5", "test stream", null); RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); Iterator childIt = dirEntry.listChildren(null, false); ArrayList childL = new ArrayList(3); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } String[] expectedA = new String[] { "http://www.example.com/testDir/branch1", "http://www.example.com/testDir/branch2", "http://www.example.com/testDir/test1", "http://www.example.com/testDir/test2", "http://www.example.com/testDir/test3" }; assertIsomorphic(expectedA, childL); RepositoryNode leaf = repo.getNode("http://www.example.com/testDir/test2"); leaf.deactivateContent(); // this next shouldn't be excluded since it isn't a leaf node leaf = repo.getNode("http://www.example.com/testDir/branch1"); leaf.deactivateContent(); // this next should be excluded because it's deleted leaf = repo.getNode("http://www.example.com/testDir/branch2"); leaf.markAsDeleted(); childIt = dirEntry.listChildren(null, false); childL = new ArrayList(2); while (childIt.hasNext()) { RepositoryNode node = (RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { "http://www.example.com/testDir/branch1", "http://www.example.com/testDir/test1", "http://www.example.com/testDir/test3" }; assertIsomorphic("Excluding inactive nodes failed.", expectedA, childL); childIt = dirEntry.listChildren(null, true); childL = new ArrayList(3); while (childIt.hasNext()) { RepositoryNode node = 
(RepositoryNode)childIt.next(); childL.add(node.getNodeUrl()); } expectedA = new String[] { "http://www.example.com/testDir/branch1", "http://www.example.com/testDir/test1", "http://www.example.com/testDir/test2", "http://www.example.com/testDir/test3" }; assertIsomorphic("Including inactive nodes failed.", expectedA, childL); } public void testDeleteInnerNode() throws Exception { createLeaf("http://www.example.com/testDir/test1", "test stream", null); createLeaf("http://www.example.com/testDir/test2", "test stream", null); RepositoryNode dirEntry = repo.getNode("http://www.example.com/testDir"); assertFalse(dirEntry.isDeleted()); dirEntry.markAsDeleted(); assertTrue(dirEntry.isDeleted()); dirEntry.markAsNotDeleted(); assertFalse(dirEntry.isDeleted()); } public void testGetFileStrings() throws Exception { RepositoryNodeImpl node = (RepositoryNodeImpl)repo.createNewNode( "http://www.example.com/test.url"); node.initNodeRoot(); String contentStr = FileUtil.sysDepPath(node.nodeLocation + "/#content"); assertEquals(contentStr, node.getContentDir().toString()); contentStr = contentStr + File.separator; String expectedStr = contentStr + "123"; assertEquals(expectedStr, node.getVersionedCacheFile(123).getAbsolutePath()); expectedStr = contentStr + "123.props"; assertEquals(expectedStr, node.getVersionedPropsFile(123).getAbsolutePath()); expectedStr = contentStr + "inactive"; assertEquals(expectedStr, node.getInactiveCacheFile().getAbsolutePath()); expectedStr = contentStr + "inactive.props"; assertEquals(expectedStr, node.getInactivePropsFile().getAbsolutePath()); } public void testCheckNodeConsistency() throws Exception { // check returns proper values for errors MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)repo.createNewNode("http://www.example.com/testDir")); leaf.makeNewVersion(); // should abort and return true since version open leaf.failRootConsist = true; assertTrue(leaf.checkNodeConsistency()); // finish write leaf.setNewProperties(new Properties()); writeToLeaf(leaf, "test stream"); leaf.sealNewVersion(); // should return false if node root fails assertFalse(leaf.checkNodeConsistency()); leaf.failRootConsist = false; assertTrue(leaf.checkNodeConsistency()); // check returns false if content fails leaf.failContentConsist = true; assertFalse(leaf.checkNodeConsistency()); leaf.failContentConsist = false; assertTrue(leaf.checkNodeConsistency()); // check returns false if current info load fails leaf.failEnsureCurrentLoaded = true; assertFalse(leaf.checkNodeConsistency()); leaf.failEnsureCurrentLoaded = false; assertTrue(leaf.checkNodeConsistency()); } public void testCheckNodeRootConsistency() throws Exception { MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)repo.createNewNode("http://www.example.com/testDir")); leaf.createNodeLocation(); assertTrue(leaf.nodeRootFile.exists()); // returns true when normal assertTrue(leaf.checkNodeRootConsistency()); leaf.nodeRootFile.delete(); assertFalse(leaf.nodeRootFile.exists()); // creates dir, returns true when missing assertTrue(leaf.checkNodeRootConsistency()); assertTrue(leaf.nodeRootFile.exists()); assertTrue(leaf.nodeRootFile.isDirectory()); // fail node props load leaf.getChildCount(); assertTrue(leaf.nodePropsFile.exists()); File renameFile = new File(leaf.nodePropsFile.getAbsolutePath()+ RepositoryNodeImpl.FAULTY_FILE_EXTENSION); assertFalse(renameFile.exists()); leaf.failPropsLoad = true; assertTrue(leaf.checkNodeRootConsistency()); assertFalse(leaf.nodePropsFile.exists()); 
assertTrue(renameFile.exists()); } public void testCheckContentConsistency() throws Exception { MyMockRepositoryNode leaf = new MyMockRepositoryNode( (RepositoryNodeImpl)createLeaf("http://www.example.com/testDir", "test stream", null)); leaf.ensureCurrentInfoLoaded(); // should return false if content dir fails MyMockRepositoryNode.failEnsureDirExists = true; assertFalse(leaf.checkContentConsistency()); MyMockRepositoryNode.failEnsureDirExists = false; assertTrue(leaf.checkContentConsistency()); // should return false if content file absent File renameFile = new File(leaf.currentCacheFile.getAbsolutePath()+"RENAME"); assertTrue(PlatformUtil.updateAtomically(leaf.currentCacheFile, renameFile)); assertFalse(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(renameFile, leaf.currentCacheFile); assertTrue(leaf.checkContentConsistency()); // should return false if content props absent PlatformUtil.updateAtomically(leaf.currentPropsFile, renameFile); assertFalse(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(renameFile, leaf.currentPropsFile); assertTrue(leaf.checkContentConsistency()); // should return false if inactive and files missing leaf.currentVersion = RepositoryNodeImpl.INACTIVE_VERSION; assertFalse(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(leaf.currentPropsFile, leaf.getInactivePropsFile()); assertFalse(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(leaf.currentCacheFile, leaf.getInactiveCacheFile()); assertTrue(leaf.checkContentConsistency()); PlatformUtil.updateAtomically(leaf.getInactivePropsFile(), leaf.currentPropsFile); assertFalse(leaf.checkContentConsistency()); // finish restoring PlatformUtil.updateAtomically(leaf.getInactiveCacheFile(), leaf.currentCacheFile); leaf.currentVersion = 1; assertTrue(leaf.checkContentConsistency()); // remove residual files // - create files FileOutputStream fos = new FileOutputStream(leaf.tempCacheFile); StringInputStream sis = new StringInputStream("test stream"); StreamUtil.copy(sis, fos); fos.close(); sis.close(); fos = new FileOutputStream(leaf.tempPropsFile); sis = new StringInputStream("test stream"); StreamUtil.copy(sis, fos); fos.close(); sis.close(); // should be removed assertTrue(leaf.tempCacheFile.exists()); assertTrue(leaf.tempPropsFile.exists()); assertTrue(leaf.checkContentConsistency()); assertFalse(leaf.tempCacheFile.exists()); assertFalse(leaf.tempPropsFile.exists()); } public void testEnsureDirExists() throws Exception { RepositoryNodeImpl leaf = (RepositoryNodeImpl)createLeaf("http://www.example.com", null, null); File testDir = new File(tempDirPath, "testDir"); // should return true if dir exists testDir.mkdir(); assertTrue(testDir.exists()); assertTrue(testDir.isDirectory()); assertTrue(leaf.ensureDirExists(testDir)); // should create dir, return true if not exists testDir.delete(); assertFalse(testDir.exists()); assertTrue(leaf.ensureDirExists(testDir)); assertTrue(testDir.exists()); assertTrue(testDir.isDirectory()); // should rename file, create dir, return true if file exists // -create file testDir.delete(); FileOutputStream fos = new FileOutputStream(testDir); StringInputStream sis = new StringInputStream("test stream"); StreamUtil.copy(sis, fos); fos.close(); sis.close(); assertTrue(testDir.exists()); assertTrue(testDir.isFile()); // rename via 'ensureDirExists()' File renameFile = new File(tempDirPath, "testDir"+ RepositoryNodeImpl.FAULTY_FILE_EXTENSION); assertFalse(renameFile.exists()); assertTrue(leaf.ensureDirExists(testDir)); 
assertTrue(testDir.isDirectory()); assertEquals("test stream", StringUtil.fromFile(renameFile)); } public void testCheckFileExists() throws Exception { // return false if doesn't exist File testFile = new File(tempDirPath, "testFile"); assertFalse(testFile.exists()); assertFalse(RepositoryNodeImpl.checkFileExists(testFile, "test file")); // rename if dir (to make room for file creation), then return false testFile.mkdir(); File renameDir = new File(tempDirPath, "testFile"+ RepositoryNodeImpl.FAULTY_FILE_EXTENSION); assertTrue(testFile.exists()); assertTrue(testFile.isDirectory()); assertFalse(renameDir.exists()); assertFalse(RepositoryNodeImpl.checkFileExists(testFile, "test file")); assertFalse(testFile.exists()); assertTrue(renameDir.exists()); assertTrue(renameDir.isDirectory()); // return true if exists FileOutputStream fos = new FileOutputStream(testFile); StringInputStream sis = new StringInputStream("test stream"); StreamUtil.copy(sis, fos); fos.close(); sis.close(); assertTrue(testFile.exists()); assertTrue(testFile.isFile()); assertTrue(RepositoryNodeImpl.checkFileExists(testFile, "test file")); assertEquals("test stream", StringUtil.fromFile(testFile)); } public void testCheckChildCountCacheAccuracy() throws Exception { createLeaf("http://www.example.com/testDir/branch2", "test stream", null); createLeaf("http://www.example.com/testDir/branch3", "test stream", null); RepositoryNodeImpl dirEntry = (RepositoryNodeImpl)repo.getNode("http://www.example.com/testDir"); assertEquals(2, dirEntry.getChildCount()); assertEquals("2", dirEntry.nodeProps.getProperty(RepositoryNodeImpl.CHILD_COUNT_PROPERTY)); // check that no change to valid count cache dirEntry.checkChildCountCacheAccuracy(); assertEquals("2", dirEntry.nodeProps.getProperty(RepositoryNodeImpl.CHILD_COUNT_PROPERTY)); // check that invalid cache removed dirEntry.nodeProps.setProperty(RepositoryNodeImpl.CHILD_COUNT_PROPERTY, "3"); dirEntry.checkChildCountCacheAccuracy(); assertEquals(RepositoryNodeImpl.INVALID, dirEntry.nodeProps.getProperty(RepositoryNodeImpl.CHILD_COUNT_PROPERTY)); } private RepositoryNode createLeaf(String url, String content, Properties props) throws Exception { return createLeaf(repo, url, content, props); } public static RepositoryNode createLeaf(LockssRepository repo, String url, String content, Properties props) throws Exception { RepositoryNode leaf = repo.createNewNode(url); createContentVersion(leaf, content, props); return leaf; } public static void createContentVersion(RepositoryNode leaf, String content, Properties props) throws Exception { leaf.makeNewVersion(); writeToLeaf(leaf, content); if (props==null) { props = new Properties(); } leaf.setNewProperties(props); leaf.sealNewVersion(); } public static void writeToLeaf(RepositoryNode leaf, String content) throws Exception { if (content==null) { content = ""; } OutputStream os = leaf.getNewOutputStream(); InputStream is = new StringInputStream(content); StreamUtil.copy(is, os); os.close(); is.close(); } public static String getLeafContent(RepositoryNodeVersion leaf) throws IOException { return getRNCContent(leaf.getNodeContents()); } public static String getRNCContent(RepositoryNode.RepositoryNodeContents rnc) throws IOException { InputStream is = rnc.getInputStream(); OutputStream baos = new ByteArrayOutputStream(20); StreamUtil.copy(is, baos); is.close(); String resultStr = baos.toString(); baos.close(); return resultStr; } public static void main(String[] argv) { String[] testCaseList = { TestRepositoryNodeImpl.class.getName()}; 
junit.swingui.TestRunner.main(testCaseList); } // this class overrides 'getDatedVersionedPropsFile()' so I can // manipulate the file names for testing. Also allows 'loadNodeProps() // to fail on demand static class MyMockRepositoryNode extends RepositoryNodeImpl { long dateValue; boolean failPropsLoad = false; boolean failRootConsist = false; boolean failContentConsist = false; boolean failEnsureCurrentLoaded = false; static boolean failEnsureDirExists = false; MyMockRepositoryNode(RepositoryNodeImpl nodeImpl) { super(nodeImpl.url, nodeImpl.nodeLocation, nodeImpl.repository); } File getDatedVersionedPropsFile(int version, long date) { StringBuffer buffer = new StringBuffer(); buffer.append(version); buffer.append(PROPS_EXTENSION); buffer.append("-"); buffer.append(dateValue); return new File(getContentDir(), buffer.toString()); } void loadNodeProps(boolean okIfNotThere) { if (failPropsLoad) { throw new LockssRepository.RepositoryStateException("Couldn't load properties file."); } else { super.loadNodeProps(okIfNotThere); } } boolean checkNodeRootConsistency() { if (failRootConsist) { return false; } else { return super.checkNodeRootConsistency(); } } boolean checkContentConsistency() { if (failContentConsist) { return false; } else { return super.checkContentConsistency(); } } void ensureCurrentInfoLoaded() { if (failEnsureCurrentLoaded) { throw new LockssRepository.RepositoryStateException("Couldn't load current info."); } else { super.ensureCurrentInfoLoaded(); } } boolean ensureDirExists(File dirFile) { if (failEnsureDirExists) { return false; } else { return super.ensureDirExists(dirFile); } } } static class MyLockssRepositoryImpl extends LockssRepositoryImpl { boolean dontNormalize = false; void setDontNormalize(boolean val) { dontNormalize = val; } MyLockssRepositoryImpl(String rootPath) { super(rootPath); } public String canonicalizePath(String url) throws MalformedURLException { if (dontNormalize) return url; return super.canonicalizePath(url); } public static LockssRepository createNewLockssRepository(ArchivalUnit au) { String root = getRepositoryRoot(au); if (root == null) { throw new LockssRepository.RepositoryStateException("null root"); } String auDir = LockssRepositoryImpl.mapAuToFileLocation(root, au); log.debug("repo: " + auDir + ", au: " + au.getName()); // staticCacheLocation = extendCacheLocation(root); LockssRepositoryImpl repo = new MyLockssRepositoryImpl(auDir); Plugin plugin = au.getPlugin(); if (plugin != null) { LockssDaemon daemon = plugin.getDaemon(); if (daemon != null) { RepositoryManager mgr = daemon.getRepositoryManager(); if (mgr != null) { mgr.setRepositoryForPath(auDir, repo); } } } return repo; } } }
Convert platform-dependent path expressions to be platform independent. Don't run certain tests on platforms with case-insensitive file systems. git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@9099 4f837ed2-42f5-46e7-a7a5-fa17313484d4
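The commit message above describes two changes: replacing hand-built, "/"-separated expected paths with platform-independent ones, and skipping tests that rely on a case-sensitive file system. The sketch below only illustrates the first idea, written against helpers that already appear in the recorded test file (createLeaf, tempDirPath, mau, LockssRepositoryImpl.mapAuToFileLocation and mapUrlToFileLocation); the method name and the case-sensitivity guard mentioned in its comments are invented for this sketch and are not part of the recorded patch.
// Hypothetical sketch, not the recorded change: let java.io.File supply the
// platform's separator instead of concatenating "/" by hand.
public void testPlatformIndependentPathSketch() throws Exception {
  createLeaf("http://www.example.com/testDir/leafX", "test stream", null);
  String auRoot = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau);
  String nodeLoc = LockssRepositoryImpl.mapUrlToFileLocation(
      auRoot, "http://www.example.com/testDir/leafX");
  // File(parent, child) inserts the correct separator on every platform.
  File currentFile = new File(new File(nodeLoc, "#content"), "current");
  assertTrue(currentFile.exists());
  // For tests that distinguish names differing only in case, a guard such as an
  // (invented) isCaseSensitiveFileSystem(new File(tempDirPath)) check could
  // return early on case-insensitive volumes, as the commit message suggests.
}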
test/src/org/lockss/repository/TestRepositoryNodeImpl.java
Convert platform-dependent path expressions to be platform independent. Don't run certain tests on platforms with case-insensitive file systems.
Java
mit
c8573e7a6a9360df1dbc06f5b631644e6677d536
0
EDACC/edacc_gui,EDACC/edacc_gui,EDACC/edacc_gui,EDACC/edacc_gui,EDACC/edacc_gui
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edacc.model; import edacc.properties.PropertySource; import edacc.properties.PropertyTypeNotExistException; import edacc.satinstances.PropertyValueType; import edacc.satinstances.PropertyValueTypeManager; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Map.Entry; import java.util.Set; import java.util.Vector; /** * data access object of the Property class * @author rretz */ public class PropertyDAO { protected static final String table = "Property"; private static final ObjectCache<Property> cache = new ObjectCache<Property>(); private static String deleteQuery = "DELETE FROM " + table + " WHERE idProperty=?;"; private static String updateQuery = "UPDATE " + table + " SET name=?, description=?, propertyType=?, propertySource=? ," + "propertyValueType=?, multipleOccourence=?, idComputationMethod=?, computationMethodParameters=? WHERE idProperty=?;"; private static String insertQuery = "INSERT INTO " + table + " (name, description, propertyType, propertySource ," + "propertyValueType, multipleOccourence, idComputationMethod, computationMethodParameters, isDefault) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);"; /** * Creates a new Property object, saves it into the database and cache, and returns it. * @param name <String> of the Property object * @param prefix <String> prefix of the Property object * @param description <String> description of the Property object * @param valueType related PropertyValueType object * @return new Property which is also deposited in the database. 
* @throws NoConnectionToDBException * @throws SQLException */ public static Property createProperty(String name, Vector<String> regularExpression, String description, PropertyType type, PropertyValueType valueType, PropertySource source, boolean multiple, ComputationMethod computationMethod, String computationMethodParameters, String parameter, Boolean isDefault) throws NoConnectionToDBException, SQLException, PropertyIsUsedException, PropertyTypeDoesNotExistException, IOException, PropertyNotInDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException, PropertyAlreadyInDBException { Property r = new Property(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT name FROM " + table + " WHERE name=?"); ps.setString(1, name); ResultSet rs = ps.executeQuery(); if (rs.next()) { throw new PropertyAlreadyInDBException(); } r.setName(name); r.setDescription(description); r.setType(type); r.setValueType(valueType); r.setPropertySource(source); r.setMultiple(multiple); r.setIsDefault(isDefault); if (!source.equals(PropertySource.Parameter)) { if (computationMethod != null) { r.setComputationMethod(computationMethod); r.setComputationMethodParameters(computationMethodParameters); } else { r.setRegularExpression(regularExpression); } } r.setNew(); save(r); cache.cache(r); return r; } /** * Returns and caches (if necessary) the requested Property object * @param id of the requested Property object * @return the requested Property object * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyNotInDBException */ public static Property getById(int id) throws NoConnectionToDBException, SQLException, PropertyNotInDBException, PropertyTypeNotExistException, IOException, ComputationMethodDoesNotExistException { Property res = cache.getCached(id); if (res != null) { return res; } else { res = new Property(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT name, description, propertyType, propertySource ,propertyValueType, multipleOccourence, idComputationMethod, " + "computationMethodParameters, isDefault FROM " + table + " WHERE idProperty=?"); ps.setInt(1, id); ResultSet rs = ps.executeQuery(); if (!rs.next()) { throw new PropertyNotInDBException(); } res.setId(id); res.setName(rs.getString(1)); res.setDescription(rs.getString(2)); res.setType(rs.getInt(3)); res.setPropertySource(rs.getInt(4)); if (!res.getPropertySource().equals(PropertySource.Parameter)) { res.setRegularExpression(getRegularExpressions(id)); res.setValueType(PropertyValueTypeManager.getInstance().getPropertyValueTypeByName(rs.getString(5))); res.setMultiple(rs.getBoolean(6)); if (res.getRegularExpression().isEmpty()) { res.setComputationMethod(ComputationMethodDAO.getById(rs.getInt(7))); } else { res.setComputationMethod(null); } res.setComputationMethodParameters(rs.getString(8)); } else { res.setRegularExpression(new Vector<String>()); res.setValueType(null); res.setMultiple(false); res.setComputationMethod(null); res.setComputationMethodParameters(""); } res.setIsDefault(rs.getBoolean(9)); res.setSaved(); cache.cache(res); return res; } } /** * Saves the given Property into the database. Dependend on the PersistanteState of * the given object a new entry is created, deleted or updated in the database. 
* @param r the Property object to save into the database * @throws NoConnectionToDBException * @throws SQLException */ public static void save(Property r) throws NoConnectionToDBException, SQLException, PropertyIsUsedException, PropertyTypeDoesNotExistException, IOException, PropertyNotInDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException { if (r.isDeleted()) { if (r.getType().equals(PropertyType.InstanceProperty)) { InstanceHasPropertyDAO.removeAllOfProperty(r); } else { ExperimentResultHasPropertyDAO.removeAllOfProperty(r); } PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement(deleteQuery); ps.setInt(1, r.getId()); ps.executeUpdate(); ps.close(); cache.remove(r); } else if (r.isModified()) { PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement(updateQuery); ps.setString(1, r.getName()); setRegularExpressions(r.getRegularExpression(), r.getId()); ps.setString(2, r.getDescription()); ps.setInt(3, r.getPropertyTypeDBRepresentation()); ps.setInt(4, r.getPropertySourceDBRepresentation()); if (r.getPropertySource().equals(PropertySource.Parameter)) { ps.setNull(5, java.sql.Types.NULL); ps.setNull(6, java.sql.Types.NULL); } else { ps.setString(5, r.getPropertyValueType().getName()); ps.setBoolean(6, r.isMultiple()); } if (r.getComputationMethod() != null) { ps.setInt(7, r.getComputationMethod().getId()); } else { ps.setNull(7, java.sql.Types.NULL); } ps.setString(8, r.getComputationMethodParameters()); ps.setInt(9, r.getId()); ps.executeUpdate(); ps.close(); r.setSaved(); } else if (r.isNew()) { PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement(insertQuery, PreparedStatement.RETURN_GENERATED_KEYS); ps.setString(1, r.getName()); ps.setString(2, r.getDescription()); ps.setInt(3, r.getPropertyTypeDBRepresentation()); ps.setInt(4, r.getPropertySourceDBRepresentation()); if (r.getPropertySource().equals(PropertySource.Parameter)) { ps.setNull(5, java.sql.Types.NULL); ps.setNull(6, java.sql.Types.NULL); } else { ps.setString(5, r.getPropertyValueType().getName()); ps.setBoolean(6, r.isMultiple()); } if (r.getComputationMethod() != null) { ps.setInt(7, r.getComputationMethod().getId()); } else { ps.setNull(7, java.sql.Types.NULL); } ps.setString(8, r.getComputationMethodParameters()); ps.setBoolean(9, r.IsDefault()); ps.executeUpdate(); ResultSet generatedKeys = ps.getGeneratedKeys(); if (generatedKeys.next()) { r.setId(generatedKeys.getInt(1)); } generatedKeys.close(); ps.close(); r.setSaved(); if (r.getRegularExpression() != null) { setRegularExpressions(r.getRegularExpression(), r.getId()); } } } /** * * @return a Vector of all Property Objects which are in the database. * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws IOException */ public static Vector<Property> getAll() throws NoConnectionToDBException, SQLException, PropertyNotInDBException, PropertyTypeNotExistException, IOException, ComputationMethodDoesNotExistException { Vector<Property> res = new Vector<Property>(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty " + "FROM " + table); ResultSet rs = ps.executeQuery(); while (rs.next()) { res.add(PropertyDAO.getById(rs.getInt(1))); } return res; } /** * Removes the given property from the cache and database. 
* @param solverProperty to remove * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyIsUsedException * @throws PropertyTypeDoesNotExistException */ public static void remove(Property toRemove) throws NoConnectionToDBException, SQLException, PropertyIsUsedException, PropertyTypeDoesNotExistException, IOException, PropertyNotInDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException { toRemove.setDeleted(); save(toRemove); } /** * * @return all Property objects with the PropertyType InstanceProperty * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws IOException */ public static Vector<Property> getAllInstanceProperties() throws NoConnectionToDBException, SQLException, PropertyNotInDBException, PropertyTypeNotExistException, IOException, ComputationMethodDoesNotExistException { Vector<Property> res = new Vector<Property>(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty FROM " + table + " WHERE PropertyType=0;"); ResultSet rs = ps.executeQuery(); while (rs.next()) { res.add(PropertyDAO.getById(rs.getInt("idProperty"))); } rs.close(); ps.close(); return res; } /** * * @return all Property objects with the PropertyType ResultProperty * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws PropertyTypeNotExistException * @throws IOException */ public static Vector<Property> getAllResultProperties() throws NoConnectionToDBException, SQLException, PropertyNotInDBException, PropertyTypeNotExistException, PropertyTypeNotExistException, IOException, ComputationMethodDoesNotExistException { Vector<Property> res = new Vector<Property>(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty FROM " + table + " WHERE PropertyType=1;"); ResultSet rs = ps.executeQuery(); while (rs.next()) { res.add(PropertyDAO.getById(rs.getInt("idProperty"))); } rs.close(); ps.close(); return res; } private static Vector<String> getRegularExpressions(int id) throws NoConnectionToDBException, SQLException { PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT regexpr FROM PropertyRegExp WHERE idProperty=?;"); ps.setInt(1, id); ResultSet rs = ps.executeQuery(); Vector<String> res = new Vector<String>(); while (rs.next()) { res.add(rs.getString(1)); } rs.close(); ps.close(); return res; } private static void setRegularExpressions(Vector<String> regularExpression, int id) throws NoConnectionToDBException, SQLException { PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "DELETE FROM PropertyRegExp WHERE idProperty=?;"); ps.setInt(1, id); ps.executeUpdate(); ps = DatabaseConnector.getInstance().getConn().prepareStatement( "INSERT INTO PropertyRegExp (idProperty, regexpr) VALUES (?, ?);"); for (int i = 0; i < regularExpression.size(); i++) { ps.setInt(1, id); ps.setString(2, regularExpression.get(i)); ps.executeUpdate(); } ps.close(); } /** * Exports the Property object into a file, located in the given path. The name of the file * is the name of the property. * @param property Property Object to export * @param path Path in which the Property have to be exported. 
* @throws FileNotFoundException * @throws IOException */ public static void exportProperty(Property property, String path) throws FileNotFoundException, IOException { File f = new File(path + "/" + property.getName()); OutputStream output = new FileOutputStream(f); ObjectOutputStream o = new ObjectOutputStream(output); o.writeObject(property); } /** * Import the properties included in the given files. * @param files * @throws FileNotFoundException * @throws IOException * @throws ClassNotFoundException * @throws NoConnectionToDBException * @throws SQLException * @throws ComputationMethodAlreadyExistsException * @throws NoComputationMethodBinarySpecifiedException * @throws PropertyIsUsedException * @throws PropertyTypeDoesNotExistException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws ComputationMethodDoesNotExistException */ public static void importProperty(File[] files) throws FileNotFoundException, IOException, ClassNotFoundException, NoConnectionToDBException, SQLException, NoComputationMethodBinarySpecifiedException, PropertyIsUsedException, PropertyTypeDoesNotExistException, PropertyNotInDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException, ComputationMethodSameNameAlreadyExists { for (int i = 0; i < files.length; i++) { InputStream input = new FileInputStream(files[i]); ObjectInputStream in = new ObjectInputStream(input); Property prop = (Property) in.readObject(); ComputationMethod compMeth = prop.getComputationMethod(); if (compMeth != null) { compMeth.isNew(); try { ComputationMethodDAO.save(compMeth); } catch (ComputationMethodAlreadyExistsException ex) { prop.setComputationMethod(ComputationMethodDAO.getByName(compMeth.getName())); } catch (ComputationMethodSameMD5AlreadyExists ex) { prop.setComputationMethod(ComputationMethodDAO.getByMD5(compMeth.getMd5())); } } prop.isNew(); PropertyDAO.save(prop); } } public static void clearCache() { cache.clear(); } public static void init() throws SQLException, PropertyTypeNotExistException, IOException, NoConnectionToDBException, ComputationMethodDoesNotExistException, PropertyNotInDBException { InstanceHasPropertyDAO.init(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty, name, description, propertyType, propertySource ,propertyValueType, multipleOccourence, idComputationMethod, " + "computationMethodParameters, isDefault FROM " + table); ResultSet rs = ps.executeQuery(); while (rs.next()) { Property res = new Property(); res.setId(rs.getInt(1)); res.setName(rs.getString(2)); res.setDescription(rs.getString(3)); res.setType(rs.getInt(4)); res.setPropertySource(rs.getInt(5)); if (!res.getPropertySource().equals(PropertySource.Parameter)) { res.setRegularExpression(getRegularExpressions(res.getId())); res.setValueType(PropertyValueTypeManager.getInstance().getPropertyValueTypeByName(rs.getString(6))); res.setMultiple(rs.getBoolean(7)); if (res.getRegularExpression().isEmpty()) { res.setComputationMethod(ComputationMethodDAO.getById(rs.getInt(8))); } else { res.setComputationMethod(null); } res.setComputationMethodParameters(rs.getString(9)); } else { res.setRegularExpression(new Vector<String>()); res.setValueType(null); res.setMultiple(false); res.setComputationMethod(null); res.setComputationMethodParameters(""); } res.setIsDefault(rs.getBoolean(10)); res.setSaved(); cache.cache(res); return; } } /** * Extract and adds the property data from the csvfile to the database. 
* @param selected Relation between found csvfile properties and existing Property objects. * @param overwrite Overwrite existing property data? * @param csvFile * @param task * @throws IOException * @throws SQLException * @throws NoConnectionToDBException * @throws PropertyTypeNotExistException * @throws ComputationMethodDoesNotExistException * @throws InstanceHasPropertyNotInDBException * @throws InstancesNotFoundException */ public static void importCSV(Set<Entry<Property, String>> selected, Boolean overwrite, File csvFile, Tasks task) throws IOException, SQLException, NoConnectionToDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException, InstanceHasPropertyNotInDBException, InstancesNotFoundException { task.setCancelable(true); task.setOperationName("Import properties from csv file"); ArrayList<String[]> instanceError = new ArrayList<String[]>(); BufferedReader br = new BufferedReader(new FileReader(csvFile)); String firstLine = br.readLine(); ArrayList<String> fstLine = new ArrayList<String>(Arrays.asList(firstLine.split(","))); ArrayList<Property> head = generateCSVImportHead(fstLine, selected); if (head == null) { task.cancel(true); return; } Boolean hasMD5 = false;; if (fstLine.get(1).equals("md5") || fstLine.get(1).equals("MD5") || fstLine.get(1).equals("Md5")) { hasMD5 = true; } // To reduces the connection requests to the Databaseconnector during the creation an save Process of InstanceHasProperty Objects PreparedStatement psNew = DatabaseConnector.getInstance().getConn().prepareStatement(InstanceHasPropertyDAO.getInsertQuery(), PreparedStatement.RETURN_GENERATED_KEYS); PreparedStatement psMod = DatabaseConnector.getInstance().getConn().prepareStatement(InstanceHasPropertyDAO.getUpdateQuery()); String line = br.readLine(); while (line != null) { ArrayList<String> tmpLine = new ArrayList<String>(Arrays.asList(line.split(","))); Instance tmp; int count = 0; if (hasMD5) { tmp = InstanceDAO.getByMd5(tmpLine.get(1)); count = 2; } else { tmp = InstanceDAO.getByName(tmpLine.get(0)); count = 1; } if (tmp == null) { if (hasMD5) { instanceError.add(new String[]{tmpLine.get(0), tmpLine.get(1)}); } else { instanceError.add(new String[]{tmpLine.get(0), ""}); } } else { // import property values for (int i = count; i < tmpLine.size(); i++) // Get the matching Instance { if (head.get((i - count)) != null) { InstanceHasPropertyDAO.createInstanceHasInstanceProperty(tmp, head.get((i - count)), tmpLine.get(i), overwrite, psNew, psMod); } } } line = br.readLine(); } if (!instanceError.isEmpty()) { throw new InstancesNotFoundException(instanceError); } } /** * * @param fstLine The first line of the csvfile to import. * @param Set of the relation between existing Property objects and properties from the csv File * @return ArrayList<Property>, which contains in order of the first line of the csvFile, the matching of csvFile * columns an Property objects. If a property of the csv file is without a related Propert object, an null entry is added to the list. 
* */ private static ArrayList<Property> generateCSVImportHead(ArrayList<String> fstLine, Set<Entry<Property, String>> selected) { ArrayList<Property> head = new ArrayList<Property>(); if (fstLine.size() < 2) { return null; } int count = 0; if (fstLine.get(0).equals("Name") || fstLine.get(0).equals("name") || fstLine.get(0).equals("NAME")) { count++; } else { return null; } if (fstLine.get(1).equals("md5") || fstLine.get(1).equals("MD5") || fstLine.get(1).equals("Md5")) { count++; } for (int i = count; i < fstLine.size(); i++) { Boolean tmp = false; for (Entry ent : selected) { if (ent.getValue().equals(fstLine.get(i))) { head.add((Property) ent.getKey()); tmp = true; break; } } if (!tmp) { head.add(null); } } return head; } /** * * @return All InstanceProperty objects, without the objects whose propertySource is CSVImport. * @throws SQLException * @throws NoConnectionToDBException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws IOException */ public static Vector<Property> getAllInstancePropertiesWithoutCSVImport() throws SQLException, NoConnectionToDBException, PropertyNotInDBException, PropertyTypeNotExistException, IOException { Vector<Property> res = new Vector<Property>(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty FROM " + table + " WHERE PropertyType=0 AND propertySource !=8;"); ResultSet rs = ps.executeQuery(); while (rs.next()) { res.add(PropertyDAO.getById(rs.getInt("idProperty"))); } rs.close(); ps.close(); return res; } }
src/edacc/model/PropertyDAO.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edacc.model; import edacc.properties.PropertySource; import edacc.properties.PropertyTypeNotExistException; import edacc.satinstances.PropertyValueType; import edacc.satinstances.PropertyValueTypeManager; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Map.Entry; import java.util.Set; import java.util.Vector; /** * data access object of the Property class * @author rretz */ public class PropertyDAO { protected static final String table = "Property"; private static final ObjectCache<Property> cache = new ObjectCache<Property>(); private static String deleteQuery = "DELETE FROM " + table + " WHERE idProperty=?;"; private static String updateQuery = "UPDATE " + table + " SET name=?, description=?, propertyType=?, propertySource=? ," + "propertyValueType=?, multipleOccourence=?, idComputationMethod=?, computationMethodParameters=? WHERE idProperty=?;"; private static String insertQuery = "INSERT INTO " + table + " (name, description, propertyType, propertySource ," + "propertyValueType, multipleOccourence, idComputationMethod, computationMethodParameters, isDefault) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);"; /** * Creates a new Property object, saves it into the database and cache, and returns it. * @param name <String> of the Property object * @param prefix <String> prefix of the Property object * @param description <String> description of the Property object * @param valueType related PropertyValueType object * @return new Property which is also deposited in the database. 
* @throws NoConnectionToDBException * @throws SQLException */ public static Property createProperty(String name, Vector<String> regularExpression, String description, PropertyType type, PropertyValueType valueType, PropertySource source, boolean multiple, ComputationMethod computationMethod, String computationMethodParameters, String parameter, Boolean isDefault) throws NoConnectionToDBException, SQLException, PropertyIsUsedException, PropertyTypeDoesNotExistException, IOException, PropertyNotInDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException, PropertyAlreadyInDBException { Property r = new Property(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT name FROM " + table + " WHERE name=?"); ps.setString(1, name); ResultSet rs = ps.executeQuery(); if (rs.next()) { throw new PropertyAlreadyInDBException(); } r.setName(name); r.setDescription(description); r.setType(type); r.setValueType(valueType); r.setPropertySource(source); r.setMultiple(multiple); r.setIsDefault(isDefault); if (!source.equals(PropertySource.Parameter)) { if (computationMethod != null) { r.setComputationMethod(computationMethod); r.setComputationMethodParameters(computationMethodParameters); } else { r.setRegularExpression(regularExpression); } } r.setNew(); save(r); cache.cache(r); return r; } /** * Returns and caches (if necessary) the requested Property object * @param id of the requested Property object * @return the requested Property object * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyNotInDBException */ public static Property getById(int id) throws NoConnectionToDBException, SQLException, PropertyNotInDBException, PropertyTypeNotExistException, IOException, ComputationMethodDoesNotExistException { Property res = cache.getCached(id); if (res != null) { return res; } else { res = new Property(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT name, description, propertyType, propertySource ,propertyValueType, multipleOccourence, idComputationMethod, " + "computationMethodParameters, isDefault FROM " + table + " WHERE idProperty=?"); ps.setInt(1, id); ResultSet rs = ps.executeQuery(); if (!rs.next()) { throw new PropertyNotInDBException(); } res.setId(id); res.setName(rs.getString(1)); res.setDescription(rs.getString(2)); res.setType(rs.getInt(3)); res.setPropertySource(rs.getInt(4)); if (!res.getPropertySource().equals(PropertySource.Parameter)) { res.setRegularExpression(getRegularExpressions(id)); res.setValueType(PropertyValueTypeManager.getInstance().getPropertyValueTypeByName(rs.getString(5))); res.setMultiple(rs.getBoolean(6)); if (res.getRegularExpression().isEmpty()) { res.setComputationMethod(ComputationMethodDAO.getById(rs.getInt(7))); } else { res.setComputationMethod(null); } res.setComputationMethodParameters(rs.getString(8)); } else { res.setRegularExpression(new Vector<String>()); res.setValueType(null); res.setMultiple(false); res.setComputationMethod(null); res.setComputationMethodParameters(""); } res.setIsDefault(rs.getBoolean(9)); res.setSaved(); cache.cache(res); return res; } } /** * Saves the given Property into the database. Dependend on the PersistanteState of * the given object a new entry is created, deleted or updated in the database. 
* @param r the Property object to save into the database * @throws NoConnectionToDBException * @throws SQLException */ public static void save(Property r) throws NoConnectionToDBException, SQLException, PropertyIsUsedException, PropertyTypeDoesNotExistException, IOException, PropertyNotInDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException { if (r.isDeleted()) { if (r.getType().equals(PropertyType.InstanceProperty)) { InstanceHasPropertyDAO.removeAllOfProperty(r); } else { ExperimentResultHasPropertyDAO.removeAllOfProperty(r); } PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement(deleteQuery); ps.setInt(1, r.getId()); ps.executeUpdate(); ps.close(); cache.remove(r); } else if (r.isModified()) { PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement(updateQuery); ps.setString(1, r.getName()); setRegularExpressions(r.getRegularExpression(), r.getId()); ps.setString(2, r.getDescription()); ps.setInt(3, r.getPropertyTypeDBRepresentation()); ps.setInt(4, r.getPropertySourceDBRepresentation()); if (r.getPropertySource().equals(PropertySource.Parameter)) { ps.setNull(5, java.sql.Types.NULL); ps.setNull(6, java.sql.Types.NULL); } else { ps.setString(5, r.getPropertyValueType().getName()); ps.setBoolean(6, r.isMultiple()); } if (r.getComputationMethod() != null) { ps.setInt(7, r.getComputationMethod().getId()); } else { ps.setNull(7, java.sql.Types.NULL); } ps.setString(8, r.getComputationMethodParameters()); ps.setInt(9, r.getId()); ps.executeUpdate(); ps.close(); r.setSaved(); } else if (r.isNew()) { PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement(insertQuery, PreparedStatement.RETURN_GENERATED_KEYS); ps.setString(1, r.getName()); ps.setString(2, r.getDescription()); ps.setInt(3, r.getPropertyTypeDBRepresentation()); ps.setInt(4, r.getPropertySourceDBRepresentation()); if (r.getPropertySource().equals(PropertySource.Parameter)) { ps.setNull(5, java.sql.Types.NULL); ps.setNull(6, java.sql.Types.NULL); } else { ps.setString(5, r.getPropertyValueType().getName()); ps.setBoolean(6, r.isMultiple()); } if (r.getComputationMethod() != null) { ps.setInt(7, r.getComputationMethod().getId()); } else { ps.setNull(7, java.sql.Types.NULL); } ps.setString(8, r.getComputationMethodParameters()); ps.setBoolean(9, r.IsDefault()); ps.executeUpdate(); ResultSet generatedKeys = ps.getGeneratedKeys(); if (generatedKeys.next()) { r.setId(generatedKeys.getInt(1)); } generatedKeys.close(); ps.close(); r.setSaved(); if (r.getRegularExpression() != null) { setRegularExpressions(r.getRegularExpression(), r.getId()); } } } /** * * @return a Vector of all Property Objects which are in the database. * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws IOException */ public static Vector<Property> getAll() throws NoConnectionToDBException, SQLException, PropertyNotInDBException, PropertyTypeNotExistException, IOException, ComputationMethodDoesNotExistException { Vector<Property> res = new Vector<Property>(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty " + "FROM " + table); ResultSet rs = ps.executeQuery(); while (rs.next()) { res.add(PropertyDAO.getById(rs.getInt(1))); } return res; } /** * Removes the given property from the cache and database. 
* @param solverProperty to remove * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyIsUsedException * @throws PropertyTypeDoesNotExistException */ public static void remove(Property toRemove) throws NoConnectionToDBException, SQLException, PropertyIsUsedException, PropertyTypeDoesNotExistException, IOException, PropertyNotInDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException { toRemove.setDeleted(); save(toRemove); } /** * * @return all Property objects with the PropertyType InstanceProperty * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws IOException */ public static Vector<Property> getAllInstanceProperties() throws NoConnectionToDBException, SQLException, PropertyNotInDBException, PropertyTypeNotExistException, IOException, ComputationMethodDoesNotExistException { Vector<Property> res = new Vector<Property>(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty FROM " + table + " WHERE PropertyType=0;"); ResultSet rs = ps.executeQuery(); while (rs.next()) { res.add(PropertyDAO.getById(rs.getInt("idProperty"))); } rs.close(); ps.close(); return res; } /** * * @return all Property objects with the PropertyType ResultProperty * @throws NoConnectionToDBException * @throws SQLException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws PropertyTypeNotExistException * @throws IOException */ public static Vector<Property> getAllResultProperties() throws NoConnectionToDBException, SQLException, PropertyNotInDBException, PropertyTypeNotExistException, PropertyTypeNotExistException, IOException, ComputationMethodDoesNotExistException { Vector<Property> res = new Vector<Property>(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty FROM " + table + " WHERE PropertyType=1;"); ResultSet rs = ps.executeQuery(); while (rs.next()) { res.add(PropertyDAO.getById(rs.getInt("idProperty"))); } rs.close(); ps.close(); return res; } private static Vector<String> getRegularExpressions(int id) throws NoConnectionToDBException, SQLException { PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT regexpr FROM PropertyRegExp WHERE idProperty=?;"); ps.setInt(1, id); ResultSet rs = ps.executeQuery(); Vector<String> res = new Vector<String>(); while (rs.next()) { res.add(rs.getString(1)); } rs.close(); ps.close(); return res; } private static void setRegularExpressions(Vector<String> regularExpression, int id) throws NoConnectionToDBException, SQLException { PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "DELETE FROM PropertyRegExp WHERE idProperty=?;"); ps.setInt(1, id); ps.executeUpdate(); ps = DatabaseConnector.getInstance().getConn().prepareStatement( "INSERT INTO PropertyRegExp (idProperty, regexpr) VALUES (?, ?);"); for (int i = 0; i < regularExpression.size(); i++) { ps.setInt(1, id); ps.setString(2, regularExpression.get(i)); ps.executeUpdate(); } ps.close(); } /** * Exports the Property object into a file, located in the given path. The name of the file * is the name of the property. * @param property Property Object to export * @param path Path in which the Property have to be exported. 
* @throws FileNotFoundException * @throws IOException */ public static void exportProperty(Property property, String path) throws FileNotFoundException, IOException { File f = new File(path + "/" + property.getName()); OutputStream output = new FileOutputStream(f); ObjectOutputStream o = new ObjectOutputStream(output); o.writeObject(property); } /** * Import the properties included in the given files. * @param files * @throws FileNotFoundException * @throws IOException * @throws ClassNotFoundException * @throws NoConnectionToDBException * @throws SQLException * @throws ComputationMethodAlreadyExistsException * @throws NoComputationMethodBinarySpecifiedException * @throws PropertyIsUsedException * @throws PropertyTypeDoesNotExistException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws ComputationMethodDoesNotExistException */ public static void importProperty(File[] files) throws FileNotFoundException, IOException, ClassNotFoundException, NoConnectionToDBException, SQLException, NoComputationMethodBinarySpecifiedException, PropertyIsUsedException, PropertyTypeDoesNotExistException, PropertyNotInDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException, ComputationMethodSameNameAlreadyExists { for (int i = 0; i < files.length; i++) { InputStream input = new FileInputStream(files[i]); ObjectInputStream in = new ObjectInputStream(input); Property prop = (Property) in.readObject(); ComputationMethod compMeth = prop.getComputationMethod(); if (compMeth != null) { compMeth.isNew(); try { ComputationMethodDAO.save(compMeth); } catch (ComputationMethodAlreadyExistsException ex) { prop.setComputationMethod(ComputationMethodDAO.getByName(compMeth.getName())); } catch (ComputationMethodSameMD5AlreadyExists ex) { prop.setComputationMethod(ComputationMethodDAO.getByMD5(compMeth.getMd5())); } } prop.isNew(); PropertyDAO.save(prop); } } public static void clearCache() { cache.clear(); } public static void init() throws SQLException, PropertyTypeNotExistException, IOException, NoConnectionToDBException, ComputationMethodDoesNotExistException, PropertyNotInDBException { InstanceHasPropertyDAO.init(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty, name, description, propertyType, propertySource ,propertyValueType, multipleOccourence, idComputationMethod, " + "computationMethodParameters, isDefault FROM " + table); ResultSet rs = ps.executeQuery(); while (rs.next()) { Property res = new Property(); res.setId(rs.getInt(1)); res.setName(rs.getString(2)); res.setDescription(rs.getString(3)); res.setType(rs.getInt(4)); res.setPropertySource(rs.getInt(5)); if (!res.getPropertySource().equals(PropertySource.Parameter)) { res.setRegularExpression(getRegularExpressions(res.getId())); res.setValueType(PropertyValueTypeManager.getInstance().getPropertyValueTypeByName(rs.getString(6))); res.setMultiple(rs.getBoolean(7)); if (res.getRegularExpression().isEmpty()) { res.setComputationMethod(ComputationMethodDAO.getById(rs.getInt(8))); } else { res.setComputationMethod(null); } res.setComputationMethodParameters(rs.getString(9)); } else { res.setRegularExpression(new Vector<String>()); res.setValueType(null); res.setMultiple(false); res.setComputationMethod(null); res.setComputationMethodParameters(""); } res.setIsDefault(rs.getBoolean(10)); res.setSaved(); cache.cache(res); return; } } /** * Extract and adds the property data from the csvfile to the database. 
* @param selected Relation between found csvfile properties and existing Property objects. * @param overwrite Overwrite existing property data? * @param csvFile * @param task * @throws IOException * @throws SQLException * @throws NoConnectionToDBException * @throws PropertyTypeNotExistException * @throws ComputationMethodDoesNotExistException * @throws InstanceHasPropertyNotInDBException * @throws InstancesNotFoundException */ public static void importCSV(Set<Entry<Property, String>> selected, Boolean overwrite, File csvFile, Tasks task) throws IOException, SQLException, NoConnectionToDBException, PropertyTypeNotExistException, ComputationMethodDoesNotExistException, InstanceHasPropertyNotInDBException, InstancesNotFoundException { task.setCancelable(true); task.setOperationName("Import properties from csv file"); ArrayList<String[]> instanceError = new ArrayList<String[]>(); BufferedReader br = new BufferedReader(new FileReader(csvFile)); String firstLine = br.readLine(); ArrayList<String> fstLine = new ArrayList<String>(Arrays.asList(firstLine.split(","))); ArrayList<Property> head = generateCSVImportHead(fstLine, selected); if (head == null) { task.cancel(true); return; } Boolean hasMD5 = false;; if (fstLine.get(1).equals("md5") || fstLine.get(1).equals("MD5") || fstLine.get(1).equals("Md5")) { hasMD5 = true; } // To reduces the connection requests to the Databaseconnector during the creation an save Process of InstanceHasProperty Objects PreparedStatement psNew = DatabaseConnector.getInstance().getConn().prepareStatement(InstanceHasPropertyDAO.getInsertQuery(), PreparedStatement.RETURN_GENERATED_KEYS); PreparedStatement psMod = DatabaseConnector.getInstance().getConn().prepareStatement(InstanceHasPropertyDAO.getUpdateQuery()); String line = br.readLine(); while (line != null) { ArrayList<String> tmpLine = new ArrayList<String>(Arrays.asList(line.split(","))); Instance tmp; int count = 0; if (hasMD5) { tmp = InstanceDAO.getByMd5AndName(tmpLine.get(0), tmpLine.get(1)); count = 2; } else { tmp = InstanceDAO.getByName(tmpLine.get(0)); count = 1; } if (tmp == null) { if (hasMD5) { instanceError.add(new String[]{tmpLine.get(0), tmpLine.get(1)}); } else { instanceError.add(new String[]{tmpLine.get(0), ""}); } } else { // import property values for (int i = count; i < tmpLine.size(); i++) // Get the matching Instance { if (head.get((i - count)) != null) { InstanceHasPropertyDAO.createInstanceHasInstanceProperty(tmp, head.get((i - count)), tmpLine.get(i), overwrite, psNew, psMod); } } } line = br.readLine(); } if (!instanceError.isEmpty()) { throw new InstancesNotFoundException(instanceError); } } /** * * @param fstLine The first line of the csvfile to import. * @param Set of the relation between existing Property objects and properties from the csv File * @return ArrayList<Property>, which contains in order of the first line of the csvFile, the matching of csvFile * columns an Property objects. If a property of the csv file is without a related Propert object, an null entry is added to the list. 
* */ private static ArrayList<Property> generateCSVImportHead(ArrayList<String> fstLine, Set<Entry<Property, String>> selected) { ArrayList<Property> head = new ArrayList<Property>(); if (fstLine.size() < 2) { return null; } int count = 0; if (fstLine.get(0).equals("Name") || fstLine.get(0).equals("name") || fstLine.get(0).equals("NAME")) { count++; } else { return null; } if (fstLine.get(1).equals("md5") || fstLine.get(1).equals("MD5") || fstLine.get(1).equals("Md5")) { count++; } for (int i = count; i < fstLine.size(); i++) { Boolean tmp = false; for (Entry ent : selected) { if (ent.getValue().equals(fstLine.get(i))) { head.add((Property) ent.getKey()); tmp = true; break; } } if (!tmp) { head.add(null); } } return head; } /** * * @return All InstanceProperty objects, without the objects whose propertySource is CSVImport. * @throws SQLException * @throws NoConnectionToDBException * @throws PropertyNotInDBException * @throws PropertyTypeNotExistException * @throws IOException */ public static Vector<Property> getAllInstancePropertiesWithoutCSVImport() throws SQLException, NoConnectionToDBException, PropertyNotInDBException, PropertyTypeNotExistException, IOException { Vector<Property> res = new Vector<Property>(); PreparedStatement ps = DatabaseConnector.getInstance().getConn().prepareStatement( "SELECT idProperty FROM " + table + " WHERE PropertyType=0 AND propertySource !=8;"); ResultSet rs = ps.executeQuery(); while (rs.next()) { res.add(PropertyDAO.getById(rs.getInt("idProperty"))); } rs.close(); ps.close(); return res; } }
property csv import: if md5 is given, search instance in db with given md5 only
src/edacc/model/PropertyDAO.java
property csv import: if md5 is given, search instance in db with given md5 only
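The change above shows up inside importCSV(...): the old contents call InstanceDAO.getByMd5AndName(tmpLine.get(0), tmpLine.get(1)), while the new contents call InstanceDAO.getByMd5(tmpLine.get(1)), so a row whose name no longer matches the database entry is still resolved as long as the MD5 column agrees. A self-contained sketch of that lookup rule follows; the InstanceLookup interface is a hypothetical stand-in, not an EDACC class.

import java.util.List;

// Sketch of the lookup rule after the commit: when the CSV header carries an
// MD5 column, the MD5 alone identifies the instance; otherwise fall back to
// the name in column 0.
public class CsvInstanceLookupSketch {

  // Hypothetical stand-in for the relevant InstanceDAO lookups.
  interface InstanceLookup {
    String byMd5(String md5);
    String byName(String name);
  }

  static String resolveInstance(InstanceLookup dao, List<String> row, boolean hasMd5) {
    // Column 0 is the instance name, column 1 (if present) is its MD5.
    return hasMd5 ? dao.byMd5(row.get(1)) : dao.byName(row.get(0));
  }

  public static void main(String[] args) {
    InstanceLookup dao = new InstanceLookup() {
      public String byMd5(String md5)   { return "instance(md5=" + md5 + ")"; }
      public String byName(String name) { return "instance(name=" + name + ")"; }
    };
    System.out.println(resolveInstance(dao, List.of("renamed.cnf", "d41d8cd98f00b204"), true));
    System.out.println(resolveInstance(dao, List.of("foo.cnf"), false));
  }
}

In the actual importCSV code above, a failed lookup is collected into instanceError and reported through InstancesNotFoundException once the whole file has been read, so one unmatched row does not abort the import.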
Java
mit
757d0f9394347a381826562e19bbce706101b772
0
letsdrink/ouzo-phpstorm-plugin
package com.github.letsdrink.intellijplugin; import com.github.letsdrink.intellijplugin.index.TranslationCallIndex; import com.google.common.base.Function; import com.google.common.collect.FluentIterable; import com.intellij.analysis.AnalysisScope; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.patterns.PlatformPatterns; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiElementVisitor; import com.intellij.psi.PsiFile; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.usageView.UsageInfo; import com.intellij.util.indexing.FileBasedIndex; import com.jetbrains.php.lang.parser.PhpElementTypes; import com.jetbrains.php.lang.psi.elements.BinaryExpression; import com.jetbrains.php.lang.psi.elements.FunctionReference; import org.jetbrains.annotations.NotNull; import javax.annotation.Nullable; import java.util.Collection; import java.util.HashSet; import java.util.Set; public class TranslationUsagesFinder { private final String searchedKey; public TranslationUsagesFinder(String searchedKey) { this.searchedKey = searchedKey; } public UsageInfo[] findUsages(final Project project) { final ProgressManager progressManager = ProgressManager.getInstance(); Collection<VirtualFile> files = getFilesToSearch(project); final AnalysisScope scope = new AnalysisScope(project, files); final int totalFiles = scope.getFileCount(); final Set<PsiElement> processed = new HashSet<PsiElement>(); Runnable searchRunner = new Runnable() { @Override public void run() { scope.accept(new PsiElementVisitor() { private int myFileCount = 0; @Override public void visitFile(PsiFile file) { myFileCount++; final ProgressIndicator progressIndicator = ProgressManager.getInstance().getProgressIndicator(); if (progressIndicator != null) { final VirtualFile virtualFile = file.getVirtualFile(); if (virtualFile != null) { progressIndicator.setText2(ProjectUtil.calcRelativeToProjectPath(virtualFile, project)); } progressIndicator.setFraction(((double) myFileCount) / totalFiles); } map(file, processed); } }); } }; if (ApplicationManager.getApplication().isDispatchThread()) { if (!progressManager.runProcessWithProgressSynchronously(searchRunner, "Searching usages of " + searchedKey + " ...", true, project)) { return null; } } else { searchRunner.run(); } return FluentIterable.from(processed).transform(toUsageInfo()).toArray(UsageInfo.class); } private Collection<VirtualFile> getFilesToSearch(Project project) { FileBasedIndex index = FileBasedIndex.getInstance(); Collection<VirtualFile> files = new HashSet<>(); String key = searchedKey; while (key != null) { files.addAll(index.getContainingFiles(TranslationCallIndex.KEY, key, GlobalSearchScope.allScope(project))); key = TranslationUtils.getParentKey(key); } return files; } private Function<PsiElement, UsageInfo> toUsageInfo() { return new Function<PsiElement, UsageInfo>() { @Nullable @Override public UsageInfo apply(@Nullable PsiElement psiElement) { return new UsageInfo(psiElement); } }; } public void map(@NotNull PsiFile file, Set<PsiElement> processed) { Collection<FunctionReference> calls = PsiTreeUtil.collectElementsOfType(file, FunctionReference.class); for (FunctionReference call : calls) { if (call.getParameters().length > 0 && 
(call.getName().equals("t") || call.getText().startsWith("I18n::labels"))) { addKey(processed, call.getParameters()[0]); } } } private void addKey(Set<PsiElement> map, PsiElement parameter) { String key = PsiUtils.getContent(parameter); if (key != null && (searchedKey.equals(key) || TranslationUtils.isParent(key, searchedKey))) { map.add(parameter); } else if (PlatformPatterns.psiElement(PhpElementTypes.CONCATENATION_EXPRESSION).accepts(parameter)) { BinaryExpression binaryExpression = (BinaryExpression) parameter; addKey(map, binaryExpression.getLeftOperand()); } } }
src/com/github/letsdrink/intellijplugin/TranslationUsagesFinder.java
package com.github.letsdrink.intellijplugin; import com.google.common.base.Function; import com.google.common.collect.FluentIterable; import com.intellij.analysis.AnalysisScope; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.patterns.PlatformPatterns; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiElementVisitor; import com.intellij.psi.PsiFile; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.usageView.UsageInfo; import com.jetbrains.php.lang.parser.PhpElementTypes; import com.jetbrains.php.lang.psi.elements.BinaryExpression; import com.jetbrains.php.lang.psi.elements.FunctionReference; import org.jetbrains.annotations.NotNull; import javax.annotation.Nullable; import java.util.Collection; import java.util.HashSet; import java.util.Set; public class TranslationUsagesFinder { private final String searchedKey; public TranslationUsagesFinder(String searchedKey) { this.searchedKey = searchedKey; } public UsageInfo[] findUsages(final Project project) { final ProgressManager progressManager = ProgressManager.getInstance(); final AnalysisScope scope = new AnalysisScope(project); final int totalFiles = scope.getFileCount(); final Set<PsiElement> processed = new HashSet<PsiElement>(); Runnable searchRunner = new Runnable() { @Override public void run() { scope.accept(new PsiElementVisitor() { private int myFileCount = 0; @Override public void visitFile(PsiFile file) { myFileCount++; final ProgressIndicator progressIndicator = ProgressManager.getInstance().getProgressIndicator(); if (progressIndicator != null) { final VirtualFile virtualFile = file.getVirtualFile(); if (virtualFile != null) { progressIndicator.setText2(ProjectUtil.calcRelativeToProjectPath(virtualFile, project)); } progressIndicator.setFraction(((double) myFileCount) / totalFiles); } map(file, processed); } }); } }; if (ApplicationManager.getApplication().isDispatchThread()) { if (!progressManager.runProcessWithProgressSynchronously(searchRunner, "Searching usages of " + searchedKey + " ...", true, project)) { return null; } } else { searchRunner.run(); } return FluentIterable.from(processed).transform(toUsageInfo()).toArray(UsageInfo.class); } private Function<PsiElement, UsageInfo> toUsageInfo() { return new Function<PsiElement, UsageInfo>() { @Nullable @Override public UsageInfo apply(@Nullable PsiElement psiElement) { return new UsageInfo(psiElement); } }; } public void map(@NotNull PsiFile file, Set<PsiElement> processed) { Collection<FunctionReference> calls = PsiTreeUtil.collectElementsOfType(file, FunctionReference.class); for (FunctionReference call : calls) { if (call.getParameters().length > 0 && (call.getName().equals("t") || call.getText().startsWith("I18n::labels"))) { addKey(processed, call.getParameters()[0]); } } } private void addKey(Set<PsiElement> map, PsiElement parameter) { String key = PsiUtils.getContent(parameter); if (key != null && (searchedKey.equals(key) || TranslationUtils.isParent(key, searchedKey))) { map.add(parameter); } else if (PlatformPatterns.psiElement(PhpElementTypes.CONCATENATION_EXPRESSION).accepts(parameter)) { BinaryExpression binaryExpression = (BinaryExpression) parameter; addKey(map, binaryExpression.getLeftOperand()); } } }
Show all usages of a translation key #21: use index
src/com/github/letsdrink/intellijplugin/TranslationUsagesFinder.java
Show all usages of a translation key #21: use index
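The new contents above narrow the search scope before visiting any files: getFilesToSearch(...) asks FileBasedIndex for the files indexed under TranslationCallIndex.KEY for the searched key and each of its parent keys, and builds the AnalysisScope from that collection instead of scanning the whole project as the old contents did. A plain-Java sketch of the key walk follows; the dotted parent-key rule is an assumption about TranslationUtils.getParentKey, whose implementation is not shown in this record.

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Sketch of the scope-narrowing idea: collect candidate files from a prebuilt
// key -> files index for the searched key and every ancestor key, since the
// visitor also accepts parent keys that are completed by concatenation.
public class TranslationIndexSketch {

  // Assumed parent-key rule: drop the last dot-separated segment.
  static String parentKey(String key) {
    int i = key.lastIndexOf('.');
    return i < 0 ? null : key.substring(0, i);
  }

  static Set<String> filesToSearch(Map<String, List<String>> index, String searchedKey) {
    Set<String> files = new LinkedHashSet<>();
    for (String key = searchedKey; key != null; key = parentKey(key)) {
      files.addAll(index.getOrDefault(key, List.of()));
    }
    return files;
  }

  public static void main(String[] args) {
    Map<String, List<String>> index = Map.of(
        "errors.form.email", List.of("UserController.php"),
        "errors.form", List.of("form_helpers.php"));
    // Searching for "errors.form.email" also pulls in files indexed under "errors.form".
    System.out.println(filesToSearch(index, "errors.form.email"));
  }
}

Restricting the AnalysisScope this way leaves the per-file visitor (map(...) and addKey(...)) unchanged while avoiding a whole-project walk for every usages query.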
Java
mit
9a753f5a023b6e63daf3fc2f606250f0e7594bc2
0
TulevaEE/onboarding-service,TulevaEE/onboarding-service,TulevaEE/onboarding-service,TulevaEE/onboarding-service
package ee.tuleva.onboarding.mandate.application; import static ee.tuleva.onboarding.currency.Currency.EUR; import ee.tuleva.onboarding.account.CashFlowService; import ee.tuleva.onboarding.auth.principal.Person; import ee.tuleva.onboarding.epis.cashflows.CashFlow; import ee.tuleva.onboarding.epis.mandate.ApplicationStatus; import ee.tuleva.onboarding.fund.ApiFundResponse; import ee.tuleva.onboarding.fund.FundRepository; import ee.tuleva.onboarding.locale.LocaleService; import ee.tuleva.onboarding.payment.Payment; import ee.tuleva.onboarding.payment.PaymentService; import java.math.BigDecimal; import java.math.RoundingMode; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.TreeMap; import java.util.function.Predicate; import java.util.stream.Stream; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import lombok.val; import org.springframework.stereotype.Service; @Service @RequiredArgsConstructor @Slf4j public class PaymentApplicationService { private static final String TULEVA_3RD_PILLAR_FUND_ISIN = "EE3600001707"; private static final Duration GRACE_PERIOD = Duration.ofMinutes(30); private final PaymentService paymentService; private final CashFlowService cashFlowService; private final FundRepository fundRepository; private final LocaleService localeService; public List<Application<PaymentApplicationDetails>> getPaymentApplications(Person person) { val payments = paymentService.getPayments(person); val cashFlowStatement = cashFlowService.getCashFlowStatement(person); val locale = localeService.getCurrentLocale(); val fund = fundRepository.findByIsin(TULEVA_3RD_PILLAR_FUND_ISIN); val apiFund = new ApiFundResponse(fund, locale); val applications = new ArrayList<Application<PaymentApplicationDetails>>(); val linkedCashFlow = getLinkedCashFlow(payments, cashFlowStatement.getTransactions()); log.info("Linked cash flow: {}", linkedCashFlow); for (val entry : linkedCashFlow.entrySet()) { val payment = entry.getKey(); val linkedCash = entry.getValue(); if (linkedCash.isEmpty() || !cashIsBalanced(linkedCash)) { if (hasRefund(linkedCash)) { log.info("Payment {} has a refund", payment.getId()); applications.add(createApplication(payment, apiFund, ApplicationStatus.FAILED)); } else { log.info("Cash is not balanced or no cash entries for {}", payment.getId()); applications.add(createApplication(payment, apiFund, ApplicationStatus.PENDING)); } } else if (cashIsBalanced(linkedCash)) { if (hasTulevaContribution(linkedCash)) { log.info("Payment {} has Tuleva fund contribution, marking as complete", payment.getId()); applications.add(createApplication(payment, apiFund, ApplicationStatus.COMPLETE)); } else { log.info("Payment {} does not have a Tuleva fund contribution yet", payment.getId()); applications.add(createApplication(payment, apiFund, ApplicationStatus.PENDING)); } } } return applications; } private boolean hasRefund(List<CashFlow> linkedCash) { return linkedCash.stream().anyMatch(CashFlow::isRefund); } private boolean hasTulevaContribution(List<CashFlow> linkedCash) { return linkedCash.stream() .filter(it -> Objects.equals(it.getIsin(), TULEVA_3RD_PILLAR_FUND_ISIN)) .anyMatch(CashFlow::isContribution); } private static boolean cashIsBalanced(List<CashFlow> linkedCash) { return linkedCash.stream() .filter(CashFlow::isCash) .map(CashFlow::getAmount) .reduce(BigDecimal.ZERO, BigDecimal::add) .compareTo(BigDecimal.ZERO) == 0; } 
private Map<Payment, List<CashFlow>> getLinkedCashFlow( List<Payment> payments, List<CashFlow> cashFlow) { val remainingCashFlow = new ArrayList<>(cashFlow.stream().sorted().toList()); val linkedCashFlow = new TreeMap<Payment, List<CashFlow>>(); for (Payment payment : payments.stream().sorted().toList()) { val payIn = linkedPayIn(remainingCashFlow, payment); val refund = linkedRefund(remainingCashFlow, payIn); val payOut = linkedPayOut(remainingCashFlow, payIn); val contribution = linkedContribution(remainingCashFlow, payOut); val paymentCashFlow = Stream.of(payIn, refund, payOut, contribution) .filter(Optional::isPresent) .map(Optional::get) .toList(); log.info("Payment {} has linked cash flow: {}", payment.getId(), paymentCashFlow); paymentCashFlow.forEach(remainingCashFlow::remove); linkedCashFlow.put(payment, paymentCashFlow); } return linkedCashFlow; } private Optional<CashFlow> linkedRefund( List<CashFlow> remainingCashFlow, Optional<CashFlow> payIn) { return payIn.flatMap( cashFlow -> remainingCashFlow.stream() .filter( isRefundOnOrAfterTimeWithAmount( cashFlow.getTime(), cashFlow.getAmount().negate())) .findFirst()); } private Predicate<CashFlow> isRefundOnOrAfterTimeWithAmount(Instant time, BigDecimal amount) { return ((Predicate<CashFlow>) CashFlow::isRefund) .and((cashFlow -> !cashFlow.getTime().isBefore(time))) .and(hasSameAmount(amount)); } private Optional<CashFlow> linkedContribution( List<CashFlow> remainingCashFlow, Optional<CashFlow> payOut) { return payOut.flatMap( cashFlow -> remainingCashFlow.stream() .filter( isContributionOnOrAfterTimeWithAmount( cashFlow.getPriceTime(), cashFlow.getAmount().negate())) .findFirst()); } private Optional<CashFlow> linkedPayOut( List<CashFlow> remainingCashFlow, Optional<CashFlow> payIn) { return payIn.flatMap( cashFlow -> remainingCashFlow.stream() .filter( isCashAfterTimeWithAmount( cashFlow.getPriceTime(), cashFlow.getAmount().negate())) .findFirst()); } private Optional<CashFlow> linkedPayIn(List<CashFlow> remainingCashFlow, Payment payment) { return remainingCashFlow.stream() .filter(isCashAfterGraceTimeWithAmount(payment.getCreatedTime(), payment.getAmount())) .findFirst(); } private Predicate<CashFlow> isCashAfterGraceTimeWithAmount(Instant time, BigDecimal amount) { return ((Predicate<CashFlow>) CashFlow::isCash) .and(isAfterTimeWithGrace(time)) .and(hasSameAmount(amount)); } private Predicate<CashFlow> isCashAfterTimeWithAmount(Instant time, BigDecimal amount) { return ((Predicate<CashFlow>) CashFlow::isCash) .and((cashFlow -> !cashFlow.getTime().isBefore(time))) .and(hasSameAmount(amount)); } private Predicate<CashFlow> isContributionOnOrAfterTimeWithAmount( Instant time, BigDecimal amount) { return ((Predicate<CashFlow>) CashFlow::isContribution) .and((cashFlow -> !cashFlow.getTime().isBefore(time))) .and(hasSameAmount(amount)); } private Predicate<CashFlow> hasSameAmount(BigDecimal amount) { return (cashFlow) -> cashFlow .getAmount() .setScale(2, RoundingMode.HALF_UP) .subtract(amount) .abs() .compareTo(new BigDecimal("0.01")) <= 0; } private Predicate<CashFlow> isAfterTimeWithGrace(Instant time) { return (cashFlow) -> cashFlow.isAfter(time.minus(GRACE_PERIOD)); } private Application<PaymentApplicationDetails> createApplication( Payment payment, ApiFundResponse apiFund, ApplicationStatus status) { return Application.<PaymentApplicationDetails>builder() .id(payment.getId()) .creationTime(payment.getCreatedTime()) .status(status) .details( PaymentApplicationDetails.builder() .amount(payment.getAmount()) .currency(EUR) 
.targetFund(apiFund) .build()) .build(); } }
src/main/java/ee/tuleva/onboarding/mandate/application/PaymentApplicationService.java
package ee.tuleva.onboarding.mandate.application; import static ee.tuleva.onboarding.currency.Currency.EUR; import ee.tuleva.onboarding.account.CashFlowService; import ee.tuleva.onboarding.auth.principal.Person; import ee.tuleva.onboarding.epis.cashflows.CashFlow; import ee.tuleva.onboarding.epis.mandate.ApplicationStatus; import ee.tuleva.onboarding.fund.ApiFundResponse; import ee.tuleva.onboarding.fund.FundRepository; import ee.tuleva.onboarding.locale.LocaleService; import ee.tuleva.onboarding.payment.Payment; import ee.tuleva.onboarding.payment.PaymentService; import java.math.BigDecimal; import java.math.RoundingMode; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.TreeMap; import java.util.function.Predicate; import java.util.stream.Stream; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import lombok.val; import org.springframework.stereotype.Service; @Service @RequiredArgsConstructor @Slf4j public class PaymentApplicationService { private static final String TULEVA_3RD_PILLAR_FUND_ISIN = "EE3600001707"; private static final Duration GRACE_PERIOD = Duration.ofMinutes(30); private final PaymentService paymentService; private final CashFlowService cashFlowService; private final FundRepository fundRepository; private final LocaleService localeService; public List<Application<PaymentApplicationDetails>> getPaymentApplications(Person person) { val payments = paymentService.getPayments(person); val cashFlowStatement = cashFlowService.getCashFlowStatement(person); val locale = localeService.getCurrentLocale(); val fund = fundRepository.findByIsin(TULEVA_3RD_PILLAR_FUND_ISIN); val apiFund = new ApiFundResponse(fund, locale); val applications = new ArrayList<Application<PaymentApplicationDetails>>(); log.info("All cash flow transactions: {}", cashFlowStatement.getTransactions()); val linkedCashFlow = getLinkedCashFlow(payments, cashFlowStatement.getTransactions()); log.info("Linked cash flow: {}", linkedCashFlow); for (val entry : linkedCashFlow.entrySet()) { val payment = entry.getKey(); val linkedCash = entry.getValue(); if (linkedCash.isEmpty() || !cashIsBalanced(linkedCash)) { if (hasRefund(linkedCash)) { log.info("Payment {} has a refund", payment.getId()); applications.add(createApplication(payment, apiFund, ApplicationStatus.FAILED)); } else { log.info("Cash is not balanced or no cash entries for {}", payment.getId()); applications.add(createApplication(payment, apiFund, ApplicationStatus.PENDING)); } } else if (cashIsBalanced(linkedCash)) { if (hasTulevaContribution(linkedCash)) { log.info("Payment {} has Tuleva fund contribution, marking as complete", payment.getId()); applications.add(createApplication(payment, apiFund, ApplicationStatus.COMPLETE)); } else { log.info("Payment {} does not have a Tuleva fund contribution yet", payment.getId()); applications.add(createApplication(payment, apiFund, ApplicationStatus.PENDING)); } } } return applications; } private boolean hasRefund(List<CashFlow> linkedCash) { return linkedCash.stream().anyMatch(CashFlow::isRefund); } private boolean hasTulevaContribution(List<CashFlow> linkedCash) { return linkedCash.stream() .filter(it -> Objects.equals(it.getIsin(), TULEVA_3RD_PILLAR_FUND_ISIN)) .anyMatch(CashFlow::isContribution); } private static boolean cashIsBalanced(List<CashFlow> linkedCash) { return linkedCash.stream() .filter(CashFlow::isCash) .map(CashFlow::getAmount) 
.reduce(BigDecimal.ZERO, BigDecimal::add) .compareTo(BigDecimal.ZERO) == 0; } private Map<Payment, List<CashFlow>> getLinkedCashFlow( List<Payment> payments, List<CashFlow> cashFlow) { val remainingCashFlow = new ArrayList<>(cashFlow.stream().sorted().toList()); val linkedCashFlow = new TreeMap<Payment, List<CashFlow>>(); for (Payment payment : payments.stream().sorted().toList()) { val payIn = linkedPayIn(remainingCashFlow, payment); val refund = linkedRefund(remainingCashFlow, payIn); val payOut = linkedPayOut(remainingCashFlow, payIn); val contribution = linkedContribution(remainingCashFlow, payOut); val paymentCashFlow = Stream.of(payIn, refund, payOut, contribution) .filter(Optional::isPresent) .map(Optional::get) .toList(); log.info("Payment {} has linked cash flow: {}", payment.getId(), paymentCashFlow); paymentCashFlow.forEach(remainingCashFlow::remove); linkedCashFlow.put(payment, paymentCashFlow); } return linkedCashFlow; } private Optional<CashFlow> linkedRefund( List<CashFlow> remainingCashFlow, Optional<CashFlow> payIn) { return payIn.flatMap( cashFlow -> remainingCashFlow.stream() .filter( isRefundOnOrAfterTimeWithAmount( cashFlow.getTime(), cashFlow.getAmount().negate())) .findFirst()); } private Predicate<CashFlow> isRefundOnOrAfterTimeWithAmount(Instant time, BigDecimal amount) { return ((Predicate<CashFlow>) CashFlow::isRefund) .and((cashFlow -> !cashFlow.getTime().isBefore(time))) .and(hasSameAmount(amount)); } private Optional<CashFlow> linkedContribution( List<CashFlow> remainingCashFlow, Optional<CashFlow> payOut) { return payOut.flatMap( cashFlow -> remainingCashFlow.stream() .filter( isContributionOnOrAfterTimeWithAmount( cashFlow.getPriceTime(), cashFlow.getAmount().negate())) .findFirst()); } private Optional<CashFlow> linkedPayOut( List<CashFlow> remainingCashFlow, Optional<CashFlow> payIn) { return payIn.flatMap( cashFlow -> remainingCashFlow.stream() .filter( isCashAfterTimeWithAmount( cashFlow.getPriceTime(), cashFlow.getAmount().negate())) .findFirst()); } private Optional<CashFlow> linkedPayIn(List<CashFlow> remainingCashFlow, Payment payment) { return remainingCashFlow.stream() .filter(isCashAfterGraceTimeWithAmount(payment.getCreatedTime(), payment.getAmount())) .findFirst(); } private Predicate<CashFlow> isCashAfterGraceTimeWithAmount(Instant time, BigDecimal amount) { return ((Predicate<CashFlow>) CashFlow::isCash) .and(isAfterTimeWithGrace(time)) .and(hasSameAmount(amount)); } private Predicate<CashFlow> isCashAfterTimeWithAmount(Instant time, BigDecimal amount) { return ((Predicate<CashFlow>) CashFlow::isCash) .and((cashFlow -> !cashFlow.getTime().isBefore(time))) .and(hasSameAmount(amount)); } private Predicate<CashFlow> isContributionOnOrAfterTimeWithAmount( Instant time, BigDecimal amount) { return ((Predicate<CashFlow>) CashFlow::isContribution) .and((cashFlow -> !cashFlow.getTime().isBefore(time))) .and(hasSameAmount(amount)); } private Predicate<CashFlow> hasSameAmount(BigDecimal amount) { return (cashFlow) -> cashFlow .getAmount() .setScale(2, RoundingMode.HALF_UP) .subtract(amount) .abs() .compareTo(new BigDecimal("0.01")) <= 0; } private Predicate<CashFlow> isAfterTimeWithGrace(Instant time) { return (cashFlow) -> cashFlow.isAfter(time.minus(GRACE_PERIOD)); } private Application<PaymentApplicationDetails> createApplication( Payment payment, ApiFundResponse apiFund, ApplicationStatus status) { return Application.<PaymentApplicationDetails>builder() .id(payment.getId()) .creationTime(payment.getCreatedTime()) .status(status) .details( 
PaymentApplicationDetails.builder() .amount(payment.getAmount()) .currency(EUR) .targetFund(apiFund) .build()) .build(); } }
Don't log entire cashflow
src/main/java/ee/tuleva/onboarding/mandate/application/PaymentApplicationService.java
Don't log entire cashflow
Java
mit
bbbda35333b232122f76b9010ca9b752591a1495
0
hendrixjoseph/FamilyTree,hendrixjoseph/FamilyTree,hendrixjoseph/FamilyTree
/* * The MIT License (MIT) * * View the full license at: * https://github.com/hendrixjoseph/FamilyTree/blob/master/LICENSE.md * * Copyright (c) 2015 Joseph Hendrix * * Hosted on GitHub at https://github.com/hendrixjoseph/FamilyTree * */ package edu.wright.hendrix11.familyTree.bean.place; import edu.wright.hendrix11.familyTree.bean.AbstractBean; import edu.wright.hendrix11.familyTree.dataBean.DataBean; import edu.wright.hendrix11.familyTree.entity.place.County; import javax.annotation.PostConstruct; import javax.ejb.EJB; import javax.faces.view.ViewScoped; import javax.inject.Named; import java.io.Serializable; /** * @author Joe Hendrix */ @Named @ViewScoped public class CountyBean extends AbstractBean<County> implements Serializable { @EJB private DataBean<County, Integer> countyDataBean; /** * */ @Override @PostConstruct protected void initialize() { countyDataBean.initialize(County.class); super.initialize(countyDataBean); } }
src/main/java/edu/wright/hendrix11/familyTree/bean/place/CountyBean.java
/* * The MIT License (MIT) * * View the full license at: * https://github.com/hendrixjoseph/FamilyTree/blob/master/LICENSE.md * * Copyright (c) 2015 Joseph Hendrix * * Hosted on GitHub at https://github.com/hendrixjoseph/FamilyTree * */ package edu.wright.hendrix11.familyTree.bean.place; import edu.wright.hendrix11.familyTree.bean.AbstractBean; import edu.wright.hendrix11.familyTree.dataBean.DataBean; import edu.wright.hendrix11.familyTree.entity.place.County; import javax.annotation.PostConstruct; import javax.ejb.EJB; import javax.faces.view.ViewScoped; import javax.inject.Named; import java.io.Serializable; /** * @author Joe */ @Named @ViewScoped public class CountyBean extends AbstractBean<County> implements Serializable { @EJB private DataBean<County, Integer> countyDataBean; /** * */ @Override @PostConstruct protected void initialize() { countyDataBean.initialize(County.class); super.initialize(countyDataBean); } }
Update CountyBean.java
src/main/java/edu/wright/hendrix11/familyTree/bean/place/CountyBean.java
Update CountyBean.java
Java
mit
c517e917d97d0d245d34fa9bad8e43667425647a
0
mizool/mizool
/* * Copyright 2017-2019 incub8 Software Labs GmbH * Copyright 2017-2019 protel Hotelsoftware GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.mizool.core; import java.util.Optional; import java.util.function.Function; import java.util.stream.Stream; import lombok.NonNull; import lombok.experimental.UtilityClass; import com.github.mizool.core.exception.DataInconsistencyException; import com.github.mizool.core.exception.ObjectNotFoundException; import com.github.mizool.core.exception.UnprocessableEntityException; @UtilityClass public class Optionals { /** * Used when a user directly requests an object, resulting in an ObjectNotFoundException if it does not exist. */ public <T> T unwrapUserRequestedObject(@NonNull Optional<T> wrapped, @NonNull Class<T> classOfT) { return wrapped.orElseThrow(() -> new ObjectNotFoundException(classOfT.getSimpleName() + " not found")); } /** * Used when a user directly requests an object, resulting in an ObjectNotFoundException if it does not exist. */ public <T> Function<Optional<T>, T> unwrapUserRequestedObject(@NonNull Class<T> classOfT) { return optional -> unwrapUserRequestedObject(optional, classOfT); } /** * Used when a user directly requests an object, resulting in an ObjectNotFoundException if it does not exist. * * @deprecated Use {@link Optionals#unwrapUserRequestedObject(Optional, Class)} instead. */ @Deprecated public <T> T unwrapUserRequestedObject( @NonNull com.google.common.base.Optional<T> wrapped, @NonNull Class<T> classOfT) { if (!wrapped.isPresent()) { throw new ObjectNotFoundException(classOfT.getSimpleName() + " not found"); } return wrapped.get(); } /** * Used when an object can be reasonably expected to exist, resulting in a DataInconsistencyException if it does not * exist. */ public <T> T unwrapRequiredObject(@NonNull Optional<T> wrapped, @NonNull Class<T> classOfT) { return wrapped.orElseThrow(() -> new DataInconsistencyException(classOfT.getSimpleName() + " not found")); } /** * Used when an object can be reasonably expected to exist, resulting in a DataInconsistencyException if it does not * exist. */ public <T> Function<Optional<T>, T> unwrapRequiredObject(@NonNull Class<T> classOfT) { return optional -> unwrapRequiredObject(optional, classOfT); } /** * Used when an object can be reasonably expected to exist, resulting in a DataInconsistencyException if it does not * exist. * * @deprecated Use {@link Optionals#unwrapRequiredObject(Optional, Class)} instead. */ @Deprecated public <T> T unwrapRequiredObject(@NonNull com.google.common.base.Optional<T> wrapped, @NonNull Class<T> classOfT) { if (!wrapped.isPresent()) { throw new DataInconsistencyException(classOfT.getSimpleName() + " not found"); } return wrapped.get(); } /** * Used when a user-submitted entity refers to another object, resulting in a UnprocessableEntityException if that * object does not exist. 
*/ public <T> T unwrapUserMentionedObject(@NonNull Optional<T> wrapped, @NonNull Class<T> classOfT) { return wrapped.orElseThrow(() -> new UnprocessableEntityException(classOfT.getSimpleName() + " not found")); } /** * Used when a user-submitted entity refers to another object, resulting in a UnprocessableEntityException if that * object does not exist. */ public <T> Function<Optional<T>, T> unwrapUserMentionedObject(@NonNull Class<T> classOfT) { return optional -> unwrapUserMentionedObject(optional, classOfT); } /** * Used when a user-submitted entity refers to another object, resulting in a UnprocessableEntityException if that * object does not exist. * * @deprecated Use {@link Optionals#unwrapUserMentionedObject(Optional, Class)} instead. */ @Deprecated public <T> T unwrapUserMentionedObject( @NonNull com.google.common.base.Optional<T> wrapped, @NonNull Class<T> classOfT) { if (!wrapped.isPresent()) { throw new UnprocessableEntityException(classOfT.getSimpleName() + " not found"); } return wrapped.get(); } /** * Used in streams to {@linkplain Stream#flatMap(Function) flat-map} each {@link Optional} to its value if * present.<br> * <br> * This method is intended to be used as follows: * <pre>{@code * .flatMap(Optionals::streamPresentValue) * }</pre> * Using this method is equivalent of chaining {@link Optional#isPresent()} and {@link Optional#get()} like this: * <pre>{@code * .filter(Optional::isPresent) * .map(Optional::get) * }</pre> */ public <T> Stream<T> streamPresentValue(@NonNull Optional<T> optional) { return optional.map(Stream::of).orElseGet(Stream::empty); } }
core/src/main/java/com/github/mizool/core/Optionals.java
/* * Copyright 2017-2019 incub8 Software Labs GmbH * Copyright 2017-2019 protel Hotelsoftware GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.mizool.core; import java.util.Optional; import java.util.function.Function; import java.util.stream.Stream; import lombok.NonNull; import lombok.experimental.UtilityClass; import com.github.mizool.core.exception.DataInconsistencyException; import com.github.mizool.core.exception.ObjectNotFoundException; import com.github.mizool.core.exception.UnprocessableEntityException; @UtilityClass public class Optionals { /** * Used when a user directly requests an object, resulting in an ObjectNotFoundException if it does not exist. */ public <T> T unwrapUserRequestedObject(@NonNull Optional<T> wrapped, @NonNull Class<T> classOfT) { return wrapped.orElseThrow(() -> new ObjectNotFoundException(classOfT.getSimpleName() + " not found")); } /** * Used when a user directly requests an object, resulting in an ObjectNotFoundException if it does not exist. * * @deprecated Use {@link Optionals#unwrapUserRequestedObject(Optional, Class)} instead. */ @Deprecated public <T> T unwrapUserRequestedObject( @NonNull com.google.common.base.Optional<T> wrapped, @NonNull Class<T> classOfT) { if (!wrapped.isPresent()) { throw new ObjectNotFoundException(classOfT.getSimpleName() + " not found"); } return wrapped.get(); } /** * Used when an object can be reasonably expected to exist, resulting in a DataInconsistencyException if it does not * exist. */ public <T> T unwrapRequiredObject(@NonNull Optional<T> wrapped, @NonNull Class<T> classOfT) { return wrapped.orElseThrow(() -> new DataInconsistencyException(classOfT.getSimpleName() + " not found")); } /** * Used when an object can be reasonably expected to exist, resulting in a DataInconsistencyException if it does not * exist. * * @deprecated Use {@link Optionals#unwrapRequiredObject(Optional, Class)} instead. */ @Deprecated public <T> T unwrapRequiredObject(@NonNull com.google.common.base.Optional<T> wrapped, @NonNull Class<T> classOfT) { if (!wrapped.isPresent()) { throw new DataInconsistencyException(classOfT.getSimpleName() + " not found"); } return wrapped.get(); } /** * Used when a user-submitted entity refers to another object, resulting in a UnprocessableEntityException if that * object does not exist. */ public <T> T unwrapUserMentionedObject(@NonNull Optional<T> wrapped, @NonNull Class<T> classOfT) { return wrapped.orElseThrow(() -> new UnprocessableEntityException(classOfT.getSimpleName() + " not found")); } /** * Used when a user-submitted entity refers to another object, resulting in a UnprocessableEntityException if that * object does not exist. * * @deprecated Use {@link Optionals#unwrapUserMentionedObject(Optional, Class)} instead. 
*/ @Deprecated public <T> T unwrapUserMentionedObject( @NonNull com.google.common.base.Optional<T> wrapped, @NonNull Class<T> classOfT) { if (!wrapped.isPresent()) { throw new UnprocessableEntityException(classOfT.getSimpleName() + " not found"); } return wrapped.get(); } /** * Used in streams to {@linkplain Stream#flatMap(Function) flat-map} each {@link Optional} to its value if * present.<br> * <br> * This method is intended to be used as follows: * <pre>{@code * .flatMap(Optionals::streamPresentValue) * }</pre> * Using this method is equivalent of chaining {@link Optional#isPresent()} and {@link Optional#get()} like this: * <pre>{@code * .filter(Optional::isPresent) * .map(Optional::get) * }</pre> */ public <T> Stream<T> streamPresentValue(@NonNull Optional<T> optional) { return optional.map(Stream::of).orElseGet(Stream::empty); } }
added convenience method for streams (MJX-1998)
core/src/main/java/com/github/mizool/core/Optionals.java
added convenience method for streams (MJX-1998)
Java
mit
52f5ab85c08e7065ea5e759bafd596e12b5befdc
0
avalax/FitBuddy
package de.avalax.fitbuddy.presentation; import android.app.Application; import javax.inject.Singleton; import dagger.Component; import de.avalax.fitbuddy.presentation.edit.exercise.EditExerciseFragment; import de.avalax.fitbuddy.presentation.edit.workout.EditWorkoutActivity; import de.avalax.fitbuddy.presentation.edit.workout.ExerciseListFragment; import de.avalax.fitbuddy.presentation.summary.FinishedWorkoutDetailFragment; import de.avalax.fitbuddy.presentation.summary.FinishedWorkoutListFragment; import de.avalax.fitbuddy.presentation.welcome_screen.WorkoutListFragment; import de.avalax.fitbuddy.presentation.workout.ExerciseFragment; public class FitbuddyApplication extends Application { public static final int ADD_WORKOUT = 1; public static final int EDIT_WORKOUT = 2; public static final int ADD_EXERCISE = 3; public static final int EDIT_EXERCISE = 4; public static final int ADD_SET = 5; public static final int EDIT_SET = 6; public static final int FINISHED_WORKOUT_DETAILS = 7; public static final int EDIT_REPS = 8; public static final int EDIT_WEIGHT = 9; private ApplicationComponent component; @Override public void onCreate() { super.onCreate(); component = createComponent(); } protected ApplicationComponent createComponent() { return DaggerFitbuddyApplication_ApplicationComponent.builder() .fitbuddyModule(new FitbuddyModule(this)) .build(); } public ApplicationComponent getComponent() { return component; } @Singleton @Component(modules = FitbuddyModule.class) public interface ApplicationComponent { void inject(EditWorkoutActivity editWorkoutActivity); void inject(ExerciseFragment exerciseFragment); void inject(FinishedWorkoutListFragment finishedWorkoutListFragment); void inject(WorkoutListFragment workoutListFragment); void inject(MainActivity mainActivity); void inject(ExerciseListFragment exerciseListFragment); void inject(EditExerciseFragment setListFragment); void inject(FinishedWorkoutDetailFragment finishedWorkoutDetailFragment); } }
src/main/java/de/avalax/fitbuddy/presentation/FitbuddyApplication.java
package de.avalax.fitbuddy.presentation; import android.app.Application; import javax.inject.Singleton; import dagger.Component; import de.avalax.fitbuddy.presentation.edit.exercise.EditExerciseFragment; import de.avalax.fitbuddy.presentation.edit.set.EditSetActivity; import de.avalax.fitbuddy.presentation.edit.set.EditSetFragment; import de.avalax.fitbuddy.presentation.edit.workout.EditWorkoutActivity; import de.avalax.fitbuddy.presentation.edit.workout.ExerciseListFragment; import de.avalax.fitbuddy.presentation.summary.FinishedWorkoutDetailFragment; import de.avalax.fitbuddy.presentation.summary.FinishedWorkoutListFragment; import de.avalax.fitbuddy.presentation.welcome_screen.WorkoutListFragment; import de.avalax.fitbuddy.presentation.workout.ExerciseFragment; public class FitbuddyApplication extends Application { public static final int ADD_WORKOUT = 1; public static final int EDIT_WORKOUT = 2; public static final int ADD_EXERCISE = 3; public static final int EDIT_EXERCISE = 4; public static final int ADD_SET = 5; public static final int EDIT_SET = 6; public static final int FINISHED_WORKOUT_DETAILS = 7; public static final int EDIT_REPS = 8; public static final int EDIT_WEIGHT = 9; private ApplicationComponent component; @Override public void onCreate() { super.onCreate(); component = createComponent(); } protected ApplicationComponent createComponent() { return DaggerFitbuddyApplication_ApplicationComponent.builder() .fitbuddyModule(new FitbuddyModule(this)) .build(); } public ApplicationComponent getComponent() { return component; } @Singleton @Component(modules = FitbuddyModule.class) public interface ApplicationComponent { void inject(EditWorkoutActivity editWorkoutActivity); void inject(ExerciseFragment exerciseFragment); void inject(FinishedWorkoutListFragment finishedWorkoutListFragment); void inject(WorkoutListFragment workoutListFragment); void inject(MainActivity mainActivity); void inject(ExerciseListFragment exerciseListFragment); void inject(EditExerciseFragment setListFragment); void inject(FinishedWorkoutDetailFragment finishedWorkoutDetailFragment); } }
removed unused imports
src/main/java/de/avalax/fitbuddy/presentation/FitbuddyApplication.java
removed unused imports
Java
agpl-3.0
f3da5aa8ad40a3dd129e33c99d50964a8c89f307
0
USAID-DELIVER-PROJECT/elmis,kelvinmbwilo/vims,USAID-DELIVER-PROJECT/elmis,kelvinmbwilo/vims,kelvinmbwilo/vims,vimsvarcode/elmis,vimsvarcode/elmis,USAID-DELIVER-PROJECT/elmis,OpenLMIS/open-lmis,USAID-DELIVER-PROJECT/elmis,joshzamor/open-lmis,jasolangi/jasolangi.github.io,kelvinmbwilo/vims,jasolangi/jasolangi.github.io,jasolangi/jasolangi.github.io,OpenLMIS/open-lmis,vimsvarcode/elmis,vimsvarcode/elmis,joshzamor/open-lmis,OpenLMIS/open-lmis,vimsvarcode/elmis,joshzamor/open-lmis,OpenLMIS/open-lmis,joshzamor/open-lmis
/* * This program is part of the OpenLMIS logistics management information system platform software. * Copyright © 2013 VillageReach * * This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. *   * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Affero General Public License for more details. * You should have received a copy of the GNU Affero General Public License along with this program.  If not, see http://www.gnu.org/licenses.  For additional information contact info@OpenLMIS.org.  */ package org.openlmis.pageobjects; import org.openlmis.UiUtils.TestWebDriver; import org.openlmis.pageobjects.edi.ConfigureEDIPage; import org.openlmis.pageobjects.edi.ConvertOrderPage; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.PageFactory; import org.openqa.selenium.support.pagefactory.AjaxElementLocatorFactory; import java.io.IOException; import static com.thoughtworks.selenium.SeleneseTestBase.assertTrue; import static com.thoughtworks.selenium.SeleneseTestNgHelper.assertEquals; import static org.openqa.selenium.support.How.*; public class HomePage extends Page { @FindBy(how = LINK_TEXT, using = "Logout") private static WebElement logoutLink = null; @FindBy(how = XPATH, using = "//div[@class='user-info ng-scope']/strong") private static WebElement loggedInUserLabel = null; @FindBy(how = ID, using = "requisitions-menu") private static WebElement requisitionMenuItem = null; @FindBy(how = ID, using = "distributions-menu") private static WebElement distributionsMenuItem = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Program Product ISA')]") private static WebElement programProductISAMenuItem = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Home')]") private static WebElement homeMenuItem = null; @FindBy(how = ID, using = "reports-menu") private static WebElement reportMenuItem = null; @FindBy(how = XPATH, using = "//h2/span[contains(text(),'Reports')]") private static WebElement reportsTitle = null; @FindBy(how = ID, using = "orders-menu") private static WebElement ordersMenuItem = null; @FindBy(how = ID, using = "approveRnr") private static WebElement approveLink = null; @FindBy(how = ID, using = "administration-menu") private static WebElement AdministrationMenuItem = null; @FindBy(how = ID, using = "manage-option") private static WebElement manageFacilityMenuItem = null; @FindBy(how = ID, using = "convertToOrderRnr") private static WebElement convertToOrderMenuItem = null; @FindBy(how = ID, using = "manage-distribution") private static WebElement manageDistributionMenuItem = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Distributions')]") private static WebElement offlineDistributions = null; @FindBy(how = XPATH, using = "//a[contains(text(),'View Orders')]") private static WebElement viewOrdersMenuItem = null; @FindBy(how = ID, using = "viewRnr") private static WebElement viewRequisitionMenuItem = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'View Requisitions')]") private static WebElement viewRequisitionHeader = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'Convert Requisitions to Order')]") private static 
WebElement convertToOrderHeader = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'Manage a Distribution')]") private static WebElement manageDistributionHeader = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'View Orders')]") private static WebElement viewOrdersHeader = null; @FindBy(how = ID, using = "add-new-facility") private static WebElement createFacility = null; @FindBy(how = XPATH, using = "//div[@class='ng-scope']/div[@ng-hide='facility.id']/h2") private static WebElement facilityHeader = null; @FindBy(how = LINK_TEXT, using = "Configure") private static WebElement TemplateConfigTab = null; @FindBy(how = LINK_TEXT, using = "R & R Template") private static WebElement RnRTemplateConfigTab = null; @FindBy(how = LINK_TEXT, using = "EDI File") private static WebElement ediFileTab = null; @FindBy(how = LINK_TEXT, using = "Regimen Template") private static WebElement RegimenTemplateConfigTab = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'Regimen Template')]") private static WebElement RegimenTemplateHeader = null; @FindBy(how = ID, using = "requisitions-menu") private static WebElement requisitionsLink = null; @FindBy(how = XPATH, using = "//div[@class='submenu']") private static WebElement SubMenuItem = null; @FindBy(how = ID, using = "createRnr") private static WebElement createLink = null; @FindBy(how = XPATH, using = "//input[@id='myFacilityRnr']") private static WebElement myFacilityRadioButton = null; @FindBy(how = LINK_TEXT, using = "Manage") private static WebElement manageLink = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Upload')]") private static WebElement uploadLink = null; @FindBy(how = XPATH, using = "//input[@ng-click='initRnr(row.entity)']") private static WebElement proceedButton = null; @FindBy(how = ID, using = "facility-tab") private static WebElement facilitiesTab = null; @FindBy(how = ID, using = "role-tab") private static WebElement rolesTab = null; @FindBy(how = ID, using = "schedule-tab") private static WebElement schedulesTab = null; @FindBy(how = ID, using = "user-tab") private static WebElement usersTab = null; @FindBy(how = XPATH, using = "//div[@class='ngCellText ng-scope col1 colt1']/span") private static WebElement startDate = null; @FindBy(how = XPATH, using = "//div[@class='ngCellText ng-scope col2 colt2']/span") private static WebElement endDate = null; @FindBy(how = ID, using = "saveSuccessMsgDiv") private static WebElement errorMsg = null; @FindBy(how = ID, using = "program") private static WebElement selectProgramSelectBox = null; @FindBy(how = ID, using = "rnrType") private static WebElement rnrTypeSelectBox = null; @FindBy(how = XPATH, using = "//div/div/div[1]/div[2]/div/span") private static WebElement firstPeriodLabel = null; @FindBy(how = XPATH, using = "//input[@id='supervisedFacilityRnr']") private static WebElement supervisedFacilityRadioButton = null; @FindBy(how = XPATH, using = "//select[@id='programListSupervisedFacility']") private static WebElement ProgramDropDownSupervisedFacility = null; @FindBy(how = ID, using = "facilityList") private static WebElement facilityDropDown = null; @FindBy(how = XPATH, using = "//select[@id='programListMyFacility']") private static WebElement programDropDown = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Manage POD')]") private static WebElement viewManagePODMenuItem = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'Manage Proof of Delivery')]") private static WebElement viewManagePODHeader = null; public HomePage(TestWebDriver 
driver) throws IOException { super(driver); PageFactory.initElements(new AjaxElementLocatorFactory(TestWebDriver.getDriver(), 10), this); testWebDriver.setImplicitWait(10); } public WebElement getLogoutLink() { return logoutLink; } public LoginPage logout(String baseUrl) throws IOException { testWebDriver.waitForElementToAppear(logoutLink); logoutLink.click(); return new LoginPage(testWebDriver, baseUrl); } public ManageFacilityPage navigateManageFacility() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageFacilityMenuItem); testWebDriver.keyPress(manageFacilityMenuItem); return ManageFacilityPage.getInstance(testWebDriver); } public void clickCreateFacilityButton() { testWebDriver.waitForElementToAppear(createFacility); testWebDriver.sleep(1000); testWebDriver.keyPress(createFacility); } public void verifyHeader(String headingToVerify) { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(facilityHeader); assertEquals(facilityHeader.getText().trim(), headingToVerify); } public void verifyAdminTabs() { testWebDriver.waitForElementToAppear(facilitiesTab); assertTrue(facilitiesTab.isDisplayed()); assertTrue(rolesTab.isDisplayed()); assertTrue(schedulesTab.isDisplayed()); assertTrue(usersTab.isDisplayed()); } public TemplateConfigPage selectProgramToConfigTemplate(String programme) { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(TemplateConfigTab); testWebDriver.keyPress(TemplateConfigTab); testWebDriver.waitForElementToAppear(RnRTemplateConfigTab); testWebDriver.keyPress(RnRTemplateConfigTab); testWebDriver.waitForElementToAppear(testWebDriver.getElementById(programme)); testWebDriver.getElementById(programme).click(); return new TemplateConfigPage(testWebDriver); } public ConfigureEDIPage navigateEdiScreen() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(TemplateConfigTab); testWebDriver.keyPress(TemplateConfigTab); testWebDriver.waitForElementToAppear(ediFileTab); testWebDriver.keyPress(ediFileTab); return new ConfigureEDIPage(testWebDriver); } public RegimenTemplateConfigPage navigateToRegimenConfigTemplate() { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(TemplateConfigTab); testWebDriver.keyPress(TemplateConfigTab); testWebDriver.waitForElementToAppear(RegimenTemplateConfigTab); testWebDriver.keyPress(RegimenTemplateConfigTab); testWebDriver.waitForElementToAppear(RegimenTemplateHeader); return new RegimenTemplateConfigPage(testWebDriver); } public String navigateAndInitiateRnr(String program) throws IOException { navigateRnr(); myFacilityRadioButton.click(); testWebDriver.sleep(2000); testWebDriver.waitForElementToAppear(programDropDown); testWebDriver.selectByVisibleText(programDropDown, program); testWebDriver.waitForElementToAppear(startDate); return (startDate.getText().trim() + " - " + endDate.getText().trim()); } public void navigateInitiateRnRScreenAndSelectingRequiredFields(String program, String type) throws IOException { navigateRnr(); myFacilityRadioButton.click(); testWebDriver.sleep(500); testWebDriver.waitForElementToAppear(programDropDown); testWebDriver.selectByVisibleText(programDropDown, program); 
testWebDriver.selectByVisibleText(rnrTypeSelectBox, type); testWebDriver.sleep(1000); } public void clickRequisitionSubMenuItem() throws IOException { testWebDriver.waitForElementToAppear(requisitionsLink); testWebDriver.keyPress(requisitionsLink); } public void verifySubMenuItems(String[] expectedSubMenuItem) throws IOException { String[] subMenuItem = SubMenuItem.getText().split("\n"); assertEquals(subMenuItem, expectedSubMenuItem); } public InitiateRnRPage clickProceed() throws IOException { testWebDriver.setImplicitWait(100); testWebDriver.waitForElementToAppear(proceedButton); proceedButton.click(); testWebDriver.sleep(1000); return new InitiateRnRPage(testWebDriver); } public ViewRequisitionPage navigateViewRequisition() throws IOException { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(requisitionMenuItem); testWebDriver.keyPress(requisitionMenuItem); testWebDriver.waitForElementToAppear(viewRequisitionMenuItem); testWebDriver.keyPress(viewRequisitionMenuItem); testWebDriver.waitForElementToAppear(viewRequisitionHeader); return new ViewRequisitionPage(testWebDriver); } public ReportPage navigateReportScreen() throws IOException { testWebDriver.waitForElementToAppear(reportMenuItem); testWebDriver.keyPress(reportMenuItem); testWebDriver.waitForElementToAppear(reportsTitle); return new ReportPage(testWebDriver); } public ManageFacilityPage navigateSearchFacility() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageLink); testWebDriver.keyPress(manageLink); testWebDriver.waitForElementToAppear(facilitiesTab); facilitiesTab.click(); return ManageFacilityPage.getInstance(testWebDriver); } public RolesPage navigateRoleAssignments() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageLink); testWebDriver.keyPress(manageLink); testWebDriver.waitForElementToAppear(rolesTab); testWebDriver.keyPress(rolesTab); return new RolesPage(testWebDriver); } public UploadPage navigateUploads() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(uploadLink); uploadLink.click(); return new UploadPage(testWebDriver); } public ManageSchedulePage navigateToSchedule() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageLink); testWebDriver.keyPress(manageLink); testWebDriver.waitForElementToAppear(schedulesTab); schedulesTab.click(); return new ManageSchedulePage(testWebDriver); } public UserPage navigateToUser() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageLink); testWebDriver.keyPress(manageLink); testWebDriver.waitForElementToAppear(usersTab); usersTab.click(); return new UserPage(testWebDriver); } public ApprovePage navigateToApprove() throws IOException { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(requisitionMenuItem); testWebDriver.keyPress(requisitionMenuItem); testWebDriver.waitForElementToAppear(approveLink); testWebDriver.keyPress(approveLink); return new ApprovePage(testWebDriver); } public ConvertOrderPage navigateConvertToOrder() throws IOException { 
testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(requisitionMenuItem); testWebDriver.keyPress(requisitionMenuItem); testWebDriver.waitForElementToAppear(convertToOrderMenuItem); testWebDriver.keyPress(convertToOrderMenuItem); testWebDriver.sleep(5000); testWebDriver.waitForElementToAppear(convertToOrderHeader); return new ConvertOrderPage(testWebDriver); } public DistributionPage navigateToDistributionWhenOnline() throws IOException { testWebDriver.waitForElementToAppear(distributionsMenuItem); testWebDriver.keyPress(distributionsMenuItem); testWebDriver.waitForElementToAppear(manageDistributionMenuItem); testWebDriver.keyPress(manageDistributionMenuItem); testWebDriver.waitForElementToAppear(manageDistributionHeader); return new DistributionPage(testWebDriver); } public DistributionPage navigateOfflineDistribution() throws IOException { testWebDriver.waitForElementToAppear(offlineDistributions); testWebDriver.keyPress(offlineDistributions); testWebDriver.waitForElementToAppear(manageDistributionMenuItem); testWebDriver.keyPress(manageDistributionMenuItem); return new DistributionPage(testWebDriver); } public ProgramProductISAPage navigateProgramProductISA() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(TemplateConfigTab); testWebDriver.keyPress(TemplateConfigTab); testWebDriver.waitForElementToAppear(programProductISAMenuItem); testWebDriver.keyPress(programProductISAMenuItem); testWebDriver.waitForElementToAppear(selectProgramSelectBox); return new ProgramProductISAPage(testWebDriver); } public HomePage navigateHomePage() throws IOException { testWebDriver.waitForElementToAppear(homeMenuItem); testWebDriver.keyPress(homeMenuItem); testWebDriver.sleep(500); return new HomePage(testWebDriver); } public ViewOrdersPage navigateViewOrders() throws IOException { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(ordersMenuItem); testWebDriver.keyPress(ordersMenuItem); testWebDriver.waitForElementToAppear(viewOrdersMenuItem); testWebDriver.keyPress(viewOrdersMenuItem); testWebDriver.waitForElementToAppear(viewOrdersHeader); return new ViewOrdersPage(testWebDriver); } public String getErrorMessage() { testWebDriver.waitForElementToAppear(errorMsg); return errorMsg.getText().trim(); } public void verifyLoggedInUser(String Username) { testWebDriver.waitForElementToAppear(loggedInUserLabel); assertEquals(loggedInUserLabel.getText(), Username); } public void navigateAndInitiateEmergencyRnr(String program) throws IOException { navigateRnr(); myFacilityRadioButton.click(); testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(programDropDown); testWebDriver.selectByVisibleText(programDropDown, program); testWebDriver.selectByVisibleText(rnrTypeSelectBox, "Emergency"); } public String getFirstPeriod() { return firstPeriodLabel.getText().trim(); } public void navigateRnr() throws IOException { testWebDriver.waitForElementToBeEnabled(requisitionsLink); testWebDriver.keyPress(requisitionsLink); testWebDriver.waitForElementToBeEnabled(createLink); testWebDriver.sleep(1000); testWebDriver.keyPress(createLink); testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(myFacilityRadioButton); } public boolean isHomeMenuTabDisplayed() { return homeMenuItem.isDisplayed(); } public boolean isRequisitionsMenuTabDisplayed() { return requisitionMenuItem.isDisplayed(); } public void navigateAndInitiateRnrForSupervisedFacility(String program) throws 
IOException { navigateRnr(); supervisedFacilityRadioButton.click(); testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(ProgramDropDownSupervisedFacility); testWebDriver.selectByVisibleText(ProgramDropDownSupervisedFacility, program); testWebDriver.sleep(1000); } public void selectFacilityForSupervisoryNodeRnR(String facilityName) { testWebDriver.waitForElementToAppear(facilityDropDown); testWebDriver.selectByVisibleText(facilityDropDown, facilityName); testWebDriver.sleep(100); } public String getFacilityDropDownList() { return facilityDropDown.getText(); } public String getFacilityDropDownListForViewRequisition() { return testWebDriver.findElement(By.name("selectFacility")).getText(); } public ManagePodPage navigateManagePOD() throws IOException { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(ordersMenuItem); testWebDriver.keyPress(ordersMenuItem); testWebDriver.waitForElementToAppear(viewManagePODMenuItem); testWebDriver.keyPress(viewManagePODMenuItem); testWebDriver.waitForElementToAppear(viewManagePODHeader); return new ManagePodPage(testWebDriver); } }
test-modules/functional-tests/src/main/java/org/openlmis/pageobjects/HomePage.java
/* * This program is part of the OpenLMIS logistics management information system platform software. * Copyright © 2013 VillageReach * * This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. *   * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Affero General Public License for more details. * You should have received a copy of the GNU Affero General Public License along with this program.  If not, see http://www.gnu.org/licenses.  For additional information contact info@OpenLMIS.org.  */ package org.openlmis.pageobjects; import org.openlmis.UiUtils.TestWebDriver; import org.openlmis.pageobjects.edi.ConfigureEDIPage; import org.openlmis.pageobjects.edi.ConvertOrderPage; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.PageFactory; import org.openqa.selenium.support.pagefactory.AjaxElementLocatorFactory; import java.io.IOException; import static com.thoughtworks.selenium.SeleneseTestBase.assertTrue; import static com.thoughtworks.selenium.SeleneseTestNgHelper.assertEquals; import static org.openqa.selenium.support.How.*; public class HomePage extends Page { @FindBy(how = LINK_TEXT, using = "Logout") private static WebElement logoutLink = null; @FindBy(how = XPATH, using = "//div[@class='user-info ng-scope']/strong") private static WebElement loggedInUserLabel = null; @FindBy(how = ID, using = "requisitions-menu") private static WebElement requisitionMenuItem = null; @FindBy(how = ID, using = "distributions-menu") private static WebElement distributionsMenuItem = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Program Product ISA')]") private static WebElement programProductISAMenuItem = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Home')]") private static WebElement homeMenuItem = null; @FindBy(how = ID, using = "reports-menu") private static WebElement reportMenuItem = null; @FindBy(how = XPATH, using = "//h2/span[contains(text(),'Reports')]") private static WebElement reportsTitle = null; @FindBy(how = ID, using = "orders-menu") private static WebElement ordersMenuItem = null; @FindBy(how = ID, using = "approveRnr") private static WebElement approveLink = null; @FindBy(how = ID, using = "administration-menu") private static WebElement AdministrationMenuItem = null; @FindBy(how = ID, using = "manage-option") private static WebElement manageFacilityMenuItem = null; @FindBy(how = ID, using = "convertToOrderRnr") private static WebElement convertToOrderMenuItem = null; @FindBy(how = ID, using = "manage-distribution") private static WebElement manageDistributionMenuItem = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Distributions')]") private static WebElement offlineDistributions = null; @FindBy(how = XPATH, using = "//a[contains(text(),'View Orders')]") private static WebElement viewOrdersMenuItem = null; @FindBy(how = ID, using = "viewRnr") private static WebElement viewRequisitionMenuItem = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'View Requisitions')]") private static WebElement viewRequisitionHeader = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'Convert Requisitions to Order')]") private static 
WebElement convertToOrderHeader = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'Manage a Distribution')]") private static WebElement manageDistributionHeader = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'View Orders')]") private static WebElement viewOrdersHeader = null; @FindBy(how = ID, using = "add-new-facility") private static WebElement createFacility = null; @FindBy(how = XPATH, using = "//div[@class='ng-scope']/div[@ng-hide='facility.id']/h2") private static WebElement facilityHeader = null; @FindBy(how = LINK_TEXT, using = "Configure") private static WebElement TemplateConfigTab = null; @FindBy(how = LINK_TEXT, using = "R & R Template") private static WebElement RnRTemplateConfigTab = null; @FindBy(how = LINK_TEXT, using = "EDI File") private static WebElement ediFileTab = null; @FindBy(how = LINK_TEXT, using = "Regimen Template") private static WebElement RegimenTemplateConfigTab = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'Regimen Template')]") private static WebElement RegimenTemplateHeader = null; @FindBy(how = ID, using = "requisitions-menu") private static WebElement requisitionsLink = null; @FindBy(how = XPATH, using = "//div[@class='submenu']") private static WebElement SubMenuItem = null; @FindBy(how = ID, using = "createRnr") private static WebElement createLink = null; @FindBy(how = XPATH, using = "//input[@id='myFacilityRnr']") private static WebElement myFacilityRadioButton = null; @FindBy(how = LINK_TEXT, using = "Manage") private static WebElement manageLink = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Upload')]") private static WebElement uploadLink = null; @FindBy(how = XPATH, using = "//input[@ng-click='initRnr(row.entity)']") private static WebElement proceedButton = null; @FindBy(how = ID, using = "facility-tab") private static WebElement facilitiesTab = null; @FindBy(how = ID, using = "role-tab") private static WebElement rolesTab = null; @FindBy(how = ID, using = "schedule-tab") private static WebElement schedulesTab = null; @FindBy(how = ID, using = "user-tab") private static WebElement usersTab = null; @FindBy(how = XPATH, using = "//div[@class='ngCellText ng-scope col1 colt1']/span") private static WebElement startDate = null; @FindBy(how = XPATH, using = "//div[@class='ngCellText ng-scope col2 colt2']/span") private static WebElement endDate = null; @FindBy(how = ID, using = "saveSuccessMsgDiv") private static WebElement errorMsg = null; @FindBy(how = ID, using = "program") private static WebElement selectProgramSelectBox = null; @FindBy(how = ID, using = "rnrType") private static WebElement rnrTypeSelectBox = null; @FindBy(how = XPATH, using = "//div/div/div[1]/div[2]/div/span") private static WebElement firstPeriodLabel = null; @FindBy(how = XPATH, using = "//input[@id='supervisedFacilityRnr']") private static WebElement supervisedFacilityRadioButton = null; @FindBy(how = XPATH, using = "//select[@id='programListSupervisedFacility']") private static WebElement ProgramDropDownSupervisedFacility = null; @FindBy(how = ID, using = "facilityList") private static WebElement facilityDropDown = null; @FindBy(how = XPATH, using = "//select[@id='programListMyFacility']") private static WebElement programDropDown = null; @FindBy(how = XPATH, using = "//a[contains(text(),'Manage POD')]") private static WebElement viewManagePODMenuItem = null; @FindBy(how = XPATH, using = "//h2[contains(text(),'Manage Proof of Delivery')]") private static WebElement viewManagePODHeader = null; public HomePage(TestWebDriver 
driver) throws IOException { super(driver); PageFactory.initElements(new AjaxElementLocatorFactory(TestWebDriver.getDriver(), 10), this); testWebDriver.setImplicitWait(10); } public WebElement getLogoutLink() { return logoutLink; } public LoginPage logout(String baseUrl) throws IOException { testWebDriver.waitForElementToAppear(logoutLink); logoutLink.click(); return new LoginPage(testWebDriver, baseUrl); } public ManageFacilityPage navigateManageFacility() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageFacilityMenuItem); testWebDriver.keyPress(manageFacilityMenuItem); return ManageFacilityPage.getInstance(testWebDriver); } public void clickCreateFacilityButton() { testWebDriver.waitForElementToAppear(createFacility); testWebDriver.sleep(1000); testWebDriver.keyPress(createFacility); } public void verifyHeader(String headingToVerify) { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(facilityHeader); assertEquals(facilityHeader.getText().trim(), headingToVerify); } public void verifyAdminTabs() { testWebDriver.waitForElementToAppear(facilitiesTab); assertTrue(facilitiesTab.isDisplayed()); assertTrue(rolesTab.isDisplayed()); assertTrue(schedulesTab.isDisplayed()); assertTrue(usersTab.isDisplayed()); } public TemplateConfigPage selectProgramToConfigTemplate(String programme) { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(TemplateConfigTab); testWebDriver.keyPress(TemplateConfigTab); testWebDriver.waitForElementToAppear(RnRTemplateConfigTab); testWebDriver.keyPress(RnRTemplateConfigTab); testWebDriver.waitForElementToAppear(testWebDriver.getElementById(programme)); testWebDriver.getElementById(programme).click(); return new TemplateConfigPage(testWebDriver); } public ConfigureEDIPage navigateEdiScreen() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(TemplateConfigTab); testWebDriver.keyPress(TemplateConfigTab); testWebDriver.waitForElementToAppear(ediFileTab); testWebDriver.keyPress(ediFileTab); return new ConfigureEDIPage(testWebDriver); } public RegimenTemplateConfigPage navigateToRegimenConfigTemplate() { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(TemplateConfigTab); testWebDriver.keyPress(TemplateConfigTab); testWebDriver.waitForElementToAppear(RegimenTemplateConfigTab); testWebDriver.keyPress(RegimenTemplateConfigTab); testWebDriver.waitForElementToAppear(RegimenTemplateHeader); return new RegimenTemplateConfigPage(testWebDriver); } public String navigateAndInitiateRnr(String program) throws IOException { navigateRnr(); myFacilityRadioButton.click(); testWebDriver.sleep(2000); testWebDriver.waitForElementToAppear(programDropDown); testWebDriver.selectByVisibleText(programDropDown, program); testWebDriver.waitForElementToAppear(startDate); return (startDate.getText().trim() + " - " + endDate.getText().trim()); } public void navigateInitiateRnRScreenAndSelectingRequiredFields(String program, String type) throws IOException { navigateRnr(); myFacilityRadioButton.click(); testWebDriver.sleep(500); testWebDriver.waitForElementToAppear(programDropDown); testWebDriver.selectByVisibleText(programDropDown, program); 
testWebDriver.selectByVisibleText(rnrTypeSelectBox, type); testWebDriver.sleep(1000); } public void clickRequisitionSubMenuItem() throws IOException { testWebDriver.waitForElementToAppear(requisitionsLink); testWebDriver.keyPress(requisitionsLink); } public void verifySubMenuItems(String[] expectedSubMenuItem) throws IOException { String[] subMenuItem = SubMenuItem.getText().split("\n"); assertEquals(subMenuItem, expectedSubMenuItem); } public InitiateRnRPage clickProceed() throws IOException { testWebDriver.setImplicitWait(100); testWebDriver.waitForElementToAppear(proceedButton); proceedButton.click(); testWebDriver.sleep(1000); return new InitiateRnRPage(testWebDriver); } public ViewRequisitionPage navigateViewRequisition() throws IOException { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(requisitionMenuItem); testWebDriver.keyPress(requisitionMenuItem); testWebDriver.waitForElementToAppear(viewRequisitionMenuItem); testWebDriver.keyPress(viewRequisitionMenuItem); testWebDriver.waitForElementToAppear(viewRequisitionHeader); return new ViewRequisitionPage(testWebDriver); } public ReportPage navigateReportScreen() throws IOException { testWebDriver.waitForElementToAppear(reportMenuItem); testWebDriver.keyPress(reportMenuItem); testWebDriver.waitForElementToAppear(reportsTitle); return new ReportPage(testWebDriver); } public ManageFacilityPage navigateSearchFacility() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageLink); testWebDriver.keyPress(manageLink); testWebDriver.waitForElementToAppear(facilitiesTab); facilitiesTab.click(); return ManageFacilityPage.getInstance(testWebDriver); } public RolesPage navigateRoleAssignments() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageLink); testWebDriver.keyPress(manageLink); testWebDriver.waitForElementToAppear(rolesTab); testWebDriver.keyPress(rolesTab); return new RolesPage(testWebDriver); } public UploadPage navigateUploads() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(uploadLink); uploadLink.click(); return new UploadPage(testWebDriver); } public ManageSchedulePage navigateToSchedule() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageLink); testWebDriver.keyPress(manageLink); testWebDriver.waitForElementToAppear(schedulesTab); schedulesTab.click(); return new ManageSchedulePage(testWebDriver); } public UserPage navigateToUser() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(manageLink); testWebDriver.keyPress(manageLink); testWebDriver.waitForElementToAppear(usersTab); usersTab.click(); return new UserPage(testWebDriver); } public ApprovePage navigateToApprove() throws IOException { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(requisitionMenuItem); testWebDriver.keyPress(requisitionMenuItem); testWebDriver.waitForElementToAppear(approveLink); testWebDriver.keyPress(approveLink); return new ApprovePage(testWebDriver); } public ConvertOrderPage navigateConvertToOrder() throws IOException { 
testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(requisitionMenuItem); testWebDriver.keyPress(requisitionMenuItem); testWebDriver.waitForElementToAppear(convertToOrderMenuItem); testWebDriver.keyPress(convertToOrderMenuItem); testWebDriver.sleep(5000); testWebDriver.waitForElementToAppear(convertToOrderHeader); return new ConvertOrderPage(testWebDriver); } public DistributionPage navigateToDistributionWhenOnline() throws IOException { testWebDriver.waitForElementToAppear(distributionsMenuItem); testWebDriver.keyPress(distributionsMenuItem); testWebDriver.waitForElementToAppear(manageDistributionMenuItem); testWebDriver.keyPress(manageDistributionMenuItem); testWebDriver.waitForElementToAppear(manageDistributionHeader); return new DistributionPage(testWebDriver); } public DistributionPage navigateOfflineDistribution() throws IOException { testWebDriver.waitForElementToAppear(offlineDistributions); testWebDriver.keyPress(offlineDistributions); testWebDriver.waitForElementToAppear(manageDistributionMenuItem); testWebDriver.keyPress(manageDistributionMenuItem); return new DistributionPage(testWebDriver); } public ProgramProductISAPage navigateProgramProductISA() throws IOException { testWebDriver.waitForElementToAppear(AdministrationMenuItem); testWebDriver.keyPress(AdministrationMenuItem); testWebDriver.waitForElementToAppear(TemplateConfigTab); testWebDriver.keyPress(TemplateConfigTab); testWebDriver.waitForElementToAppear(programProductISAMenuItem); testWebDriver.keyPress(programProductISAMenuItem); testWebDriver.waitForElementToAppear(selectProgramSelectBox); return new ProgramProductISAPage(testWebDriver); } public HomePage navigateHomePage() throws IOException { testWebDriver.waitForElementToAppear(homeMenuItem); testWebDriver.keyPress(homeMenuItem); testWebDriver.sleep(500); return new HomePage(testWebDriver); } public ViewOrdersPage navigateViewOrders() throws IOException { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(ordersMenuItem); testWebDriver.keyPress(ordersMenuItem); testWebDriver.waitForElementToAppear(viewOrdersMenuItem); testWebDriver.keyPress(viewOrdersMenuItem); testWebDriver.waitForElementToAppear(viewOrdersHeader); return new ViewOrdersPage(testWebDriver); } public String getErrorMessage() { testWebDriver.waitForElementToAppear(errorMsg); return errorMsg.getText().trim(); } public void verifyLoggedInUser(String Username) { testWebDriver.waitForElementToAppear(loggedInUserLabel); assertEquals(loggedInUserLabel.getText(), Username); } public void navigateAndInitiateEmergencyRnr(String program) throws IOException { navigateRnr(); myFacilityRadioButton.click(); testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(programDropDown); testWebDriver.selectByVisibleText(programDropDown, program); testWebDriver.selectByVisibleText(rnrTypeSelectBox, "Emergency"); } public String getFirstPeriod() { return firstPeriodLabel.getText().trim(); } public void navigateRnr() throws IOException { testWebDriver.waitForElementToAppear(requisitionsLink); testWebDriver.keyPress(requisitionsLink); testWebDriver.waitForElementToAppear(createLink); testWebDriver.sleep(1000); testWebDriver.keyPress(createLink); testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(myFacilityRadioButton); } public boolean isHomeMenuTabDisplayed() { return homeMenuItem.isDisplayed(); } public boolean isRequisitionsMenuTabDisplayed() { return requisitionMenuItem.isDisplayed(); } public void navigateAndInitiateRnrForSupervisedFacility(String program) throws 
IOException { navigateRnr(); supervisedFacilityRadioButton.click(); testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(ProgramDropDownSupervisedFacility); testWebDriver.selectByVisibleText(ProgramDropDownSupervisedFacility, program); testWebDriver.sleep(1000); } public void selectFacilityForSupervisoryNodeRnR(String facilityName) { testWebDriver.waitForElementToAppear(facilityDropDown); testWebDriver.selectByVisibleText(facilityDropDown, facilityName); testWebDriver.sleep(100); } public String getFacilityDropDownList() { return facilityDropDown.getText(); } public String getFacilityDropDownListForViewRequisition() { return testWebDriver.findElement(By.name("selectFacility")).getText(); } public ManagePodPage navigateManagePOD() throws IOException { testWebDriver.sleep(1000); testWebDriver.waitForElementToAppear(ordersMenuItem); testWebDriver.keyPress(ordersMenuItem); testWebDriver.waitForElementToAppear(viewManagePODMenuItem); testWebDriver.keyPress(viewManagePODMenuItem); testWebDriver.waitForElementToAppear(viewManagePODHeader); return new ManagePodPage(testWebDriver); } }
|#000| +Shilpi, Anshul | adding wait for element to appear in HomePage create rnr link
test-modules/functional-tests/src/main/java/org/openlmis/pageobjects/HomePage.java
|#000| +Shilpi, Anshul | adding wait for element to appear in HomePage create rnr link
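The commit above adds an explicit wait so the create-RnR link is only clicked once it has appeared. A minimal sketch of that pattern in plain Selenium follows; the page objects in this repository go through a custom testWebDriver wrapper, so the helper name, the 10-second timeout, and the Selenium 4 WebDriverWait signature below are assumptions rather than the project's actual API.

import java.time.Duration;

import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;

public class WaitThenClickSketch {

    // Hypothetical helper mirroring the waitForElementToAppear(...) + keyPress(...)
    // sequence used in the page objects above: block until the element is visible,
    // then click it, instead of clicking immediately and racing the page load.
    static void waitForElementToAppearAndClick(WebDriver driver, WebElement element) {
        new WebDriverWait(driver, Duration.ofSeconds(10)) // Selenium 4 signature; older versions take the timeout as a long
                .until(ExpectedConditions.visibilityOf(element));
        element.click();
    }
}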
Java
agpl-3.0
13c0570df5c8a7df31d8e9f019f99d0932c91307
0
KinshipSoftware/KinOathKinshipArchiver,KinshipSoftware/KinOathKinshipArchiver,PeterWithers/temp-to-delete1,PeterWithers/temp-to-delete1
package nl.mpi.kinnate.ui; import java.awt.Dimension; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionAdapter; import javax.swing.JTabbedPane; /** * Document : HidePane * Created on : Mar 11, 2011, 9:03:55 AM * Author : Peter Withers */ public class HidePane extends JTabbedPane { public enum HidePanePosition { left, right, top, bottom } private boolean hiddenState = true; private int lastSelectedTab = -1; private int defaultShownWidth = 300; private int shownWidth; private int hiddenWidth = 30; private HidePanePosition borderPosition; private boolean horizontalDivider; private int dragStartPosition = 0; private boolean lastWasDrag = false; public HidePane(HidePanePosition borderPositionLocal, int startWidth) { shownWidth = startWidth; borderPosition = borderPositionLocal; horizontalDivider = (!borderPosition.equals(HidePanePosition.left) && !borderPosition.equals(HidePanePosition.right)); switch (borderPosition) { case left: this.setTabPlacement(javax.swing.JTabbedPane.RIGHT); break; case right: this.setTabPlacement(javax.swing.JTabbedPane.LEFT); break; case top: this.setTabPlacement(javax.swing.JTabbedPane.BOTTOM); break; case bottom: this.setTabPlacement(javax.swing.JTabbedPane.TOP); break; } // this.add(contentComponent, labelStringLocal); this.addMouseMotionListener(new MouseMotionAdapter() { @Override public void mouseDragged(MouseEvent e) { // todo: check the max space and prevent oversizing lastWasDrag = true; if (hiddenState) { hiddenState = false; shownWidth = hiddenWidth; } switch (borderPosition) { case left: shownWidth = shownWidth - dragStartPosition + e.getXOnScreen(); dragStartPosition = e.getXOnScreen(); break; case right: shownWidth = shownWidth + dragStartPosition - e.getXOnScreen(); dragStartPosition = e.getXOnScreen(); break; case top: shownWidth = shownWidth - dragStartPosition + e.getYOnScreen(); dragStartPosition = e.getYOnScreen(); break; case bottom: shownWidth = shownWidth + dragStartPosition - e.getYOnScreen(); dragStartPosition = e.getYOnScreen(); break; } if (shownWidth < hiddenWidth) { shownWidth = hiddenWidth; hiddenState = true; } if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, shownWidth)); } else { HidePane.this.setPreferredSize(new Dimension(shownWidth, HidePane.this.getPreferredSize().height)); } // if (horizontalDivider) { // if (borderPosition.equals(BorderLayout.PAGE_END)) { // shownWidth = shownWidth - lastXpos + e.getY(); // } else { // shownWidth = shownWidth - lastXpos - e.getY(); // } //// if (shownWidth < removeButton.getPreferredSize().height * 2) { //// shownWidth = removeButton.getPreferredSize().height * 2; //// } else if (shownWidth > HidePane.this.getParent().getHeight()) { //// shownWidth = HidePane.this.getParent().getHeight() - removeButton.getPreferredSize().height; //// } // HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, shownWidth)); // } else { // if (borderPosition.equals(BorderLayout.LINE_END)) { // shownWidth = shownWidth - lastXpos + e.getX(); // } else { // shownWidth = shownWidth - lastXpos - e.getX(); // } //// if (shownWidth < removeButton.getPreferredSize().width * 2) { //// shownWidth = removeButton.getPreferredSize().width * 2; //// } else if (shownWidth > HidePane.this.getParent().getWidth()) { //// shownWidth = HidePane.this.getParent().getWidth() - removeButton.getPreferredSize().width; //// } // HidePane.this.setPreferredSize(new Dimension(shownWidth, HidePane.this.getPreferredSize().height)); // } 
HidePane.this.revalidate(); HidePane.this.repaint(); } }); this.addMouseListener(new java.awt.event.MouseAdapter() { @Override public void mouseReleased(MouseEvent e) { super.mouseReleased(e); if (!hiddenState && lastSelectedTab != HidePane.this.getSelectedIndex()) { // skip hide action when the selected tab changes lastSelectedTab = HidePane.this.getSelectedIndex(); return; } lastSelectedTab = HidePane.this.getSelectedIndex(); if (!lastWasDrag) { toggleHiddenState(); } else if (shownWidth < hiddenWidth * 2) { shownWidth = hiddenWidth; hiddenState = true; if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, shownWidth)); } else { HidePane.this.setPreferredSize(new Dimension(shownWidth, HidePane.this.getPreferredSize().height)); } HidePane.this.revalidate(); HidePane.this.repaint(); } } @Override public void mousePressed(MouseEvent e) { lastWasDrag = false; if (horizontalDivider) { dragStartPosition = e.getYOnScreen(); } else { dragStartPosition = e.getXOnScreen(); } super.mousePressed(e); } }); if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, hiddenWidth)); } else { HidePane.this.setPreferredSize(new Dimension(hiddenWidth, HidePane.this.getPreferredSize().height)); } } public void toggleHiddenState() { if (!hiddenState) { if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, hiddenWidth)); } else { HidePane.this.setPreferredSize(new Dimension(hiddenWidth, HidePane.this.getPreferredSize().height)); } } else { if (shownWidth < hiddenWidth * 2) { shownWidth = defaultShownWidth; } if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, shownWidth)); } else { HidePane.this.setPreferredSize(new Dimension(shownWidth, HidePane.this.getPreferredSize().height)); } } hiddenState = !hiddenState; HidePane.this.revalidate(); HidePane.this.repaint(); } public boolean isHidden() { return hiddenState; } }
desktop/src/main/java/nl/mpi/kinnate/ui/HidePane.java
package nl.mpi.kinnate.ui; import java.awt.Component; import java.awt.Dimension; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionAdapter; import javax.swing.JTabbedPane; /** * Document : HidePane * Created on : Mar 11, 2011, 9:03:55 AM * Author : Peter Withers */ public class HidePane extends JTabbedPane { public enum HidePanePosition { left, right, top, bottom } private boolean hiddenState = true; private int defaultShownWidth = 300; private int shownWidth; private int hiddenWidth = 30; private HidePanePosition borderPosition; private boolean horizontalDivider; private int dragStartPosition = 0; private boolean lastWasDrag = false; public HidePane(HidePanePosition borderPositionLocal, int startWidth) { shownWidth = startWidth; borderPosition = borderPositionLocal; horizontalDivider = (!borderPosition.equals(HidePanePosition.left) && !borderPosition.equals(HidePanePosition.right)); switch (borderPosition) { case left: this.setTabPlacement(javax.swing.JTabbedPane.RIGHT); break; case right: this.setTabPlacement(javax.swing.JTabbedPane.LEFT); break; case top: this.setTabPlacement(javax.swing.JTabbedPane.BOTTOM); break; case bottom: this.setTabPlacement(javax.swing.JTabbedPane.TOP); break; } // this.add(contentComponent, labelStringLocal); this.addMouseMotionListener(new MouseMotionAdapter() { @Override public void mouseDragged(MouseEvent e) { lastWasDrag = true; if (hiddenState) { hiddenState = false; shownWidth = hiddenWidth; } switch (borderPosition) { case left: shownWidth = shownWidth - dragStartPosition + e.getX(); dragStartPosition = e.getX(); break; case right: shownWidth = shownWidth - dragStartPosition + e.getX(); dragStartPosition = e.getX(); break; case top: shownWidth = shownWidth - dragStartPosition + e.getY(); dragStartPosition = e.getY(); break; case bottom: shownWidth = shownWidth - dragStartPosition + e.getY(); dragStartPosition = e.getY(); break; } if (shownWidth < hiddenWidth) { shownWidth = hiddenWidth; hiddenState = true; } if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, shownWidth)); } else { HidePane.this.setPreferredSize(new Dimension(shownWidth, HidePane.this.getPreferredSize().height)); } // if (horizontalDivider) { // if (borderPosition.equals(BorderLayout.PAGE_END)) { // shownWidth = shownWidth - lastXpos + e.getY(); // } else { // shownWidth = shownWidth - lastXpos - e.getY(); // } //// if (shownWidth < removeButton.getPreferredSize().height * 2) { //// shownWidth = removeButton.getPreferredSize().height * 2; //// } else if (shownWidth > HidePane.this.getParent().getHeight()) { //// shownWidth = HidePane.this.getParent().getHeight() - removeButton.getPreferredSize().height; //// } // HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, shownWidth)); // } else { // if (borderPosition.equals(BorderLayout.LINE_END)) { // shownWidth = shownWidth - lastXpos + e.getX(); // } else { // shownWidth = shownWidth - lastXpos - e.getX(); // } //// if (shownWidth < removeButton.getPreferredSize().width * 2) { //// shownWidth = removeButton.getPreferredSize().width * 2; //// } else if (shownWidth > HidePane.this.getParent().getWidth()) { //// shownWidth = HidePane.this.getParent().getWidth() - removeButton.getPreferredSize().width; //// } // HidePane.this.setPreferredSize(new Dimension(shownWidth, HidePane.this.getPreferredSize().height)); // } HidePane.this.revalidate(); HidePane.this.repaint(); } }); this.addMouseListener(new java.awt.event.MouseAdapter() { 
@Override public void mouseReleased(MouseEvent e) { if (!lastWasDrag) { toggleHiddenState(); } else if (shownWidth < hiddenWidth * 2) { shownWidth = hiddenWidth; hiddenState = true; if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, shownWidth)); } else { HidePane.this.setPreferredSize(new Dimension(shownWidth, HidePane.this.getPreferredSize().height)); } HidePane.this.revalidate(); HidePane.this.repaint(); } } @Override public void mousePressed(MouseEvent e) { lastWasDrag = false; if (horizontalDivider) { dragStartPosition = e.getY(); } else { dragStartPosition = e.getX(); } super.mousePressed(e); } }); if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, hiddenWidth)); } else { HidePane.this.setPreferredSize(new Dimension(hiddenWidth, HidePane.this.getPreferredSize().height)); } } public void toggleHiddenState() { if (!hiddenState) { if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, hiddenWidth)); } else { HidePane.this.setPreferredSize(new Dimension(hiddenWidth, HidePane.this.getPreferredSize().height)); } } else { if (shownWidth < hiddenWidth * 2) { shownWidth = defaultShownWidth; } if (horizontalDivider) { HidePane.this.setPreferredSize(new Dimension(HidePane.this.getPreferredSize().width, shownWidth)); } else { HidePane.this.setPreferredSize(new Dimension(shownWidth, HidePane.this.getPreferredSize().height)); } } hiddenState = !hiddenState; HidePane.this.revalidate(); HidePane.this.repaint(); } public boolean isHidden() { return hiddenState; } }
Updated the UI layout to match the internal changes.
desktop/src/main/java/nl/mpi/kinnate/ui/HidePane.java
Updated the UI layout to match the internal changes.
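The diff above mostly replaces component-relative drag coordinates (e.getX() / e.getY()) with screen coordinates (e.getXOnScreen() / e.getYOnScreen()) and flips the sign of the delta for panes docked on the right or bottom edge, so a drag keeps resizing in the expected direction even while the pane itself is moving. A minimal standalone sketch of that delta bookkeeping, using made-up field names rather than the ones in HidePane:

import java.awt.event.MouseEvent;

// Illustrative only: track a pane width across successive drag events using
// screen coordinates, which stay stable while the component being dragged resizes.
final class DragResizeSketch {

    private int shownWidth = 300;  // assumed starting width
    private int dragStart;         // screen X recorded on the previous event

    void mousePressed(MouseEvent e) {
        dragStart = e.getXOnScreen();
    }

    void mouseDragged(MouseEvent e, boolean dockedOnRight) {
        int delta = e.getXOnScreen() - dragStart;
        // A right-docked pane grows when the mouse moves left, hence the sign flip.
        shownWidth += dockedOnRight ? -delta : delta;
        dragStart = e.getXOnScreen();
    }
}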
Java
lgpl-2.1
3a49eca9f8e6d7fa24054f19663eed44e92a2a33
0
julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine
package org.intermine.web.struts; /* * Copyright (C) 2002-2011 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.io.StringReader; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.commons.lang.StringUtils; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.intermine.api.InterMineAPI; import org.intermine.api.profile.Profile; import org.intermine.api.profile.ProfileManager; import org.intermine.api.profile.TagManager; import org.intermine.api.search.SearchRepository; import org.intermine.api.tag.TagTypes; import org.intermine.api.xml.TagBinding; import org.intermine.web.logic.session.SessionMethods; /** * Import tags. * * @author Thomas Riley */ public class ImportTagsAction extends InterMineAction { /** * Import user's tags. * * @param mapping The ActionMapping used to select this instance * @param form The optional ActionForm bean for this request (if any) * @param request The HTTP request we are processing * @param response The HTTP response we are creating * @return an ActionForward object defining where control goes next * * @exception Exception if the application business logic throws * an exception */ public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, @SuppressWarnings("unused") HttpServletResponse response) throws Exception { ImportTagsForm f = (ImportTagsForm) form; HttpSession session = request.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = SessionMethods.getProfile(session); ProfileManager pm = im.getProfileManager(); if (f.isOverwriting()) { TagManager tm = im.getTagManager(); tm.deleteTags(null, null, null, profile.getUsername()); } StringReader reader = new StringReader(f.getXml()); int count = 0; if (!StringUtils.isEmpty(f.getXml())) { try { count = new TagBinding().unmarshal(pm, profile.getUsername(), reader); } catch (Exception ex) { SessionMethods.recordError( "Problems importing tags. Please check the XML structure.", session); return mapping.findForward("importTag"); } } recordMessage(new ActionMessage("history.importedTags", new Integer(count)), request); if (SessionMethods.isSuperUser(session)) { SearchRepository sr = SessionMethods.getGlobalSearchRepository( session.getServletContext()); sr.globalChange(TagTypes.TEMPLATE); sr.globalChange(TagTypes.BAG); } f.reset(); return mapping.findForward("success"); } }
intermine/web/main/src/org/intermine/web/struts/ImportTagsAction.java
package org.intermine.web.struts; /* * Copyright (C) 2002-2011 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.io.StringReader; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.commons.lang.StringUtils; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.intermine.api.InterMineAPI; import org.intermine.api.profile.Profile; import org.intermine.api.profile.ProfileManager; import org.intermine.api.profile.TagManager; import org.intermine.api.search.SearchRepository; import org.intermine.api.tag.TagTypes; import org.intermine.api.xml.TagBinding; import org.intermine.web.logic.session.SessionMethods; /** * Import tags. * * @author Thomas Riley */ public class ImportTagsAction extends InterMineAction { /** * Import user's tags. * * @param mapping The ActionMapping used to select this instance * @param form The optional ActionForm bean for this request (if any) * @param request The HTTP request we are processing * @param response The HTTP response we are creating * @return an ActionForward object defining where control goes next * * @exception Exception if the application business logic throws * an exception */ public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, @SuppressWarnings("unused") HttpServletResponse response) throws Exception { ImportTagsForm f = (ImportTagsForm) form; HttpSession session = request.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = SessionMethods.getProfile(session); ProfileManager pm = im.getProfileManager(); if (f.isOverwriting()) { TagManager tm = im.getTagManager(); tm.deleteTags(null, null, null, profile.getUsername()); } StringReader reader = new StringReader(f.getXml()); int count = 0; if (!StringUtils.isEmpty(f.getXml())) { try { count = new TagBinding().unmarshal(pm, profile.getUsername(), reader); } catch (Exception ex) { SessionMethods.recordError( "Problems importing tags. Please check the XML structure.", session); return mapping.findForward("importTag"); } } recordMessage(new ActionMessage("history.importedTags", new Integer(count)), request); if (SessionMethods.isSuperUser(session)) { SearchRepository sr = SessionMethods.getGlobalSearchRepository( session.getServletContext()); sr.globalChange(TagTypes.TEMPLATE); sr.globalChange(TagTypes.BAG); } return mapping.findForward("success"); } }
Import a tag, then import a tag again; the form displayed will be empty. Former-commit-id: c8b0ceb01bbcf72e644f55c540b9cb21f4520670
intermine/web/main/src/org/intermine/web/struts/ImportTagsAction.java
Import a tag, then import a tag again; the form displayed will be empty.
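The fix described above amounts to clearing the form bean after a successful import (the new version calls f.reset() before forwarding to "success"), so revisiting the page shows an empty form instead of the previously pasted XML. A rough sketch of what such a no-argument reset could look like; only getXml() and isOverwriting() are visible in the action, so the field list here is an assumption about ImportTagsForm, not its real contents.

// Hypothetical shape of the form bean's no-arg reset, based solely on the
// accessors used in ImportTagsAction; the real ImportTagsForm may hold more state.
public class ImportTagsFormSketch {

    private String xml;
    private boolean overwriting;

    public String getXml() { return xml; }
    public boolean isOverwriting() { return overwriting; }

    // Clears the pasted XML so the import page renders an empty form next time.
    public void reset() {
        xml = null;
        overwriting = false;
    }
}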
Java
unlicense
1d8c730c4cc3b582f5226f97d8dd0ce0ba8a255a
0
dave-cassettari/sapelli,dave-cassettari/sapelli,dave-cassettari/sapelli
/** * Sapelli data collection platform: http://sapelli.org * * Copyright 2012-2014 University College London - ExCiteS group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.ac.ucl.excites.sapelli.collector.load.parse; import java.util.HashMap; import java.util.Hashtable; import java.util.Map.Entry; import java.util.Stack; import org.xml.sax.SAXException; import uk.ac.ucl.excites.sapelli.collector.control.Controller.Mode; import uk.ac.ucl.excites.sapelli.collector.model.Field; import uk.ac.ucl.excites.sapelli.collector.model.FieldParameters; import uk.ac.ucl.excites.sapelli.collector.model.Form; import uk.ac.ucl.excites.sapelli.collector.model.Form.AudioFeedback; import uk.ac.ucl.excites.sapelli.collector.model.JumpSource; import uk.ac.ucl.excites.sapelli.collector.model.Project; import uk.ac.ucl.excites.sapelli.collector.model.Trigger; import uk.ac.ucl.excites.sapelli.collector.model.fields.AudioField; import uk.ac.ucl.excites.sapelli.collector.model.fields.BelongsToField; import uk.ac.ucl.excites.sapelli.collector.model.fields.ButtonField; import uk.ac.ucl.excites.sapelli.collector.model.fields.ButtonField.ButtonColumnType; import uk.ac.ucl.excites.sapelli.collector.model.fields.CheckBoxField; import uk.ac.ucl.excites.sapelli.collector.model.fields.ChoiceField; import uk.ac.ucl.excites.sapelli.collector.model.fields.EndField; import uk.ac.ucl.excites.sapelli.collector.model.fields.LabelField; import uk.ac.ucl.excites.sapelli.collector.model.fields.LinksToField; import uk.ac.ucl.excites.sapelli.collector.model.fields.LocationField; import uk.ac.ucl.excites.sapelli.collector.model.fields.MediaField; import uk.ac.ucl.excites.sapelli.collector.model.fields.MultiListField; import uk.ac.ucl.excites.sapelli.collector.model.fields.MultiListField.MultiListItem; import uk.ac.ucl.excites.sapelli.collector.model.fields.OrientationField; import uk.ac.ucl.excites.sapelli.collector.model.fields.Page; import uk.ac.ucl.excites.sapelli.collector.model.fields.PhotoField; import uk.ac.ucl.excites.sapelli.collector.model.fields.Relationship; import uk.ac.ucl.excites.sapelli.collector.model.fields.TextBoxField; import uk.ac.ucl.excites.sapelli.collector.ui.ControlsUI.Control; import uk.ac.ucl.excites.sapelli.shared.util.StringUtils; import uk.ac.ucl.excites.sapelli.shared.util.xml.SubtreeParser; import uk.ac.ucl.excites.sapelli.shared.util.xml.XMLAttributes; import uk.ac.ucl.excites.sapelli.storage.model.Schema; import uk.ac.ucl.excites.sapelli.storage.queries.constraints.RuleConstraint; /** * A {@link SubtreeParser} for <Form>s * * @author mstevens */ public class FormParser extends SubtreeParser<ProjectParser> { // STATICS-------------------------------------------------------- //TAGS static private final String TAG_FORM = "Form"; static private final String TAG_CHOICE = "Choice"; static private final String TAG_AUDIO = "Audio"; static private final String TAG_PHOTO = "Photo"; static private final String TAG_LOCATION = "Location"; static private final String TAG_ORIENTATION = "Orientation"; static public final 
String TAG_BELONGS_TO = "BelongsTo"; static public final String TAG_LINKS_TO = "LinksTo"; static private final String TAG_CONSTRAINT = "Constraint"; static private final String TAG_BUTTON = "Button"; static private final String TAG_LABEL = "Label"; static private final String TAG_TEXTFIELD = "Text"; static private final String TAG_CHECKBOX = "Check"; static private final String TAG_LIST = "List"; static private final String TAG_MULTILIST = "MultiList"; static private final String TAG_LISTITEM = "Item"; static private final String TAG_PAGE = "Page"; static private final String TAG_TRIGGER = "Trigger"; static private final String TAG_ARGUMENT = "Argument"; //ATTRIBUTES static private final String ATTRIBUTE_FORM_NAME = "name"; static private final String ATTRIBUTE_FORM_ID = "id"; static private final String ATTRIBUTE_FORM_SCHEMA_ID = Schema.V1X_ATTRIBUTE_SCHEMA_ID; static private final String ATTRIBUTE_FORM_SCHEMA_VERSION = Schema.V1X_ATTRIBUTE_SCHEMA_VERSION; static private final String ATTRIBUTE_FORM_STORE_END_TIME = "storeEndTime"; static private final String ATTRIBUTE_FORM_START_FIELD = "startField"; static private final String ATTRIBUTE_FORM_END = "end"; // 1.x compatibility static private final String ATTRIBUTE_FORM_NEXT = "next"; static private final String ATTRIBUTE_FORM_END_SOUND = "endSound"; // 1.x compatibility static private final String ATTRIBUTE_FORM_SAVE_SOUND = "saveSound"; static private final String ATTRIBUTE_FORM_END_VIBRATE = "endVibrate"; // 1.x compatibility static private final String ATTRIBUTE_FORM_SAVE_VIBRATE = "saveVibrate"; static private final String ATTRIBUTE_FORM_FORWARD_BUTTON_IMG = "forwardButtonImg"; static private final String ATTRIBUTE_FORM_CANCEL_BUTTON_IMG = "cancelButtonImg"; static private final String ATTRIBUTE_FORM_BACK_BUTTON_IMG = "backButtonImg"; static private final String ATTRIBUTE_FORM_FORWARD_BUTTON_DESC = "forwardButtonDesc"; static private final String ATTRIBUTE_FORM_CANCEL_BUTTON_DESC = "cancelButtonDesc"; static private final String ATTRIBUTE_FORM_BACK_BUTTON_DESC = "backButtonDesc"; static private final String ATTRIBUTE_FORM_BUTTON_BACKGROUND_COLOR = "buttonBackgroundColor"; static private final String ATTRIBUTE_FORM_SHORTCUT_IMAGE = "shortcutImage"; static private final String ATTRIBUTE_FORM_CLICK_ANIMATION = "clickAnimation"; static private final String ATTRIBUTE_FORM_ANIMATION = "animation"; // 1.x compatibility, the same as clickAnimation static private final String ATTRIBUTE_FORM_SCREEN_TRANSITION = "screenTransition"; static private final String ATTRIBUTE_FORM_AUDIO_FEEDBACK = "audioFeedback"; static private final String ATTRIBUTE_FORM_OBFUSCATE_MEDIA_FILES = "obfuscateMediaFiles"; static private final String ATTRIBUTE_FORM_SINGLE_PAGE = "singlePage"; static private final String ATTRIBUTE_SKIP_ON_BACK = "skipOnBack"; // used on both FORM and FIELD static private final String ATTRIBUTE_FIELD_ID = "id"; static private final String ATTRIBUTE_FIELD_JUMP = "jump"; static private final String ATTRIBUTE_FIELD_OPTIONAL = "optional"; static private final String ATTRIBUTE_FIELD_NO_COLUMN = "noColumn"; static private final String ATTRIBUTE_FIELD_EDITABLE = "editable"; static private final String ATTRIBUTE_FIELD_ALT = "alt"; static private final String ATTRIBUTE_FIELD_IMG = "img"; static private final String ATTRIBUTE_FIELD_ANSWER_DESC = "answerDesc"; static private final String ATTRIBUTE_FIELD_QUESTION_DESC = "questionDesc"; static private final String ATTRIBUTE_FIELD_CAPTION = "caption"; static private final String ATTRIBUTE_FIELD_CAPTIONS = 
"captions"; static private final String ATTRIBUTE_FIELD_LABEL = "label"; // synonym for caption static private final String ATTRIBUTE_FIELD_LABELS = "labels"; // synonym for captions static private final String[] ATTRIBUTE_FIELD_CAPTION_SINGULAR = { ATTRIBUTE_FIELD_CAPTION, ATTRIBUTE_FIELD_LABEL }; static private final String[] ATTRIBUTE_FIELD_CAPTION_PLURAL = { ATTRIBUTE_FIELD_CAPTION, ATTRIBUTE_FIELD_CAPTIONS, ATTRIBUTE_FIELD_LABEL, ATTRIBUTE_FIELD_LABELS }; static private final String ATTRIBUTE_FIELD_BACKGROUND_COLOR = "backgroundColor"; static private final String ATTRIBUTE_FIELD_SHOW_ON_CREATE = "showOnCreate"; static private final String ATTRIBUTE_FIELD_SHOW_ON_EDIT = "showOnEdit"; static private final String ATTRIBUTE_FIELD_SHOW_FORWARD = "showForward"; static private final String ATTRIBUTE_FIELD_SHOW_BACK_ON_CREATE = "showBackOnCreate"; static private final String ATTRIBUTE_FIELD_SHOW_BACK_ON_EDIT = "showBackOnEdit"; static private final String ATTRIBUTE_FIELD_SHOW_CANCEL = "showCancel"; static private final String ATTRIBUTE_FIELD_SHOW_CANCEL_ON_CREATE = "showCancelOnCreate"; static private final String ATTRIBUTE_FIELD_SHOW_CANCEL_ON_EDIT = "showCancelOnEdit"; static private final String ATTRIBUTE_FIELD_SHOW_FORWARD_ON_CREATE = "showForwardOnCreate"; static private final String ATTRIBUTE_FIELD_SHOW_FORWARD_ON_EDIT = "showForwardOnEdit"; static private final String ATTRIBUTE_FIELD_SHOW_BACK = "showBack"; static private final String ATTRIBUTE_FIELD_VALUE = "value"; static private final String ATTRIBUTE_FIELD_DEFAULTVALUE = "defaultValue"; static private final String ATTRIBUTE_FIELD_INITVALUE = "initialValue"; static private final String ATTRIBUTE_DISABLE_FIELD = "disableField"; static private final String ATTRIBUTE_CHOICE_ROWS = "rows"; static private final String ATTRIBUTE_CHOICE_COLS = "cols"; static private final String ATTRIBUTE_LOCATION_START_WITH = "startWith"; static private final String ATTRIBUTE_LOCATION_START_WITH_FORM = "startWithForm"; // deprecated in favour of enum above static private final String ATTRIBUTE_RELATIONSHIP_FORM = "form"; static private final String ATTRIBUTE_RELATIONSHIP_HOLD = "hold"; static private final String ATTRIBUTE_CONSTRAINT_COLUMN = "column"; static private final String ATTRIBUTE_TEXT_MINLENGTH = "minLength"; static private final String ATTRIBUTE_TEXT_MAXLENGTH = "maxLength"; static private final String ATTRIBUTE_TEXT_MULTILINE = "multiLine"; static private final String ATTRIBUTE_TEXT_CONTENT = "content"; static private final String ATTRIBUTE_TEXT_REGEX = "regex"; static private final String ATTRIBUTE_TEXT_CAPITALISATION = "autoCaps"; static private final String ATTRIBUTE_LABEL_SCALE = "scale"; static private final String ATTRIBUTE_LABEL_CENTERED = "centered"; static private final String ATTRIBUTE_LIST_PRESELECT = "preSelectDefault"; static private final String ATTRIBUTE_LISTITEM_DEFAULT = "default"; static private final String ATTRIBUTE_BUTTON_COLUMN = "column"; static private final String ATTRIBUTE_MEDIA_MAX = "max"; static private final String ATTRIBUTE_TRIGGER_KEY = "key"; static private final String ATTRIBUTE_TRIGGER_KEYS = "keys"; static private final String ATTRIBUTE_TRIGGER_FIXED_TIMER = "fixedTimer"; static private final String ATTRIBUTE_TRIGGER_JUMP = "jump"; static private final String ATTRIBUTE_ARGUMENT_PARAM = "param"; static private final String ATTRIBUTE_ARGUMENT_VALUE = "value"; // DYNAMICS------------------------------------------------------- private Project project; private Form currentForm; private String formStartFieldId; 
private Boolean v1xFormShowBack = null; private Boolean v1xFormShowCancel = null; private Boolean v1xFormShowForward = null; private Stack<Field> openFields; private Trigger openTrigger; private MultiListItem currentListItem; private HashMap<JumpSource, String> jumpSourceToJumpTargetId; private Hashtable<String, Field> idToField; private HashMap<MediaField, String> mediaAttachToDisableId; public FormParser(ProjectParser projectParser) { super(projectParser, TAG_FORM); this.project = projectParser.getProject(); this.openFields = new Stack<Field>(); this.jumpSourceToJumpTargetId = new HashMap<JumpSource, String>(); this.idToField = new Hashtable<String, Field>(); this.mediaAttachToDisableId = new HashMap<MediaField, String>(); } @Override public void reset() { currentForm = null; openFields.clear(); openTrigger = null; currentListItem = null; formStartFieldId = null; jumpSourceToJumpTargetId.clear(); idToField.clear(); mediaAttachToDisableId.clear(); v1xFormShowBack = null; v1xFormShowCancel = null; v1xFormShowForward = null; } @Override protected void parseStartElement(String uri, String localName, String qName, XMLAttributes attributes) throws Exception { // <Form> if(qName.equals(TAG_FORM)) { if(currentForm != null) throw new SAXException("Forms cannot be nested!"); String id = attributes.getRequiredString(TAG_FORM, true, false, ATTRIBUTE_FORM_ID, ATTRIBUTE_FORM_NAME); // "name" is v1.x syntax but still accepted in v2.0 (yet "id" is preferred) ProjectParser.Format format = owner.getFormat(); if(format == ProjectParser.Format.v1_x) { // Backwards compatibility if(project.getForms().isEmpty()) // only for 1st, and assumed only, currentForm { int schemaID = attributes.getRequiredInteger(TAG_FORM, ATTRIBUTE_FORM_SCHEMA_ID, "because this is a v1.x project"); int schemaVersion = attributes.getInteger(ATTRIBUTE_FORM_SCHEMA_VERSION, Schema.V1X_DEFAULT_SCHEMA_VERSION); project.setV1XSchemaInfo(schemaID, schemaVersion); //schemaID will be used as projectID } else throw new SAXException("Only single-Form v1.x projects are supported"); } currentForm = new Form(project, id); // the form will add itself to the project and take the next available form position // Shortcut image: currentForm.setShortcutImageRelativePath(attributes.getString(ATTRIBUTE_FORM_SHORTCUT_IMAGE, null, false, false)); // Next/end: try { currentForm.setNext(attributes.getString(Form.DEFAULT_NEXT.name(), true, false, ATTRIBUTE_FORM_NEXT, ATTRIBUTE_FORM_END)); } catch(IllegalArgumentException iae) { throw new SAXException("Invalid '" + ATTRIBUTE_FORM_NEXT + "' attribute value on <" + TAG_FORM + ">.", iae); } // Store end time?: currentForm.setStoreEndTime(attributes.getBoolean(ATTRIBUTE_FORM_STORE_END_TIME, Form.END_TIME_DEFAULT)); // Sound end vibration at the end of the currentForm: currentForm.setSaveSoundRelativePath(attributes.getString(null, false, false, ATTRIBUTE_FORM_SAVE_SOUND, ATTRIBUTE_FORM_END_SOUND)); // Get the sound path currentForm.setVibrateOnSave(attributes.getBoolean(Form.DEFAULT_VIBRATE, ATTRIBUTE_FORM_SAVE_VIBRATE, ATTRIBUTE_FORM_END_VIBRATE)); // Which buttons are allowed to show (deprecated in format >= 2): if(attributes.contains(ATTRIBUTE_FIELD_SHOW_BACK) || attributes.contains(ATTRIBUTE_FIELD_SHOW_CANCEL) || attributes.contains(ATTRIBUTE_FIELD_SHOW_FORWARD)) { if(format == ProjectParser.Format.v1_x) { v1xFormShowBack = attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_BACK, Form.V1X_DEFAULT_SHOW_BACK); v1xFormShowCancel = attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_CANCEL, Form.V1X_DEFAULT_SHOW_CANCEL); 
v1xFormShowForward = attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_FORWARD, Form.V1X_DEFAULT_SHOW_FORWARD); } else addWarning("Attributes '" + ATTRIBUTE_FIELD_SHOW_BACK + "', '" + ATTRIBUTE_FIELD_SHOW_CANCEL + "' & '" + ATTRIBUTE_FIELD_SHOW_FORWARD + "' are deprecated on <Form> in format >= 2."); } // Click Animation: currentForm.setClickAnimation(attributes.getBoolean(Form.DEFAULT_CLICK_ANIMATION, ATTRIBUTE_FORM_CLICK_ANIMATION, ATTRIBUTE_FORM_ANIMATION)); // Screen Transition: try { currentForm.setScreenTransition(attributes.getString(ATTRIBUTE_FORM_SCREEN_TRANSITION, Form.DEFAULT_SCREEN_TRANSITION.name(), true, false)); } catch(IllegalArgumentException iae) { addWarning("Invalid '" + ATTRIBUTE_FORM_SCREEN_TRANSITION + "' attribute value on <" + TAG_FORM + ">. Default Screen Transition is going to be used."); } // Add AudioFeedbakc: try { currentForm.setAudioFeedback(attributes.getString(ATTRIBUTE_FORM_AUDIO_FEEDBACK, Form.DEFAULT_AUDIO_FEEDBACK.name(), true, false)); if(currentForm.getAudioFeedback() != null && currentForm.getAudioFeedback() != AudioFeedback.NONE) addWarning("Older Android devices may require SpeechSynthesis Data Installer to be installed for text-to-speech to work"); } catch(IllegalArgumentException iae) { addWarning("Invalid '" + ATTRIBUTE_FORM_AUDIO_FEEDBACK + "' attribute value on <" + TAG_FORM + ">. Default Audio Feedback is going to be used."); } // Obfuscate Media Files: currentForm.setObfuscateMediaFiles(attributes.getBoolean(ATTRIBUTE_FORM_OBFUSCATE_MEDIA_FILES, Form.DEFAULT_OBFUSCATE_MEDIA_FILES)); // Control button images: currentForm.setBackButtonImageRelativePath(attributes.getString(ATTRIBUTE_FORM_BACK_BUTTON_IMG, null, false, false)); currentForm.setCancelButtonImageRelativePath(attributes.getString(ATTRIBUTE_FORM_CANCEL_BUTTON_IMG, null, false, false)); currentForm.setForwardButtonImageRelativePath(attributes.getString(ATTRIBUTE_FORM_FORWARD_BUTTON_IMG, null, false, false)); // ButtonField background colour: currentForm.setBackButtonDescription(attributes.getString(ATTRIBUTE_FORM_BACK_BUTTON_DESC, Form.DEFAULT_BACK_BUTTON_DESCRIPTION, false, false)); currentForm.setCancelButtonDescription(attributes.getString(ATTRIBUTE_FORM_CANCEL_BUTTON_DESC, Form.DEFAULT_CANCEL_BUTTON_DESCRIPTION, false, false)); currentForm.setForwardButtonDescription(attributes.getString(ATTRIBUTE_FORM_FORWARD_BUTTON_DESC, Form.DEFAULT_FORWARD_BUTTON_DESCRIPTION, false, false)); // ButtonField background colour: currentForm.setButtonBackgroundColor(attributes.getString(ATTRIBUTE_FORM_BUTTON_BACKGROUND_COLOR, Form.DEFAULT_BUTTON_BACKGROUND_COLOR, true, false)); // Single page form (all fields will be added to a single page): if(attributes.getBoolean(Form.DEFAULT_SINGLE_PAGE, ATTRIBUTE_FORM_SINGLE_PAGE)) newPage(null); // Start field: formStartFieldId = attributes.getString(ATTRIBUTE_FORM_START_FIELD, null, true, false); // skipOnBack: currentForm.setSkipOnBack(attributes.getBoolean(ATTRIBUTE_SKIP_ON_BACK, Form.DEFAULT_SKIP_ON_BACK)); //Activate this subtree parser: activate(); //!!! } // Within a form... else if(currentForm != null) { // Children of <Form> (fields & triggers)... // <Choice> if(qName.equals(TAG_CHOICE)) { ChoiceField choice = new ChoiceField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), attributes.getValue(ATTRIBUTE_FIELD_VALUE), !openFields.isEmpty() && openFields.peek() instanceof ChoiceField ? 
(ChoiceField) openFields.peek() : null, // old currentChoice becomes the parent (if it is null that's ok) readCaption(attributes, TAG_CHOICE, false)); newField(choice, attributes); // noColumn: choice.setNoColumn(attributes.getBoolean(ATTRIBUTE_FIELD_NO_COLUMN, Field.DEFAULT_NO_COLUMN)); // Other attributes: choice.setImageRelativePath(attributes.getString(ATTRIBUTE_FIELD_IMG, null, false, false)); choice.setAnswerDesc(attributes.getString(ATTRIBUTE_FIELD_ANSWER_DESC, null, false, false)); choice.setQuestionDesc(attributes.getString(ATTRIBUTE_FIELD_QUESTION_DESC, null, false, false)); choice.setAltText(attributes.getString(ATTRIBUTE_FIELD_ALT, null, false, false)); choice.setCols(attributes.getInteger(ATTRIBUTE_CHOICE_COLS, ChoiceField.DEFAULT_NUM_COLS)); choice.setRows(attributes.getInteger(ATTRIBUTE_CHOICE_ROWS, ChoiceField.DEFAULT_NUM_ROWS)); choice.setCrossed(attributes.getBoolean("crossed", ChoiceField.DEFAULT_CROSSED)); choice.setCrossColor(attributes.getString("crossColor", ChoiceField.DEFAULT_CROSS_COLOR, true, false)); } // <Location> else if(qName.equals(TAG_LOCATION)) { newLocation(new LocationField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_LOCATION, false)), attributes); } // <Photo> else if(qName.equals(TAG_PHOTO)) { PhotoField photoField = new PhotoField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_PHOTO, false)); newMediaField(photoField, attributes); photoField.setUseNativeApp(attributes.getBoolean("useNativeApp", PhotoField.DEFAULT_USE_NATIVE_APP)); // Camera options (only used when useNativeApp=false): photoField.setUseFrontFacingCamera(attributes.getBoolean("useFrontCamera", PhotoField.DEFAULT_USE_FRONT_FACING_CAMERA)); String flashText = attributes.getValue("flash"); PhotoField.FlashMode flash = PhotoField.DEFAULT_FLASH_MODE; if(flashText != null && !flashText.isEmpty()) { flashText = flashText.trim(); if(flashText.equalsIgnoreCase("on") || flashText.equalsIgnoreCase("always") || flashText.equalsIgnoreCase("true")) flash = PhotoField.FlashMode.ON; else if(flashText.equalsIgnoreCase("auto")) flash = PhotoField.FlashMode.AUTO; else if(flashText.equalsIgnoreCase("off") || flashText.equalsIgnoreCase("never") || flashText.equalsIgnoreCase("false")) flash = PhotoField.FlashMode.OFF; } photoField.setFlashMode(flash); // Custom buttons (only used when useNativeApp=false): photoField.setCaptureButtonImageRelativePath(attributes.getString("captureImg", null, false, false)); photoField.setApproveButtonImageRelativePath(attributes.getString("approveImg", null, false, false)); photoField.setDiscardButtonImageRelativePath(attributes.getString("discardImg", null, false, false)); } // <Audio> else if(qName.equals(TAG_AUDIO)) { AudioField audioField = new AudioField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_AUDIO, false)); newMediaField(audioField, attributes); audioField.setStartRecImageRelativePath(attributes.getString("startRecImg", null, false, false)); audioField.setStopRecImageRelativePath(attributes.getString("stopRecImg", null, false, false)); } // <Orientation> else if(qName.equals(TAG_ORIENTATION)) { OrientationField orField = new OrientationField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_ORIENTATION, false)); newField(orField, attributes); orField.setStoreAzimuth(attributes.getBoolean("storeAzimuth", OrientationField.DEFAULT_STORE_AZIMUTH)); orField.setStoreAzimuth(attributes.getBoolean("storePitch", 
OrientationField.DEFAULT_STORE_PITCH)); orField.setStoreAzimuth(attributes.getBoolean("storeRoll", OrientationField.DEFAULT_STORE_ROLL)); } // <BelongsTo> else if(qName.equals(TAG_BELONGS_TO)) { newRelationship(new BelongsToField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID)), attributes); } // <LinksTo> else if(qName.equals(TAG_LINKS_TO)) { newRelationship(new LinksToField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID)), attributes); } // <Button> else if(qName.equals(TAG_BUTTON)) { ButtonField btn = new ButtonField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_BUTTON, true)); newField(btn, attributes); try { btn.setColumnType(attributes.getString(ATTRIBUTE_BUTTON_COLUMN, ButtonField.DEFAULT_COLUMN_TYPE.name(), true, false)); } catch(IllegalArgumentException iae) { throw new SAXException("Invalid '" + ATTRIBUTE_BUTTON_COLUMN + "' attribute value on <" + TAG_BUTTON + ">.", iae); } if(btn.getColumnType() == ButtonColumnType.DATETIME && !btn.isOptional()) addWarning("Button \"" + btn.getID() + "\" has a DateTime column but is not optional, this means the button will *have* to be pressed."); } // <Label> else if(qName.equals(TAG_LABEL)) { LabelField lbl = new LabelField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_LABEL, true)); newField(lbl, attributes); lbl.setTextSizeScale(attributes.getFloat(ATTRIBUTE_LABEL_SCALE, LabelField.DEFAULT_TEXT_SIZE_SCALE)); lbl.setCentered(attributes.getBoolean(ATTRIBUTE_LABEL_CENTERED, LabelField.DEFAULT_TEXT_CENTERED)); } // <Text> else if(qName.equals(TAG_TEXTFIELD)) { TextBoxField txtField = new TextBoxField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_TEXTFIELD, true)); newField(txtField, attributes); // first set general things like optionality (needed for getDefaultMinLength() below). // Deal with minimum & maximum length: if(!txtField.isOptional() && !attributes.contains(ATTRIBUTE_TEXT_MINLENGTH)) addWarning("Text field \"" + txtField.getID() + "\" is non-optional but no minimal length is defined, therefore the minimum will be set to " + TextBoxField.DEFAULT_MIN_LENGTH_NON_OPTIONAL + " character(s). 
It is recommended to use the '" + ATTRIBUTE_TEXT_MINLENGTH + "' attribute to set an appropriate minimum length explicitly."); txtField.setMinMaxLength( attributes.getInteger(ATTRIBUTE_TEXT_MINLENGTH, TextBoxField.GetDefaultMinLength(txtField.isOptional())), attributes.getInteger(ATTRIBUTE_TEXT_MAXLENGTH, TextBoxField.DEFAULT_MAX_LENGTH)); // Multi-line: txtField.setMultiline(attributes.getBoolean(ATTRIBUTE_TEXT_MULTILINE, TextBoxField.DEFAULT_MULTILINE)); // Initial value (must happen after min/maxLength are set): txtField.setInitialValue(attributes.getString(TextBoxField.GetDefaultInitialValue(txtField.isOptional()), false, true, ATTRIBUTE_FIELD_DEFAULTVALUE, ATTRIBUTE_FIELD_INITVALUE)); // Content types: txtField.setContent(attributes.getString(ATTRIBUTE_TEXT_CONTENT, TextBoxField.DEFAULT_CONTENT.name(), true, false)); // Regular expression pattern (to check input against): txtField.setRegexPattern(attributes.getString(ATTRIBUTE_TEXT_REGEX, null, false, false)); // Auto capitalisation: txtField.setCapitalisation(attributes.getString(ATTRIBUTE_TEXT_CAPITALISATION, TextBoxField.DEFAULT_CAPITALISATION.name(), true, false)); } // <Check> else if(qName.equals(TAG_CHECKBOX)) { CheckBoxField chbxField = new CheckBoxField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_CHECKBOX, true)); chbxField.setInitialValue(attributes.getBoolean(ATTRIBUTE_FIELD_DEFAULTVALUE, CheckBoxField.DEFAULT_INITIAL_VALUE)); newField(chbxField, attributes); } // <List> or <MultiList> (these are in fact just synonyms, but we added both to avoid confusing novice form designers with terminoly that refers to a multi-level list when they only need a flat list) else if(qName.equals(TAG_LIST) || qName.equals(TAG_MULTILIST)) { MultiListField ml = new MultiListField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, qName.equals(TAG_LIST) ? TAG_LIST : TAG_MULTILIST, true, true)); ml.setPreSelect(attributes.getBoolean(ATTRIBUTE_LIST_PRESELECT, MultiListField.DEFAULT_PRESELECT)); newField(ml, attributes); currentListItem = ml.getItemsRoot(); } // <Page> (Field composite) else if(qName.equals(TAG_PAGE)) { newPage(attributes); } // <Trigger> else if(qName.equals(TAG_TRIGGER)) { newTrigger(new Trigger(), attributes); } // Add future field types here // ... 
// Tags appearing within Field tags else if(!openFields.isEmpty()) { Field currentField = openFields.peek(); // <Argument> if(qName.equals(TAG_ARGUMENT)) { parseArgument(currentField, attributes); } // <Item> (contained within <List> or <MultiList>, and maybe other things later) else if(qName.equals(TAG_LISTITEM)) { if(currentListItem != null) { currentListItem = new MultiListItem(currentListItem, attributes.getRequiredString(TAG_LISTITEM, ATTRIBUTE_FIELD_VALUE, false, true)); if(attributes.getBoolean(ATTRIBUTE_LISTITEM_DEFAULT, false)) { if(currentListItem.getParent().getDefaultChild() == null) currentListItem.getParent().setDefaultChild(currentListItem); else addWarning("More than 1 item marked as default within one of the (sub)lists of MultiListField " + currentListItem.getField().getID() + ", using 1st item marked as default as the default for the list."); } } else addWarning("Ignored <" + TAG_LISTITEM + "> element occuring outside <" + TAG_LIST + "> or <" + TAG_MULTILIST + ">."); } // <Constraint> (contained within <BelongsTo> or <LinksTo>, and maybe other things later) else if(qName.equals(TAG_CONSTRAINT)) { if(currentField instanceof Relationship) { Relationship currentRelationship = (Relationship) currentField; String columnName = attributes.getRequiredString(getRelationshipTag(currentRelationship), ATTRIBUTE_CONSTRAINT_COLUMN, true, false); // Comparison attribute name: String comparisonAttrib = null; for(String compStr : RuleConstraint.COMPARISON_STRINGS) if(attributes.contains(compStr)) { comparisonAttrib = compStr; break; } if(comparisonAttrib == null) addWarning("<" + TAG_CONSTRAINT + "> does not contain an comparison attribute (i.e. 1 of: " + StringUtils.join(RuleConstraint.COMPARISON_STRINGS, ", ") + ")."); else owner.addRelationshipConstraint(currentRelationship, columnName, comparisonAttrib, attributes.getRequiredString(getRelationshipTag(currentRelationship), comparisonAttrib, true, true)); } // <Constraint> in something else than <BelongsTo> or <LinksTo> else addWarning("Ignored <" + TAG_CONSTRAINT + "> element occuring outside <" + TAG_BELONGS_TO + "> or <" + TAG_LINKS_TO + ">."); } // <?> within field else { addWarning("Ignored unrecognised or invalidly placed element <" + qName + "> occuring within field with id \"" + currentField.getID() + "\"."); } } // Tags appearing within <Trigger> else if(openTrigger != null) { // <Argument> if(qName.equals(TAG_ARGUMENT)) { parseArgument(openTrigger, attributes); } // <?> within trigger else { addWarning("Ignored unrecognised or invalidly placed element <" + qName + "> occuring within <" + TAG_TRIGGER + ">."); } } // <?> within <Form> else { addWarning("Ignored unrecognised or invalidly placed element <" + qName + "> occuring within <" + TAG_FORM + ">."); } } // <?> outside of <Form> (shouldn't happen) else { throw new IllegalArgumentException("FormParser only deals with elements that are equal to, or contained within <" + TAG_FORM + ">."); } } /** * @param attributes may be null for implicit pages (i.e. the one for a singlePage form) * @throws SAXException */ private void newPage(XMLAttributes attributes) throws SAXException { if(!openFields.isEmpty()) throw new SAXException("<Page> elements must be apprear directly within <Form> and cannot be nested."); Page newPage = new Page(currentForm, attributes == null ? 
currentForm.getID() + "_page" : attributes.getString(currentForm.getID() + "_page_" + currentForm.getFields().size(), true, false, ATTRIBUTE_FIELD_ID)); newField(newPage, attributes); } private void newLocation(LocationField locField, XMLAttributes attributes) throws Exception { newField(locField, attributes); // Location type: String type = attributes.getValue("type"); if("Any".equalsIgnoreCase(type)) locField.setType(LocationField.TYPE_ANY); else if("GPS".equalsIgnoreCase(type)) locField.setType(LocationField.TYPE_GPS); else if("Network".equalsIgnoreCase(type)) locField.setType(LocationField.TYPE_NETWORK); else if(type != null) // unrecognised location type addWarning("Unknown Location type (" + type + ")."); // When to start listening for a location: String startWith = attributes.getString(ATTRIBUTE_LOCATION_START_WITH, null, true, false); if("field".equalsIgnoreCase(startWith)) locField.setStartWith(LocationField.START_WITH.FIELD); else if("page".equalsIgnoreCase(startWith)) { if(getCurrentPage() != null) locField.setStartWith(LocationField.START_WITH.PAGE); else { // told to start on page, but there is no page! Start with field instead (assume the user was trying to avoid "form") addWarning("Location field specified to start with page, but no containing page was found. Location detection will start with the field instead."); locField.setStartWith(LocationField.START_WITH.FIELD); } } else if("form".equalsIgnoreCase(startWith) || attributes.getBoolean(ATTRIBUTE_LOCATION_START_WITH_FORM, false)) locField.setStartWith(LocationField.START_WITH.FORM); else if (startWith != null) // unknown setting, default will be used addWarning("Unknown location field start preference (" + startWith +")."); locField.setWaitAtField(attributes.getBoolean("waitAtField", LocationField.DEFAULT_WAIT_AT_FIELD)); locField.setTimeoutS(attributes.getInteger("timeout", LocationField.DEFAULT_TIMEOUT_S)); locField.setMaxAgeS(attributes.getInteger("maxAge", LocationField.DEFAULT_MAX_AGE_S)); locField.setMaxAccuracyRadius(attributes.getFloat("maxAccuracyRadius", LocationField.DEFAULT_MAX_ACCURACY_RADIUS)); locField.setUseBestNonQualifyingLocationAfterTimeout(attributes.getBoolean("useBestKnownLocationOnTimeout", LocationField.DEFAULT_USE_BEST_NON_QUALIFYING_LOCATION_AFTER_TIMEOUT)); // Storage settings: locField.setDoublePrecision(attributes.getBoolean("doublePrecision", LocationField.DEFAULT_DOUBLE_PRECISION)); locField.setStoreAltitude(attributes.getBoolean("storeAltitude", LocationField.DEFAULT_STORE_ALTITUDE)); locField.setStoreBearing(attributes.getBoolean("storeBearing", LocationField.DEFAULT_STORE_BEARING)); locField.setStoreSpeed(attributes.getBoolean("storeSpeed", LocationField.DEFAULT_STORE_SPEED)); locField.setStoreAccuracy(attributes.getBoolean("storeAccuracy", LocationField.DEFAULT_STORE_ACCURACY)); locField.setStoreProvider(attributes.getBoolean("storeProvider", LocationField.DEFAULT_STORE_PROVIDER)); } private void newRelationship(Relationship relationship, XMLAttributes attributes) throws Exception { newField(relationship, attributes); // Remember form name (to resolved later): owner.addRelationship(relationship, attributes.getRequiredString(getRelationshipTag(relationship), ATTRIBUTE_RELATIONSHIP_FORM, true, false)); // Other attributes: relationship.setHoldForeignRecord(attributes.getBoolean(ATTRIBUTE_RELATIONSHIP_HOLD, Relationship.DEFAULT_HOLD_FOREIGN_RECORD)); // TODO ? updateStartTimeUponLeave, saveBeforeFormChange, discardBeforeLeave (only for linksTo) ? 
} private void newMediaField(MediaField ma, XMLAttributes attributes) throws SAXException { newField(ma, attributes); ma.setMax(attributes.getInteger(ATTRIBUTE_MEDIA_MAX , MediaField.DEFAULT_MAX)); if(attributes.getValue(ATTRIBUTE_DISABLE_FIELD) != null) mediaAttachToDisableId.put(ma, attributes.getValue(ATTRIBUTE_DISABLE_FIELD).trim().toUpperCase()); // upper cased, for case insensitivity } /** * Adds field to current currentForm or currentPage, sets optionalness, remembers id & jump & reads various Field attributes * * @param field the Field object * @param attributes may be null for implicit fields (fields that are inserted by the parser but do not explicitly appear in the XML, e.g. the Page for a singlePage form) * @throws SAXException */ private void newField(Field field, XMLAttributes attributes) throws SAXException { try { // Warn about IDs starting with '_': //TODO test if no invalid XML chars if(field.getID().startsWith("_")) { // For really stupid cases ;-): for(EndField ef : EndField.GetEndFields(currentForm)) if(ef.getID().equals(field.getID())) throw new SAXException(field.getID() + " is a reserved ID, don't use it for user-defined fields."); addWarning("Please avoid field IDs starting with '_' (" + field.getID() + ")."); } // Get current page if there is one: Page currentPage = getCurrentPage(); // If the field is a root field (note: even elements on a page are root fields)... if(field.isRoot()) { // Add it to the form or page: if(currentPage == null) { // field is top-level (directly contained within the form, and not in a page first)... currentForm.addField(field); // ... and therefore it can be jumped to, so remember its ID (upper cased, for case insensitivity): if(idToField.put(field.getID().toUpperCase(), field) != null) throw new SAXException("Duplicate field ID '" + field.getID() + "' in Form '" + currentForm.getID() + "'! (Note: field and form IDs are case insensitive)"); } else // the field is contained by a page: currentPage.addField(field); if(attributes != null) { // Set optionalness: String optText = attributes.getValue(ATTRIBUTE_FIELD_OPTIONAL); boolean opt = currentPage == null ? Field.DEFAULT_OPTIONAL : currentPage.isOptional(); // use default optionalness or that of the containing page if(optText != null && !optText.trim().isEmpty()) { optText = optText.trim(); if("always".equalsIgnoreCase(optText) || Boolean.TRUE.toString().equalsIgnoreCase(optText)) opt = true; else if("notIfReached".equalsIgnoreCase(optText)) // deprecated, but still parsed on all format versions (for backwards compatibility) opt = false; else if("never".equalsIgnoreCase(optText) || Boolean.FALSE.toString().equalsIgnoreCase(optText)) opt = false; } field.setOptional(opt); // Show on create/edit: field.setShowOnCreate(attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_ON_CREATE, Field.DEFAULT_SHOW_ON_CREATE)); field.setShowOnEdit(attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_ON_EDIT, Field.DEFAULT_SHOW_ON_EDIT)); // Set editable (inherit from page if on page): field.setEditable(attributes.getBoolean(ATTRIBUTE_FIELD_EDITABLE, currentPage == null ? 
Field.DEFAULT_EDITABLE : currentPage.isEditable())); } } // Read various optional Field attributes: if(attributes != null) { // Remember jumps (always "intra-Form", and not leaving a page unless this type of field is allowed to do that): if(attributes.getValue(ATTRIBUTE_FIELD_JUMP) != null) { if(currentPage == null || field.canJumpFromPage()) jumpSourceToJumpTargetId.put(field, attributes.getValue(ATTRIBUTE_FIELD_JUMP).trim().toUpperCase()); // trimmed (because id's on fields are too) & upper cased (for case insensitivity) else if(currentPage != null) addWarning("Field \"" + field.getID() + "\" tries to jump away from the page, but is not allowed."); } // Skip on back: field.setSkipOnBack(attributes.getBoolean(ATTRIBUTE_SKIP_ON_BACK, Field.DEFAULT_SKIP_ON_BACK)); // Background colour: field.setBackgroundColor(attributes.getString(ATTRIBUTE_FIELD_BACKGROUND_COLOR, Field.DEFAULT_BACKGROUND_COLOR, true, false)); // Which buttons are allowed to show... // Mode-specific: field.setShowControlOnMode(Control.BACK, Mode.CREATE, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_BACK_ON_CREATE, Field.DEFAULT_SHOW_BACK)); field.setShowControlOnMode(Control.BACK, Mode.EDIT, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_BACK_ON_EDIT, Field.DEFAULT_SHOW_BACK)); field.setShowControlOnMode(Control.CANCEL, Mode.CREATE, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_CANCEL_ON_CREATE, Field.DEFAULT_SHOW_CANCEL)); field.setShowControlOnMode(Control.CANCEL, Mode.EDIT, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_CANCEL_ON_EDIT, Field.DEFAULT_SHOW_CANCEL)); field.setShowControlOnMode(Control.FORWARD, Mode.CREATE, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_FORWARD_ON_CREATE, Field.DEFAULT_SHOW_FORWARD)); field.setShowControlOnMode(Control.FORWARD, Mode.EDIT, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_FORWARD_ON_EDIT, Field.DEFAULT_SHOW_FORWARD)); // Across all modes (overrules mode-specific settings) + with backwards compatibility for v1.0 forms which may have shopBack/showCancel/showForward at the form level: if(attributes.contains(ATTRIBUTE_FIELD_SHOW_BACK) || v1xFormShowBack != null) field.setShowBack((v1xFormShowBack != null ? v1xFormShowBack : true) && attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_BACK, Field.DEFAULT_SHOW_BACK)); if(attributes.contains(ATTRIBUTE_FIELD_SHOW_CANCEL) || v1xFormShowCancel != null) field.setShowCancel((v1xFormShowCancel != null ? v1xFormShowCancel : true) && attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_CANCEL, Field.DEFAULT_SHOW_CANCEL)); if(attributes.contains(ATTRIBUTE_FIELD_SHOW_FORWARD) || v1xFormShowForward != null) field.setShowForward((v1xFormShowForward != null ? v1xFormShowForward : true) && attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_FORWARD, Field.DEFAULT_SHOW_FORWARD)); } // Remember current field: openFields.push(field); //!!! 
} catch(Exception e) { throw new SAXException("Error on parsing field '" + field.getID() + "'", e); } } private void newTrigger(Trigger trigger, XMLAttributes attributes) { // Parse the attributes String keys = attributes.getString(null, true, false, ATTRIBUTE_TRIGGER_KEY, ATTRIBUTE_TRIGGER_KEYS); if(keys != null) for(String k : keys.split(Trigger.KEY_SEPARATOR)) { try { trigger.addKey(Trigger.Key.valueOf(k.toUpperCase())); } catch(Exception e) { addWarning("Unrecognised Trigger key: " + k); } } trigger.setFixedTimer(attributes.getInteger(ATTRIBUTE_TRIGGER_FIXED_TIMER, Trigger.NO_TIMEOUT)); if(attributes.contains(ATTRIBUTE_TRIGGER_JUMP)) // Remember jump (always "intra-Form") jumpSourceToJumpTargetId.put(trigger, attributes.getValue(ATTRIBUTE_TRIGGER_JUMP).trim().toUpperCase()); // upper cased, for insensitivity // Add the trigger to the current Page Page currentPage = getCurrentPage(); if(currentPage != null) currentPage.addTrigger(trigger); // else add the triggers to the Form else currentForm.addTrigger(trigger); // Remember trigger (so arguments can be added): openTrigger = trigger; } private void parseArgument(JumpSource source, XMLAttributes tagAttributes) throws Exception { if(!source.hasNextFieldArguements()) source.setNextFieldArguments(new FieldParameters()); source.getNextFieldArguments().put( tagAttributes.getRequiredString(TAG_ARGUMENT, ATTRIBUTE_ARGUMENT_PARAM, true, false), tagAttributes.getRequiredString(TAG_ARGUMENT, ATTRIBUTE_ARGUMENT_VALUE, false, true)); // TODO Let Field instance validate param & value? } private Page getCurrentPage() { // Iterate through the stack from the top down and look for a Page: for(int f = openFields.size() - 1; f >= 0; f--) if(openFields.get(f) instanceof Page) return (Page) openFields.get(f); // No current page: return null; } protected void closePage(Page page) { /* The 'optional' attribute of a page is only used to inherit from by contained fields (see newField()), * at runtime it doesn't have meaning in itself because the page does not have a column of its own and * whether or not the page can be skipped or left is to be decided based on the optionalness and acquired * values of the contained fields. * Because of this the optionalness of the page is reset to ALWAYS after all contained fields are parsed. 
*/ page.setOptional(true); } @Override protected void parseEndElement(String uri, String localName, String qName) throws SAXException { // Close field: </Choice>, </Location>, </Photo>, </Audio>, </Orientation>, </BelongsTo>, </LinksTo>, </Button>, </Label>, </Textbox>, </Checkbox>, </List>, </MultiList>, </Page> if( !openFields.isEmpty() && ( qName.equals(TAG_CHOICE) || qName.equals(TAG_LOCATION) || qName.equals(TAG_PHOTO) || qName.equals(TAG_AUDIO) || qName.equals(TAG_ORIENTATION) || qName.equals(TAG_BELONGS_TO) || qName.equals(TAG_LINKS_TO) || qName.equals(TAG_BUTTON) || qName.equals(TAG_LABEL) || qName.equals(TAG_TEXTFIELD) || qName.equals(TAG_CHECKBOX) || qName.equals(TAG_LIST) || qName.equals(TAG_MULTILIST) || qName.equals(TAG_PAGE))) { Field currentField = openFields.pop(); // pop the field // </Choice> if(qName.equals(TAG_CHOICE) && currentField instanceof ChoiceField) { ChoiceField currentChoice = (ChoiceField) currentField; if(currentChoice.isRoot() && currentChoice.isLeaf()) throw new SAXException("Root choices need at least 1 child (but 2 or more children probably makes more sense)."); } // </Page> else if(qName.equals(TAG_PAGE) && currentField instanceof Page) closePage((Page) currentField); } // </Item>, </List> or </MultiList> else if(qName.equals(TAG_LISTITEM) || qName.equals(TAG_LIST) || qName.equals(TAG_MULTILIST)) { if(currentListItem.isRoot() && currentListItem.isLeaf()) throw new SAXException("A list needs at least 1 <Item> (but 2 or more probably makes more sense)."); if(!currentListItem.isLeaf() && currentListItem.getDefaultChild() == null) currentListItem.setDefaultChild(currentListItem.getChildren().get(0)); // first child become default currentListItem = currentListItem.getParent(); // parent (possibly null in case of root) becomes currentListItem } // </Trigger> else if(qName.equals(TAG_TRIGGER)) { openTrigger = null; } // </Form> else if(qName.equals(TAG_FORM)) { // close page in case of a singePage form: Page currentPage = getCurrentPage(); if(currentPage != null) { closePage(currentPage); openFields.pop(); } // Resolve/set currentForm start field: Field startField = currentForm.getFields().get(0); // first field is the default start field if(formStartFieldId != null) // try with field specified by ID in <Form startField="..."> (may be null) { Field specifiedStartField = currentForm.getField(formStartFieldId); // uses equalsIgnoreCase() if(specifiedStartField == null) //TODO throw exception instead addWarning("The specified start field (\"" + formStartFieldId + "\") of currentForm \"" + currentForm.getName() + "\" does not exist, using first field instead."); else startField = specifiedStartField; } currentForm.setStartField(startField); // Add EndField instances to idToField map (these don't need to be added as actual fields to the form itself) for(EndField endF : EndField.GetEndFields(currentForm)) idToField.put(endF.getID().toUpperCase(), endF); // upper cased, for case insensitivity (they should already be upper case, but just in case...) // Resolve jumps... for(Entry<JumpSource, String> jump : jumpSourceToJumpTargetId.entrySet()) { Field target = idToField.get(jump.getValue()); if(target == null) addWarning("Cannot resolve jump ID '" + jump.getValue() + "' (case insensitive)."); else jump.getKey().setJump(target); // set jump pointer (to a field object) } // Resolve disabling of Choices by MediaAttachments... 
for(Entry<MediaField, String> disable : mediaAttachToDisableId.entrySet()) { Field target = idToField.get(disable.getValue()); if(target == null) addWarning("Cannot resolve disable field ID '" + disable.getValue() + "' (case insensitive)."); else disable.getKey().setDisableChoice((ChoiceField) target); } // Deactivate this subtree parser: deactivate(); //will call reset() (+ warnings will be copied to owner) } } private String readCaption(XMLAttributes tagAttributes, String tag, boolean required) throws Exception { return readCaption(tagAttributes, tag, required, false); // singular by default } private String readCaption(XMLAttributes tagAttributes, String tag, boolean required, boolean plural) throws Exception { if(required) return tagAttributes.getRequiredString(tag, false, true, plural ? ATTRIBUTE_FIELD_CAPTION_PLURAL : ATTRIBUTE_FIELD_CAPTION_SINGULAR); else return tagAttributes.getString(null, false, true, plural ? ATTRIBUTE_FIELD_CAPTION_PLURAL : ATTRIBUTE_FIELD_CAPTION_SINGULAR); } private String getRelationshipTag(Relationship relationship) { if(relationship instanceof BelongsToField) return TAG_BELONGS_TO; if(relationship instanceof LinksToField) return TAG_LINKS_TO; throw new IllegalArgumentException("Unsupported relationship type"); } @Override protected boolean isSingleUse() { return false; } }
Library/src/uk/ac/ucl/excites/sapelli/collector/load/parse/FormParser.java
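The parser whose contents end above resolves jumps in two passes: while fields are being parsed it only records target IDs as strings (jumpSourceToJumpTargetId, idToField), and at </Form> it turns those strings into object references. A stripped-down illustration of that deferred-resolution pattern follows; the class and member names here are illustrative only and are not part of the Sapelli API.

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch (not Sapelli code): deferred resolution of forward references,
// mirroring how jumpSourceToJumpTargetId / idToField are used in the parser above.
class JumpResolutionSketch
{
	static class Field { final String id; Field jumpTarget; Field(String id) { this.id = id; } }

	private final Map<String, Field> idToField = new HashMap<>();    // filled as fields are parsed
	private final Map<Field, String> pendingJumps = new HashMap<>(); // source -> target id (upper-cased)

	void onFieldParsed(Field f, String jumpAttribute)
	{
		idToField.put(f.id.toUpperCase(), f);
		if(jumpAttribute != null)
			pendingJumps.put(f, jumpAttribute.trim().toUpperCase()); // target may not exist yet
	}

	void onFormClosed()
	{	// second pass: all IDs are known now, so string references can become object references
		for(Map.Entry<Field, String> jump : pendingJumps.entrySet())
		{
			Field target = idToField.get(jump.getValue());
			if(target != null)
				jump.getKey().jumpTarget = target;
			// else: warn about an unresolvable ID, as the parser above does
		}
	}
}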
/** * Sapelli data collection platform: http://sapelli.org * * Copyright 2012-2014 University College London - ExCiteS group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.ac.ucl.excites.sapelli.collector.load.parse; import java.util.HashMap; import java.util.Hashtable; import java.util.Map.Entry; import java.util.Stack; import org.xml.sax.SAXException; import uk.ac.ucl.excites.sapelli.collector.control.Controller.Mode; import uk.ac.ucl.excites.sapelli.collector.model.Field; import uk.ac.ucl.excites.sapelli.collector.model.FieldParameters; import uk.ac.ucl.excites.sapelli.collector.model.Form; import uk.ac.ucl.excites.sapelli.collector.model.Form.AudioFeedback; import uk.ac.ucl.excites.sapelli.collector.model.JumpSource; import uk.ac.ucl.excites.sapelli.collector.model.Project; import uk.ac.ucl.excites.sapelli.collector.model.Trigger; import uk.ac.ucl.excites.sapelli.collector.model.fields.AudioField; import uk.ac.ucl.excites.sapelli.collector.model.fields.BelongsToField; import uk.ac.ucl.excites.sapelli.collector.model.fields.ButtonField; import uk.ac.ucl.excites.sapelli.collector.model.fields.ButtonField.ButtonColumnType; import uk.ac.ucl.excites.sapelli.collector.model.fields.CheckBoxField; import uk.ac.ucl.excites.sapelli.collector.model.fields.ChoiceField; import uk.ac.ucl.excites.sapelli.collector.model.fields.EndField; import uk.ac.ucl.excites.sapelli.collector.model.fields.LabelField; import uk.ac.ucl.excites.sapelli.collector.model.fields.LinksToField; import uk.ac.ucl.excites.sapelli.collector.model.fields.LocationField; import uk.ac.ucl.excites.sapelli.collector.model.fields.MediaField; import uk.ac.ucl.excites.sapelli.collector.model.fields.MultiListField; import uk.ac.ucl.excites.sapelli.collector.model.fields.MultiListField.MultiListItem; import uk.ac.ucl.excites.sapelli.collector.model.fields.OrientationField; import uk.ac.ucl.excites.sapelli.collector.model.fields.Page; import uk.ac.ucl.excites.sapelli.collector.model.fields.PhotoField; import uk.ac.ucl.excites.sapelli.collector.model.fields.Relationship; import uk.ac.ucl.excites.sapelli.collector.model.fields.TextBoxField; import uk.ac.ucl.excites.sapelli.collector.ui.ControlsUI.Control; import uk.ac.ucl.excites.sapelli.shared.util.StringUtils; import uk.ac.ucl.excites.sapelli.shared.util.xml.SubtreeParser; import uk.ac.ucl.excites.sapelli.shared.util.xml.XMLAttributes; import uk.ac.ucl.excites.sapelli.storage.model.Schema; import uk.ac.ucl.excites.sapelli.storage.queries.constraints.RuleConstraint; /** * A {@link SubtreeParser} for <Form>s * * @author mstevens */ public class FormParser extends SubtreeParser<ProjectParser> { // STATICS-------------------------------------------------------- //TAGS static private final String TAG_FORM = "Form"; static private final String TAG_CHOICE = "Choice"; static private final String TAG_AUDIO = "Audio"; static private final String TAG_PHOTO = "Photo"; static private final String TAG_LOCATION = "Location"; static private final String TAG_ORIENTATION = "Orientation"; static public final 
String TAG_BELONGS_TO = "BelongsTo"; static public final String TAG_LINKS_TO = "LinksTo"; static private final String TAG_CONSTRAINT = "Constraint"; static private final String TAG_BUTTON = "Button"; static private final String TAG_LABEL = "Label"; static private final String TAG_TEXTFIELD = "Text"; static private final String TAG_CHECKBOX = "Check"; static private final String TAG_LIST = "List"; static private final String TAG_MULTILIST = "MultiList"; static private final String TAG_LISTITEM = "Item"; static private final String TAG_PAGE = "Page"; static private final String TAG_TRIGGER = "Trigger"; static private final String TAG_ARGUMENT = "Argument"; //ATTRIBUTES static private final String ATTRIBUTE_FORM_NAME = "name"; static private final String ATTRIBUTE_FORM_ID = "id"; static private final String ATTRIBUTE_FORM_SCHEMA_ID = Schema.V1X_ATTRIBUTE_SCHEMA_ID; static private final String ATTRIBUTE_FORM_SCHEMA_VERSION = Schema.V1X_ATTRIBUTE_SCHEMA_VERSION; static private final String ATTRIBUTE_FORM_STORE_END_TIME = "storeEndTime"; static private final String ATTRIBUTE_FORM_START_FIELD = "startField"; static private final String ATTRIBUTE_FORM_END = "end"; // 1.x compatibility static private final String ATTRIBUTE_FORM_NEXT = "next"; static private final String ATTRIBUTE_FORM_END_SOUND = "endSound"; // 1.x compatibility static private final String ATTRIBUTE_FORM_SAVE_SOUND = "saveSound"; static private final String ATTRIBUTE_FORM_END_VIBRATE = "endVibrate"; // 1.x compatibility static private final String ATTRIBUTE_FORM_SAVE_VIBRATE = "saveVibrate"; static private final String ATTRIBUTE_FORM_FORWARD_BUTTON_IMG = "forwardButtonImg"; static private final String ATTRIBUTE_FORM_CANCEL_BUTTON_IMG = "cancelButtonImg"; static private final String ATTRIBUTE_FORM_BACK_BUTTON_IMG = "backButtonImg"; static private final String ATTRIBUTE_FORM_FORWARD_BUTTON_DESC = "forwardButtonDesc"; static private final String ATTRIBUTE_FORM_CANCEL_BUTTON_DESC = "cancelButtonDesc"; static private final String ATTRIBUTE_FORM_BACK_BUTTON_DESC = "backButtonDesc"; static private final String ATTRIBUTE_FORM_BUTTON_BACKGROUND_COLOR = "buttonBackgroundColor"; static private final String ATTRIBUTE_FORM_SHORTCUT_IMAGE = "shortcutImage"; static private final String ATTRIBUTE_FORM_CLICK_ANIMATION = "clickAnimation"; static private final String ATTRIBUTE_FORM_ANIMATION = "animation"; // 1.x compatibility, the same as clickAnimation static private final String ATTRIBUTE_FORM_SCREEN_TRANSITION = "screenTransition"; static private final String ATTRIBUTE_FORM_AUDIO_FEEDBACK = "audioFeedback"; static private final String ATTRIBUTE_FORM_OBFUSCATE_MEDIA_FILES = "obfuscateMediaFiles"; static private final String ATTRIBUTE_FORM_SINGLE_PAGE = "singlePage"; static private final String ATTRIBUTE_SKIP_ON_BACK = "skipOnBack"; // used on both FORM and FIELD static private final String ATTRIBUTE_FIELD_ID = "id"; static private final String ATTRIBUTE_FIELD_JUMP = "jump"; static private final String ATTRIBUTE_FIELD_OPTIONAL = "optional"; static private final String ATTRIBUTE_FIELD_NO_COLUMN = "noColumn"; static private final String ATTRIBUTE_FIELD_EDITABLE = "editable"; static private final String ATTRIBUTE_FIELD_ALT = "alt"; static private final String ATTRIBUTE_FIELD_IMG = "img"; static private final String ATTRIBUTE_FIELD_ANSWER_DESC = "answerDesc"; static private final String ATTRIBUTE_FIELD_QUESTION_DESC = "questionDesc"; static private final String ATTRIBUTE_FIELD_CAPTION = "caption"; static private final String ATTRIBUTE_FIELD_CAPTIONS = 
"captions"; static private final String ATTRIBUTE_FIELD_LABEL = "label"; // synonym for caption static private final String ATTRIBUTE_FIELD_LABELS = "labels"; // synonym for captions static private final String[] ATTRIBUTE_FIELD_CAPTION_SINGULAR = { ATTRIBUTE_FIELD_CAPTION, ATTRIBUTE_FIELD_LABEL }; static private final String[] ATTRIBUTE_FIELD_CAPTION_PLURAL = { ATTRIBUTE_FIELD_CAPTION, ATTRIBUTE_FIELD_CAPTIONS, ATTRIBUTE_FIELD_LABEL, ATTRIBUTE_FIELD_LABELS }; static private final String ATTRIBUTE_FIELD_BACKGROUND_COLOR = "backgroundColor"; static private final String ATTRIBUTE_FIELD_SHOW_ON_CREATE = "showOnCreate"; static private final String ATTRIBUTE_FIELD_SHOW_ON_EDIT = "showOnEdit"; static private final String ATTRIBUTE_FIELD_SHOW_FORWARD = "showForward"; static private final String ATTRIBUTE_FIELD_SHOW_BACK_ON_CREATE = "showBackOnCreate"; static private final String ATTRIBUTE_FIELD_SHOW_BACK_ON_EDIT = "showBackOnEdit"; static private final String ATTRIBUTE_FIELD_SHOW_CANCEL = "showCancel"; static private final String ATTRIBUTE_FIELD_SHOW_CANCEL_ON_CREATE = "showCancelOnCreate"; static private final String ATTRIBUTE_FIELD_SHOW_CANCEL_ON_EDIT = "showCancelOnEdit"; static private final String ATTRIBUTE_FIELD_SHOW_FORWARD_ON_CREATE = "showForwardOnCreate"; static private final String ATTRIBUTE_FIELD_SHOW_FORWARD_ON_EDIT = "showForwardOnEdit"; static private final String ATTRIBUTE_FIELD_SHOW_BACK = "showBack"; static private final String ATTRIBUTE_FIELD_VALUE = "value"; static private final String ATTRIBUTE_FIELD_DEFAULTVALUE = "defaultValue"; static private final String ATTRIBUTE_FIELD_INITVALUE = "initialValue"; static private final String ATTRIBUTE_DISABLE_FIELD = "disableField"; static private final String ATTRIBUTE_CHOICE_ROWS = "rows"; static private final String ATTRIBUTE_CHOICE_COLS = "cols"; static private final String ATTRIBUTE_LOCATION_START_WITH = "startWith"; static private final String ATTRIBUTE_LOCATION_START_WITH_FORM = "startWithForm"; // deprecated in favour of enum above static private final String ATTRIBUTE_RELATIONSHIP_FORM = "form"; static private final String ATTRIBUTE_RELATIONSHIP_HOLD = "hold"; static private final String ATTRIBUTE_CONSTRAINT_COLUMN = "column"; static private final String ATTRIBUTE_TEXT_MINLENGTH = "minLength"; static private final String ATTRIBUTE_TEXT_MAXLENGTH = "maxLength"; static private final String ATTRIBUTE_TEXT_MULTILINE = "multiLine"; static private final String ATTRIBUTE_TEXT_CONTENT = "content"; static private final String ATTRIBUTE_TEXT_REGEX = "regex"; static private final String ATTRIBUTE_TEXT_CAPITALISATION = "autoCaps"; static private final String ATTRIBUTE_LABEL_SCALE = "scale"; static private final String ATTRIBUTE_LABEL_CENTERED = "centered"; static private final String ATTRIBUTE_LIST_PRESELECT = "preSelectDefault"; static private final String ATTRIBUTE_LISTITEM_DEFAULT = "default"; static private final String ATTRIBUTE_BUTTON_COLUMN = "column"; static private final String ATTRIBUTE_MEDIA_MAX = "max"; static private final String ATTRIBUTE_TRIGGER_KEY = "key"; static private final String ATTRIBUTE_TRIGGER_KEYS = "keys"; static private final String ATTRIBUTE_TRIGGER_FIXED_TIMER = "fixedTimer"; static private final String ATTRIBUTE_TRIGGER_JUMP = "jump"; static private final String ATTRIBUTE_ARGUMENT_PARAM = "param"; static private final String ATTRIBUTE_ARGUMENT_VALUE = "value"; // DYNAMICS------------------------------------------------------- private Project project; private Form currentForm; private String formStartFieldId; 
private Boolean v1xFormShowBack = null; private Boolean v1xFormShowCancel = null; private Boolean v1xFormShowForward = null; private Stack<Field> openFields; private Trigger openTrigger; private MultiListItem currentListItem; private HashMap<JumpSource, String> jumpSourceToJumpTargetId; private Hashtable<String, Field> idToField; private HashMap<MediaField, String> mediaAttachToDisableId; public FormParser(ProjectParser projectParser) { super(projectParser, TAG_FORM); this.project = projectParser.getProject(); this.openFields = new Stack<Field>(); this.jumpSourceToJumpTargetId = new HashMap<JumpSource, String>(); this.idToField = new Hashtable<String, Field>(); this.mediaAttachToDisableId = new HashMap<MediaField, String>(); } @Override public void reset() { currentForm = null; openFields.clear(); openTrigger = null; currentListItem = null; formStartFieldId = null; jumpSourceToJumpTargetId.clear(); idToField.clear(); mediaAttachToDisableId.clear(); v1xFormShowBack = null; v1xFormShowCancel = null; v1xFormShowForward = null; } @Override protected void parseStartElement(String uri, String localName, String qName, XMLAttributes attributes) throws Exception { // <Form> if(qName.equals(TAG_FORM)) { if(currentForm != null) throw new SAXException("Forms cannot be nested!"); String id = attributes.getRequiredString(TAG_FORM, true, false, ATTRIBUTE_FORM_ID, ATTRIBUTE_FORM_NAME); // "name" is v1.x syntax but still accepted in v2.0 (yet "id" is preferred) ProjectParser.Format format = owner.getFormat(); if(format == ProjectParser.Format.v1_x) { // Backwards compatibility if(project.getForms().isEmpty()) // only for 1st, and assumed only, currentForm { int schemaID = attributes.getRequiredInteger(TAG_FORM, ATTRIBUTE_FORM_SCHEMA_ID, "because this is a v1.x project"); int schemaVersion = attributes.getInteger(ATTRIBUTE_FORM_SCHEMA_VERSION, Schema.V1X_DEFAULT_SCHEMA_VERSION); project.setV1XSchemaInfo(schemaID, schemaVersion); //schemaID will be used as projectID } else throw new SAXException("Only single-Form v1.x projects are supported"); } currentForm = new Form(project, id); // the form will add itself to the project and take the next available form position // Shortcut image: currentForm.setShortcutImageRelativePath(attributes.getString(ATTRIBUTE_FORM_SHORTCUT_IMAGE, null, false, false)); // Next/end: try { currentForm.setNext(attributes.getString(Form.DEFAULT_NEXT.name(), true, false, ATTRIBUTE_FORM_NEXT, ATTRIBUTE_FORM_END)); } catch(IllegalArgumentException iae) { throw new SAXException("Invalid '" + ATTRIBUTE_FORM_NEXT + "' attribute value on <" + TAG_FORM + ">.", iae); } // Store end time?: currentForm.setStoreEndTime(attributes.getBoolean(ATTRIBUTE_FORM_STORE_END_TIME, Form.END_TIME_DEFAULT)); // Sound end vibration at the end of the currentForm: currentForm.setSaveSoundRelativePath(attributes.getString(null, false, false, ATTRIBUTE_FORM_SAVE_SOUND, ATTRIBUTE_FORM_END_SOUND)); // Get the sound path currentForm.setVibrateOnSave(attributes.getBoolean(Form.DEFAULT_VIBRATE, ATTRIBUTE_FORM_SAVE_VIBRATE, ATTRIBUTE_FORM_END_VIBRATE)); // Which buttons are allowed to show (deprecated in format >= 2): if(attributes.contains(ATTRIBUTE_FIELD_SHOW_BACK) || attributes.contains(ATTRIBUTE_FIELD_SHOW_CANCEL) || attributes.contains(ATTRIBUTE_FIELD_SHOW_FORWARD)) { if(format == ProjectParser.Format.v1_x) { v1xFormShowBack = attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_BACK, Form.V1X_DEFAULT_SHOW_BACK); v1xFormShowCancel = attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_CANCEL, Form.V1X_DEFAULT_SHOW_CANCEL); 
v1xFormShowForward = attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_FORWARD, Form.V1X_DEFAULT_SHOW_FORWARD); } else addWarning("Attributes '" + ATTRIBUTE_FIELD_SHOW_BACK + "', '" + ATTRIBUTE_FIELD_SHOW_CANCEL + "' & '" + ATTRIBUTE_FIELD_SHOW_FORWARD + "' are deprecated on <Form> in format >= 2."); } // Click Animation: currentForm.setClickAnimation(attributes.getBoolean(Form.DEFAULT_CLICK_ANIMATION, ATTRIBUTE_FORM_CLICK_ANIMATION, ATTRIBUTE_FORM_ANIMATION)); // Screen Transition: try { currentForm.setScreenTransition(attributes.getString(ATTRIBUTE_FORM_SCREEN_TRANSITION, Form.DEFAULT_SCREEN_TRANSITION.name(), true, false)); } catch(IllegalArgumentException iae) { addWarning("Invalid '" + ATTRIBUTE_FORM_SCREEN_TRANSITION + "' attribute value on <" + TAG_FORM + ">. Default Screen Transition is going to be used."); } // Add AudioFeedbakc: try { currentForm.setAudioFeedback(attributes.getString(ATTRIBUTE_FORM_AUDIO_FEEDBACK, Form.DEFAULT_AUDIO_FEEDBACK.name(), true, false)); if(currentForm.getAudioFeedback() != null && currentForm.getAudioFeedback() != AudioFeedback.NONE) addWarning("Older Android devices may require SpeechSynthesis Data Installer to be installed for text-to-speech to work"); } catch(IllegalArgumentException iae) { addWarning("Invalid '" + ATTRIBUTE_FORM_AUDIO_FEEDBACK + "' attribute value on <" + TAG_FORM + ">. Default Audio Feedback is going to be used."); } // Obfuscate Media Files: currentForm.setObfuscateMediaFiles(attributes.getBoolean(ATTRIBUTE_FORM_OBFUSCATE_MEDIA_FILES, Form.DEFAULT_OBFUSCATE_MEDIA_FILES)); // Control button images: currentForm.setBackButtonImageRelativePath(attributes.getString(ATTRIBUTE_FORM_BACK_BUTTON_IMG, null, false, false)); currentForm.setCancelButtonImageRelativePath(attributes.getString(ATTRIBUTE_FORM_CANCEL_BUTTON_IMG, null, false, false)); currentForm.setForwardButtonImageRelativePath(attributes.getString(ATTRIBUTE_FORM_FORWARD_BUTTON_IMG, null, false, false)); // ButtonField background colour: currentForm.setBackButtonDescription(attributes.getString(ATTRIBUTE_FORM_BACK_BUTTON_DESC, Form.DEFAULT_BACK_BUTTON_DESCRIPTION, false, false)); currentForm.setCancelButtonDescription(attributes.getString(ATTRIBUTE_FORM_CANCEL_BUTTON_DESC, Form.DEFAULT_CANCEL_BUTTON_DESCRIPTION, false, false)); currentForm.setForwardButtonDescription(attributes.getString(ATTRIBUTE_FORM_FORWARD_BUTTON_DESC, Form.DEFAULT_FORWARD_BUTTON_DESCRIPTION, false, false)); // ButtonField background colour: currentForm.setButtonBackgroundColor(attributes.getString(ATTRIBUTE_FORM_BUTTON_BACKGROUND_COLOR, Form.DEFAULT_BUTTON_BACKGROUND_COLOR, true, false)); // Single page form (all fields will be added to a single page): if(attributes.getBoolean(Form.DEFAULT_SINGLE_PAGE, ATTRIBUTE_FORM_SINGLE_PAGE)) newPage(null); // Start field: formStartFieldId = attributes.getString(ATTRIBUTE_FORM_START_FIELD, null, true, false); // skipOnBack: currentForm.setSkipOnBack(attributes.getBoolean(ATTRIBUTE_SKIP_ON_BACK, Form.DEFAULT_SKIP_ON_BACK)); //Activate this subtree parser: activate(); //!!! } // Within a form... else if(currentForm != null) { // Children of <Form> (fields & triggers)... // <Choice> if(qName.equals(TAG_CHOICE)) { ChoiceField choice = new ChoiceField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), attributes.getValue(ATTRIBUTE_FIELD_VALUE), !openFields.isEmpty() && openFields.peek() instanceof ChoiceField ? 
(ChoiceField) openFields.peek() : null, // old currentChoice becomes the parent (if it is null that's ok) readCaption(attributes, TAG_CHOICE, false)); newField(choice, attributes); // noColumn: choice.setNoColumn(attributes.getBoolean(ATTRIBUTE_FIELD_NO_COLUMN, Field.DEFAULT_NO_COLUMN)); // Other attributes: choice.setImageRelativePath(attributes.getString(ATTRIBUTE_FIELD_IMG, null, false, false)); choice.setAnswerDesc(attributes.getString(ATTRIBUTE_FIELD_ANSWER_DESC, null, false, false)); choice.setQuestionDesc(attributes.getString(ATTRIBUTE_FIELD_QUESTION_DESC, null, false, false)); choice.setAltText(attributes.getString(ATTRIBUTE_FIELD_ALT, null, false, false)); choice.setCols(attributes.getInteger(ATTRIBUTE_CHOICE_COLS, ChoiceField.DEFAULT_NUM_COLS)); choice.setRows(attributes.getInteger(ATTRIBUTE_CHOICE_ROWS, ChoiceField.DEFAULT_NUM_ROWS)); choice.setCrossed(attributes.getBoolean("crossed", ChoiceField.DEFAULT_CROSSED)); choice.setCrossColor(attributes.getString("crossColor", ChoiceField.DEFAULT_CROSS_COLOR, true, false)); } // <Location> else if(qName.equals(TAG_LOCATION)) { LocationField locField = new LocationField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_LOCATION, false)); newField(locField, attributes); // Location type: String type = attributes.getValue("type"); if("Any".equalsIgnoreCase(type)) locField.setType(LocationField.TYPE_ANY); else if("GPS".equalsIgnoreCase(type)) locField.setType(LocationField.TYPE_GPS); else if("Network".equalsIgnoreCase(type)) locField.setType(LocationField.TYPE_NETWORK); else if(type != null) // unrecognised location type addWarning("Unknown Location type (" + type + ")."); // When to start listening for a location: String startWith = attributes.getString(ATTRIBUTE_LOCATION_START_WITH, null, true, false); if ("field".equalsIgnoreCase(startWith)) locField.setStartWith(LocationField.START_WITH.FIELD); else if ("page".equalsIgnoreCase(startWith)) if (getCurrentPage() != null) locField.setStartWith(LocationField.START_WITH.PAGE); else { // told to start on page, but there is no page! Start with field instead (assume the user was trying to avoid "form") addWarning("Location field specified to start with page, but no containing page was found. 
Location detection will start with the field instead."); locField.setStartWith(LocationField.START_WITH.FIELD); } else if ("form".equalsIgnoreCase(startWith) || attributes.getBoolean(ATTRIBUTE_LOCATION_START_WITH_FORM, false)) locField.setStartWith(LocationField.START_WITH.FORM); else if (startWith != null) // unknown setting, default will be used addWarning("Unknown location field start preference (" + startWith +")."); locField.setWaitAtField(attributes.getBoolean("waitAtField", LocationField.DEFAULT_WAIT_AT_FIELD)); locField.setTimeoutS(attributes.getInteger("timeout", LocationField.DEFAULT_TIMEOUT_S)); locField.setMaxAgeS(attributes.getInteger("maxAge", LocationField.DEFAULT_MAX_AGE_S)); locField.setMaxAccuracyRadius(attributes.getFloat("maxAccuracyRadius", LocationField.DEFAULT_MAX_ACCURACY_RADIUS)); locField.setUseBestNonQualifyingLocationAfterTimeout(attributes.getBoolean("useBestKnownLocationOnTimeout", LocationField.DEFAULT_USE_BEST_NON_QUALIFYING_LOCATION_AFTER_TIMEOUT)); // Storage settings: locField.setDoublePrecision(attributes.getBoolean("doublePrecision", LocationField.DEFAULT_DOUBLE_PRECISION)); locField.setStoreAltitude(attributes.getBoolean("storeAltitude", LocationField.DEFAULT_STORE_ALTITUDE)); locField.setStoreBearing(attributes.getBoolean("storeBearing", LocationField.DEFAULT_STORE_BEARING)); locField.setStoreSpeed(attributes.getBoolean("storeSpeed", LocationField.DEFAULT_STORE_SPEED)); locField.setStoreAccuracy(attributes.getBoolean("storeAccuracy", LocationField.DEFAULT_STORE_ACCURACY)); locField.setStoreProvider(attributes.getBoolean("storeProvider", LocationField.DEFAULT_STORE_PROVIDER)); } // <Photo> else if(qName.equals(TAG_PHOTO)) { PhotoField photoField = new PhotoField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_PHOTO, false)); newMediaField(photoField, attributes); photoField.setUseNativeApp(attributes.getBoolean("useNativeApp", PhotoField.DEFAULT_USE_NATIVE_APP)); // Camera options (only used when useNativeApp=false): photoField.setUseFrontFacingCamera(attributes.getBoolean("useFrontCamera", PhotoField.DEFAULT_USE_FRONT_FACING_CAMERA)); String flashText = attributes.getValue("flash"); PhotoField.FlashMode flash = PhotoField.DEFAULT_FLASH_MODE; if(flashText != null && !flashText.isEmpty()) { flashText = flashText.trim(); if(flashText.equalsIgnoreCase("on") || flashText.equalsIgnoreCase("always") || flashText.equalsIgnoreCase("true")) flash = PhotoField.FlashMode.ON; else if(flashText.equalsIgnoreCase("auto")) flash = PhotoField.FlashMode.AUTO; else if(flashText.equalsIgnoreCase("off") || flashText.equalsIgnoreCase("never") || flashText.equalsIgnoreCase("false")) flash = PhotoField.FlashMode.OFF; } photoField.setFlashMode(flash); // Custom buttons (only used when useNativeApp=false): photoField.setCaptureButtonImageRelativePath(attributes.getString("captureImg", null, false, false)); photoField.setApproveButtonImageRelativePath(attributes.getString("approveImg", null, false, false)); photoField.setDiscardButtonImageRelativePath(attributes.getString("discardImg", null, false, false)); } // <Audio> else if(qName.equals(TAG_AUDIO)) { AudioField audioField = new AudioField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_AUDIO, false)); newMediaField(audioField, attributes); audioField.setStartRecImageRelativePath(attributes.getString("startRecImg", null, false, false)); audioField.setStopRecImageRelativePath(attributes.getString("stopRecImg", null, false, false)); } // <Orientation> else 
if(qName.equals(TAG_ORIENTATION)) { OrientationField orField = new OrientationField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_ORIENTATION, false)); newField(orField, attributes); orField.setStoreAzimuth(attributes.getBoolean("storeAzimuth", OrientationField.DEFAULT_STORE_AZIMUTH)); orField.setStoreAzimuth(attributes.getBoolean("storePitch", OrientationField.DEFAULT_STORE_PITCH)); orField.setStoreAzimuth(attributes.getBoolean("storeRoll", OrientationField.DEFAULT_STORE_ROLL)); } // <BelongsTo> else if(qName.equals(TAG_BELONGS_TO)) { newRelationship(new BelongsToField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID)), attributes); } // <LinksTo> else if(qName.equals(TAG_LINKS_TO)) { newRelationship(new LinksToField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID)), attributes); } // <Button> else if(qName.equals(TAG_BUTTON)) { ButtonField btn = new ButtonField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_BUTTON, true)); newField(btn, attributes); try { btn.setColumnType(attributes.getString(ATTRIBUTE_BUTTON_COLUMN, ButtonField.DEFAULT_COLUMN_TYPE.name(), true, false)); } catch(IllegalArgumentException iae) { throw new SAXException("Invalid '" + ATTRIBUTE_BUTTON_COLUMN + "' attribute value on <" + TAG_BUTTON + ">.", iae); } if(btn.getColumnType() == ButtonColumnType.DATETIME && !btn.isOptional()) addWarning("Button \"" + btn.getID() + "\" has a DateTime column but is not optional, this means the button will *have* to be pressed."); } // <Label> else if(qName.equals(TAG_LABEL)) { LabelField lbl = new LabelField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_LABEL, true)); newField(lbl, attributes); lbl.setTextSizeScale(attributes.getFloat(ATTRIBUTE_LABEL_SCALE, LabelField.DEFAULT_TEXT_SIZE_SCALE)); lbl.setCentered(attributes.getBoolean(ATTRIBUTE_LABEL_CENTERED, LabelField.DEFAULT_TEXT_CENTERED)); } // <Text> else if(qName.equals(TAG_TEXTFIELD)) { TextBoxField txtField = new TextBoxField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_TEXTFIELD, true)); newField(txtField, attributes); // first set general things like optionality (needed for getDefaultMinLength() below). // Deal with minimum & maximum length: if(!txtField.isOptional() && !attributes.contains(ATTRIBUTE_TEXT_MINLENGTH)) addWarning("Text field \"" + txtField.getID() + "\" is non-optional but no minimal length is defined, therefore the minimum will be set to " + TextBoxField.DEFAULT_MIN_LENGTH_NON_OPTIONAL + " character(s). 
It is recommended to use the '" + ATTRIBUTE_TEXT_MINLENGTH + "' attribute to set an appropriate minimum length explicitly."); txtField.setMinMaxLength( attributes.getInteger(ATTRIBUTE_TEXT_MINLENGTH, TextBoxField.GetDefaultMinLength(txtField.isOptional())), attributes.getInteger(ATTRIBUTE_TEXT_MAXLENGTH, TextBoxField.DEFAULT_MAX_LENGTH)); // Multi-line: txtField.setMultiline(attributes.getBoolean(ATTRIBUTE_TEXT_MULTILINE, TextBoxField.DEFAULT_MULTILINE)); // Initial value (must happen after min/maxLength are set): txtField.setInitialValue(attributes.getString(TextBoxField.GetDefaultInitialValue(txtField.isOptional()), false, true, ATTRIBUTE_FIELD_DEFAULTVALUE, ATTRIBUTE_FIELD_INITVALUE)); // Content types: txtField.setContent(attributes.getString(ATTRIBUTE_TEXT_CONTENT, TextBoxField.DEFAULT_CONTENT.name(), true, false)); // Regular expression pattern (to check input against): txtField.setRegexPattern(attributes.getString(ATTRIBUTE_TEXT_REGEX, null, false, false)); // Auto capitalisation: txtField.setCapitalisation(attributes.getString(ATTRIBUTE_TEXT_CAPITALISATION, TextBoxField.DEFAULT_CAPITALISATION.name(), true, false)); } // <Check> else if(qName.equals(TAG_CHECKBOX)) { CheckBoxField chbxField = new CheckBoxField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, TAG_CHECKBOX, true)); chbxField.setInitialValue(attributes.getBoolean(ATTRIBUTE_FIELD_DEFAULTVALUE, CheckBoxField.DEFAULT_INITIAL_VALUE)); newField(chbxField, attributes); } // <List> or <MultiList> (these are in fact just synonyms, but we added both to avoid confusing novice form designers with terminoly that refers to a multi-level list when they only need a flat list) else if(qName.equals(TAG_LIST) || qName.equals(TAG_MULTILIST)) { MultiListField ml = new MultiListField(currentForm, attributes.getValue(ATTRIBUTE_FIELD_ID), readCaption(attributes, qName.equals(TAG_LIST) ? TAG_LIST : TAG_MULTILIST, true, true)); ml.setPreSelect(attributes.getBoolean(ATTRIBUTE_LIST_PRESELECT, MultiListField.DEFAULT_PRESELECT)); newField(ml, attributes); currentListItem = ml.getItemsRoot(); } // <Page> (Field composite) else if(qName.equals(TAG_PAGE)) { newPage(attributes); } // <Trigger> else if(qName.equals(TAG_TRIGGER)) { newTrigger(new Trigger(), attributes); } // Add future field types here // ... 
// Tags appearing within Field tags else if(!openFields.isEmpty()) { Field currentField = openFields.peek(); // <Argument> if(qName.equals(TAG_ARGUMENT)) { parseArgument(currentField, attributes); } // <Item> (contained within <List> or <MultiList>, and maybe other things later) else if(qName.equals(TAG_LISTITEM)) { if(currentListItem != null) { currentListItem = new MultiListItem(currentListItem, attributes.getRequiredString(TAG_LISTITEM, ATTRIBUTE_FIELD_VALUE, false, true)); if(attributes.getBoolean(ATTRIBUTE_LISTITEM_DEFAULT, false)) { if(currentListItem.getParent().getDefaultChild() == null) currentListItem.getParent().setDefaultChild(currentListItem); else addWarning("More than 1 item marked as default within one of the (sub)lists of MultiListField " + currentListItem.getField().getID() + ", using 1st item marked as default as the default for the list."); } } else addWarning("Ignored <" + TAG_LISTITEM + "> element occurring outside <" + TAG_LIST + "> or <" + TAG_MULTILIST + ">."); } // <Constraint> (contained within <BelongsTo> or <LinksTo>, and maybe other things later) else if(qName.equals(TAG_CONSTRAINT)) { if(currentField instanceof Relationship) { Relationship currentRelationship = (Relationship) currentField; String columnName = attributes.getRequiredString(getRelationshipTag(currentRelationship), ATTRIBUTE_CONSTRAINT_COLUMN, true, false); // Comparison attribute name: String comparisonAttrib = null; for(String compStr : RuleConstraint.COMPARISON_STRINGS) if(attributes.contains(compStr)) { comparisonAttrib = compStr; break; } if(comparisonAttrib == null) addWarning("<" + TAG_CONSTRAINT + "> does not contain a comparison attribute (i.e. 1 of: " + StringUtils.join(RuleConstraint.COMPARISON_STRINGS, ", ") + ")."); else owner.addRelationshipConstraint(currentRelationship, columnName, comparisonAttrib, attributes.getRequiredString(getRelationshipTag(currentRelationship), comparisonAttrib, true, true)); } // <Constraint> in something other than <BelongsTo> or <LinksTo> else addWarning("Ignored <" + TAG_CONSTRAINT + "> element occurring outside <" + TAG_BELONGS_TO + "> or <" + TAG_LINKS_TO + ">."); } // <?> within field else { addWarning("Ignored unrecognised or invalidly placed element <" + qName + "> occurring within field with id \"" + currentField.getID() + "\"."); } } // Tags appearing within <Trigger> else if(openTrigger != null) { // <Argument> if(qName.equals(TAG_ARGUMENT)) { parseArgument(openTrigger, attributes); } // <?> within trigger else { addWarning("Ignored unrecognised or invalidly placed element <" + qName + "> occurring within <" + TAG_TRIGGER + ">."); } } // <?> within <Form> else { addWarning("Ignored unrecognised or invalidly placed element <" + qName + "> occurring within <" + TAG_FORM + ">."); } } // <?> outside of <Form> (shouldn't happen) else { throw new IllegalArgumentException("FormParser only deals with elements that are equal to, or contained within <" + TAG_FORM + ">."); } } /** * @param attributes may be null for implicit pages (i.e. the one for a singlePage form) * @throws SAXException */ private void newPage(XMLAttributes attributes) throws SAXException { if(!openFields.isEmpty()) throw new SAXException("<Page> elements must appear directly within <Form> and cannot be nested."); Page newPage = new Page(currentForm, attributes == null ?
currentForm.getID() + "_page" : attributes.getString(currentForm.getID() + "_page_" + currentForm.getFields().size(), true, false, ATTRIBUTE_FIELD_ID)); newField(newPage, attributes); } private void newRelationship(Relationship relationship, XMLAttributes attributes) throws Exception { newField(relationship, attributes); // Remember form name (to resolved later): owner.addRelationship(relationship, attributes.getRequiredString(getRelationshipTag(relationship), ATTRIBUTE_RELATIONSHIP_FORM, true, false)); // Other attributes: relationship.setHoldForeignRecord(attributes.getBoolean(ATTRIBUTE_RELATIONSHIP_HOLD, Relationship.DEFAULT_HOLD_FOREIGN_RECORD)); // TODO ? updateStartTimeUponLeave, saveBeforeFormChange, discardBeforeLeave (only for linksTo) ? } private void newMediaField(MediaField ma, XMLAttributes attributes) throws SAXException { newField(ma, attributes); ma.setMax(attributes.getInteger(ATTRIBUTE_MEDIA_MAX , MediaField.DEFAULT_MAX)); if(attributes.getValue(ATTRIBUTE_DISABLE_FIELD) != null) mediaAttachToDisableId.put(ma, attributes.getValue(ATTRIBUTE_DISABLE_FIELD).trim().toUpperCase()); // upper cased, for case insensitivity } /** * Adds field to current currentForm or currentPage, sets optionalness, remembers id & jump & reads various Field attributes * * @param field the Field object * @param attributes may be null for implicit fields (fields that are inserted by the parser but do not explicitly appear in the XML, e.g. the Page for a singlePage form) * @throws SAXException */ private void newField(Field field, XMLAttributes attributes) throws SAXException { try { // Warn about IDs starting with '_': //TODO test if no invalid XML chars if(field.getID().startsWith("_")) { // For really stupid cases ;-): for(EndField ef : EndField.GetEndFields(currentForm)) if(ef.getID().equals(field.getID())) throw new SAXException(field.getID() + " is a reserved ID, don't use it for user-defined fields."); addWarning("Please avoid field IDs starting with '_' (" + field.getID() + ")."); } // Get current page if there is one: Page currentPage = getCurrentPage(); // If the field is a root field (note: even elements on a page are root fields)... if(field.isRoot()) { // Add it to the form or page: if(currentPage == null) { // field is top-level (directly contained within the form, and not in a page first)... currentForm.addField(field); // ... and therefore it can be jumped to, so remember its ID (upper cased, for case insensitivity): if(idToField.put(field.getID().toUpperCase(), field) != null) throw new SAXException("Duplicate field ID '" + field.getID() + "' in Form '" + currentForm.getID() + "'! (Note: field and form IDs are case insensitive)"); } else // the field is contained by a page: currentPage.addField(field); if(attributes != null) { // Set optionalness: String optText = attributes.getValue(ATTRIBUTE_FIELD_OPTIONAL); boolean opt = currentPage == null ? 
Field.DEFAULT_OPTIONAL : currentPage.isOptional(); // use default optionalness or that of the containing page if(optText != null && !optText.trim().isEmpty()) { optText = optText.trim(); if("always".equalsIgnoreCase(optText) || Boolean.TRUE.toString().equalsIgnoreCase(optText)) opt = true; else if("notIfReached".equalsIgnoreCase(optText)) // deprecated, but still parsed on all format versions (for backwards compatibility) opt = false; else if("never".equalsIgnoreCase(optText) || Boolean.FALSE.toString().equalsIgnoreCase(optText)) opt = false; } field.setOptional(opt); // Show on create/edit: field.setShowOnCreate(attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_ON_CREATE, Field.DEFAULT_SHOW_ON_CREATE)); field.setShowOnEdit(attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_ON_EDIT, Field.DEFAULT_SHOW_ON_EDIT)); // Set editable (inherit from page if on page): field.setEditable(attributes.getBoolean(ATTRIBUTE_FIELD_EDITABLE, currentPage == null ? Field.DEFAULT_EDITABLE : currentPage.isEditable())); } } // Read various optional Field attributes: if(attributes != null) { // Remember jumps (always "intra-Form", and not leaving a page unless this type of field is allowed to do that): if(attributes.getValue(ATTRIBUTE_FIELD_JUMP) != null) { if(currentPage == null || field.canJumpFromPage()) jumpSourceToJumpTargetId.put(field, attributes.getValue(ATTRIBUTE_FIELD_JUMP).trim().toUpperCase()); // trimmed (because id's on fields are too) & upper cased (for case insensitivity) else if(currentPage != null) addWarning("Field \"" + field.getID() + "\" tries to jump away from the page, but is not allowed."); } // Skip on back: field.setSkipOnBack(attributes.getBoolean(ATTRIBUTE_SKIP_ON_BACK, Field.DEFAULT_SKIP_ON_BACK)); // Background colour: field.setBackgroundColor(attributes.getString(ATTRIBUTE_FIELD_BACKGROUND_COLOR, Field.DEFAULT_BACKGROUND_COLOR, true, false)); // Which buttons are allowed to show... // Mode-specific: field.setShowControlOnMode(Control.BACK, Mode.CREATE, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_BACK_ON_CREATE, Field.DEFAULT_SHOW_BACK)); field.setShowControlOnMode(Control.BACK, Mode.EDIT, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_BACK_ON_EDIT, Field.DEFAULT_SHOW_BACK)); field.setShowControlOnMode(Control.CANCEL, Mode.CREATE, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_CANCEL_ON_CREATE, Field.DEFAULT_SHOW_CANCEL)); field.setShowControlOnMode(Control.CANCEL, Mode.EDIT, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_CANCEL_ON_EDIT, Field.DEFAULT_SHOW_CANCEL)); field.setShowControlOnMode(Control.FORWARD, Mode.CREATE, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_FORWARD_ON_CREATE, Field.DEFAULT_SHOW_FORWARD)); field.setShowControlOnMode(Control.FORWARD, Mode.EDIT, attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_FORWARD_ON_EDIT, Field.DEFAULT_SHOW_FORWARD)); // Across all modes (overrules mode-specific settings) + with backwards compatibility for v1.0 forms which may have shopBack/showCancel/showForward at the form level: if(attributes.contains(ATTRIBUTE_FIELD_SHOW_BACK) || v1xFormShowBack != null) field.setShowBack((v1xFormShowBack != null ? v1xFormShowBack : true) && attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_BACK, Field.DEFAULT_SHOW_BACK)); if(attributes.contains(ATTRIBUTE_FIELD_SHOW_CANCEL) || v1xFormShowCancel != null) field.setShowCancel((v1xFormShowCancel != null ? v1xFormShowCancel : true) && attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_CANCEL, Field.DEFAULT_SHOW_CANCEL)); if(attributes.contains(ATTRIBUTE_FIELD_SHOW_FORWARD) || v1xFormShowForward != null) field.setShowForward((v1xFormShowForward != null ? 
v1xFormShowForward : true) && attributes.getBoolean(ATTRIBUTE_FIELD_SHOW_FORWARD, Field.DEFAULT_SHOW_FORWARD)); } // Remember current field: openFields.push(field); //!!! } catch(Exception e) { throw new SAXException("Error on parsing field '" + field.getID() + "'", e); } } private void newTrigger(Trigger trigger, XMLAttributes attributes) { // Parse the attributes String keys = attributes.getString(null, true, false, ATTRIBUTE_TRIGGER_KEY, ATTRIBUTE_TRIGGER_KEYS); if(keys != null) for(String k : keys.split(Trigger.KEY_SEPARATOR)) { try { trigger.addKey(Trigger.Key.valueOf(k.toUpperCase())); } catch(Exception e) { addWarning("Unrecognised Trigger key: " + k); } } trigger.setFixedTimer(attributes.getInteger(ATTRIBUTE_TRIGGER_FIXED_TIMER, Trigger.NO_TIMEOUT)); if(attributes.contains(ATTRIBUTE_TRIGGER_JUMP)) // Remember jump (always "intra-Form") jumpSourceToJumpTargetId.put(trigger, attributes.getValue(ATTRIBUTE_TRIGGER_JUMP).trim().toUpperCase()); // upper cased, for insensitivity // Add the trigger to the current Page Page currentPage = getCurrentPage(); if(currentPage != null) currentPage.addTrigger(trigger); // else add the triggers to the Form else currentForm.addTrigger(trigger); // Remember trigger (so arguments can be added): openTrigger = trigger; } private void parseArgument(JumpSource source, XMLAttributes tagAttributes) throws Exception { if(!source.hasNextFieldArguements()) source.setNextFieldArguments(new FieldParameters()); source.getNextFieldArguments().put( tagAttributes.getRequiredString(TAG_ARGUMENT, ATTRIBUTE_ARGUMENT_PARAM, true, false), tagAttributes.getRequiredString(TAG_ARGUMENT, ATTRIBUTE_ARGUMENT_VALUE, false, true)); // TODO Let Field instance validate param & value? } private Page getCurrentPage() { // Iterate through the stack from the top down and look for a Page: for(int f = openFields.size() - 1; f >= 0; f--) if(openFields.get(f) instanceof Page) return (Page) openFields.get(f); // No current page: return null; } protected void closePage(Page page) { /* The 'optional' attribute of a page is only used to inherit from by contained fields (see newField()), * at runtime it doesn't have meaning in itself because the page does not have a column of its own and * whether or not the page can be skipped or left is to be decided based on the optionalness and acquired * values of the contained fields. * Because of this the optionalness of the page is reset to ALWAYS after all contained fields are parsed. 
*/ page.setOptional(true); } @Override protected void parseEndElement(String uri, String localName, String qName) throws SAXException { // Close field: </Choice>, </Location>, </Photo>, </Audio>, </Orientation>, </BelongsTo>, </LinksTo>, </Button>, </Label>, </Textbox>, </Checkbox>, </List>, </MultiList>, </Page> if( !openFields.isEmpty() && ( qName.equals(TAG_CHOICE) || qName.equals(TAG_LOCATION) || qName.equals(TAG_PHOTO) || qName.equals(TAG_AUDIO) || qName.equals(TAG_ORIENTATION) || qName.equals(TAG_BELONGS_TO) || qName.equals(TAG_LINKS_TO) || qName.equals(TAG_BUTTON) || qName.equals(TAG_LABEL) || qName.equals(TAG_TEXTFIELD) || qName.equals(TAG_CHECKBOX) || qName.equals(TAG_LIST) || qName.equals(TAG_MULTILIST) || qName.equals(TAG_PAGE))) { Field currentField = openFields.pop(); // pop the field // </Choice> if(qName.equals(TAG_CHOICE) && currentField instanceof ChoiceField) { ChoiceField currentChoice = (ChoiceField) currentField; if(currentChoice.isRoot() && currentChoice.isLeaf()) throw new SAXException("Root choices need at least 1 child (but 2 or more children probably makes more sense)."); } // </Page> else if(qName.equals(TAG_PAGE) && currentField instanceof Page) closePage((Page) currentField); } // </Item>, </List> or </MultiList> else if(qName.equals(TAG_LISTITEM) || qName.equals(TAG_LIST) || qName.equals(TAG_MULTILIST)) { if(currentListItem.isRoot() && currentListItem.isLeaf()) throw new SAXException("A list needs at least 1 <Item> (but 2 or more probably makes more sense)."); if(!currentListItem.isLeaf() && currentListItem.getDefaultChild() == null) currentListItem.setDefaultChild(currentListItem.getChildren().get(0)); // first child become default currentListItem = currentListItem.getParent(); // parent (possibly null in case of root) becomes currentListItem } // </Trigger> else if(qName.equals(TAG_TRIGGER)) { openTrigger = null; } // </Form> else if(qName.equals(TAG_FORM)) { // close page in case of a singePage form: Page currentPage = getCurrentPage(); if(currentPage != null) { closePage(currentPage); openFields.pop(); } // Resolve/set currentForm start field: Field startField = currentForm.getFields().get(0); // first field is the default start field if(formStartFieldId != null) // try with field specified by ID in <Form startField="..."> (may be null) { Field specifiedStartField = currentForm.getField(formStartFieldId); // uses equalsIgnoreCase() if(specifiedStartField == null) //TODO throw exception instead addWarning("The specified start field (\"" + formStartFieldId + "\") of currentForm \"" + currentForm.getName() + "\" does not exist, using first field instead."); else startField = specifiedStartField; } currentForm.setStartField(startField); // Add EndField instances to idToField map (these don't need to be added as actual fields to the form itself) for(EndField endF : EndField.GetEndFields(currentForm)) idToField.put(endF.getID().toUpperCase(), endF); // upper cased, for case insensitivity (they should already be upper case, but just in case...) // Resolve jumps... for(Entry<JumpSource, String> jump : jumpSourceToJumpTargetId.entrySet()) { Field target = idToField.get(jump.getValue()); if(target == null) addWarning("Cannot resolve jump ID '" + jump.getValue() + "' (case insensitive)."); else jump.getKey().setJump(target); // set jump pointer (to a field object) } // Resolve disabling of Choices by MediaAttachments... 
for(Entry<MediaField, String> disable : mediaAttachToDisableId.entrySet()) { Field target = idToField.get(disable.getValue()); if(target == null) addWarning("Cannot resolve disable field ID '" + disable.getValue() + "' (case insensitive)."); else disable.getKey().setDisableChoice((ChoiceField) target); } // Deactivate this subtree parser: deactivate(); //will call reset() (+ warnings will be copied to owner) } } private String readCaption(XMLAttributes tagAttributes, String tag, boolean required) throws Exception { return readCaption(tagAttributes, tag, required, false); // singular by default } private String readCaption(XMLAttributes tagAttributes, String tag, boolean required, boolean plural) throws Exception { if(required) return tagAttributes.getRequiredString(tag, false, true, plural ? ATTRIBUTE_FIELD_CAPTION_PLURAL : ATTRIBUTE_FIELD_CAPTION_SINGULAR); else return tagAttributes.getString(null, false, true, plural ? ATTRIBUTE_FIELD_CAPTION_PLURAL : ATTRIBUTE_FIELD_CAPTION_SINGULAR); } private String getRelationshipTag(Relationship relationship) { if(relationship instanceof BelongsToField) return TAG_BELONGS_TO; if(relationship instanceof LinksToField) return TAG_LINKS_TO; throw new IllegalArgumentException("Unsupported relationship type"); } @Override protected boolean isSingleUse() { return false; } }
Previous commit did not actually include the fix to <Location> parsing; this one does :-) Also moved location parsing into a separate method (newLocation()). Signed-off-by: Matthias Stevens <3e0606afd16757d2df162884117429808539458f@gmail.com>
Library/src/uk/ac/ucl/excites/sapelli/collector/load/parse/FormParser.java
Previous commit did not actually include the fix to <Location> parsing; this one does :-) Also moved location parsing into a separate method (newLocation())
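The commit message above says the inline <Location> handling was moved into a separate newLocation() method. A minimal sketch of what such an extraction could look like, assuming a helper that receives an already-constructed LocationField; the signature and the subset of attributes shown are assumptions, while the attribute names, constants and setters are taken from the inline code in the old contents.

	// Hypothetical helper (the actual method in the new revision is not shown here);
	// attribute names and LocationField setters/constants are those used by the old inline code.
	private void newLocation(LocationField locField, XMLAttributes attributes) throws Exception
	{
		newField(locField, attributes); // generic Field attributes: id, jump, optional, ...
		// Location provider type:
		String type = attributes.getValue("type");
		if("Any".equalsIgnoreCase(type))
			locField.setType(LocationField.TYPE_ANY);
		else if("GPS".equalsIgnoreCase(type))
			locField.setType(LocationField.TYPE_GPS);
		else if("Network".equalsIgnoreCase(type))
			locField.setType(LocationField.TYPE_NETWORK);
		else if(type != null)
			addWarning("Unknown Location type (" + type + ").");
		// When to start listening for a location (field / page / form):
		String startWith = attributes.getString(ATTRIBUTE_LOCATION_START_WITH, null, true, false);
		if("field".equalsIgnoreCase(startWith))
			locField.setStartWith(LocationField.START_WITH.FIELD);
		else if("page".equalsIgnoreCase(startWith))
		{	// only meaningful on a page; fall back to FIELD otherwise (warning omitted for brevity):
			if(getCurrentPage() != null)
				locField.setStartWith(LocationField.START_WITH.PAGE);
			else
				locField.setStartWith(LocationField.START_WITH.FIELD);
		}
		else if("form".equalsIgnoreCase(startWith) || attributes.getBoolean(ATTRIBUTE_LOCATION_START_WITH_FORM, false))
			locField.setStartWith(LocationField.START_WITH.FORM);
		else if(startWith != null)
			addWarning("Unknown location field start preference (" + startWith + ").");
		// Timing & storage settings, same attribute names as the inline version:
		locField.setWaitAtField(attributes.getBoolean("waitAtField", LocationField.DEFAULT_WAIT_AT_FIELD));
		locField.setTimeoutS(attributes.getInteger("timeout", LocationField.DEFAULT_TIMEOUT_S));
		locField.setStoreAltitude(attributes.getBoolean("storeAltitude", LocationField.DEFAULT_STORE_ALTITUDE));
		// ... remaining maxAge / maxAccuracyRadius / store* attributes handled exactly as in the inline version ...
	}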
Java
apache-2.0
fbc7bac3b99c368e443903b926b66e9e9cbb4c48
0
smadha/tika,smadha/tika,smadha/tika,zamattiac/tika,icirellik/tika,zamattiac/tika,zamattiac/tika,smadha/tika,icirellik/tika,icirellik/tika,zamattiac/tika,icirellik/tika,icirellik/tika,smadha/tika,smadha/tika,zamattiac/tika,zamattiac/tika,zamattiac/tika,icirellik/tika,zamattiac/tika,smadha/tika,icirellik/tika,smadha/tika,icirellik/tika
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tika.mime; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import junit.framework.TestCase; import org.apache.tika.config.TikaConfig; import org.apache.tika.metadata.Metadata; public class MimeDetectionTest extends TestCase { private MimeTypes mimeTypes; private MediaTypeRegistry registry; /** @inheritDoc */ @Override protected void setUp() throws Exception { super.setUp(); this.mimeTypes = TikaConfig.getDefaultConfig().getMimeRepository(); this.registry = mimeTypes.getMediaTypeRegistry(); } public void testDetection() throws Exception { testFile("image/svg+xml", "circles.svg"); testFile("image/svg+xml", "circles-with-prefix.svg"); testFile("image/png", "datamatrix.png"); testFile("text/html", "test.html"); testFile("application/xml", "test-iso-8859-1.xml"); testFile("application/xml", "test-utf8.xml"); testFile("application/xml", "test-utf16le.xml"); testFile("application/xml", "test-utf16be.xml"); testFile("application/xml", "test-long-comment.xml"); testFile("application/xslt+xml", "stylesheet.xsl"); testUrl( "application/rdf+xml", "http://www.ai.sri.com/daml/services/owl-s/1.2/Process.owl", "test-difficult-rdf1.xml"); testUrl( "application/rdf+xml", "http://www.w3.org/2002/07/owl#", "test-difficult-rdf2.xml"); // add evil test from TIKA-327 testFile("text/html", "evilhtml.html"); // add another evil html test from TIKA-357 testFile("text/html", "testlargerbuffer.html"); } public void testByteOrderMark() throws Exception { assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream("\ufffetest".getBytes("UTF-16LE")), new Metadata())); assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream("\ufffetest".getBytes("UTF-16BE")), new Metadata())); assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream("\ufffetest".getBytes("UTF-8")), new Metadata())); } public void testSuperTypes() { assertTrue(registry.isSpecializationOf( MediaType.parse("text/something; charset=UTF-8"), MediaType.parse("text/something"))); assertTrue(registry.isSpecializationOf( MediaType.parse("text/something; charset=UTF-8"), MediaType.TEXT_PLAIN)); assertTrue(registry.isSpecializationOf( MediaType.parse("text/something; charset=UTF-8"), MediaType.OCTET_STREAM)); assertTrue(registry.isSpecializationOf( MediaType.parse("text/something"), MediaType.TEXT_PLAIN)); assertTrue(registry.isSpecializationOf( MediaType.parse("application/something+xml"), MediaType.APPLICATION_XML)); assertTrue(registry.isSpecializationOf( MediaType.parse("application/something+zip"), MediaType.APPLICATION_ZIP)); assertTrue(registry.isSpecializationOf( MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN)); assertTrue(registry.isSpecializationOf( 
MediaType.parse("application/vnd.apple.iwork"), MediaType.APPLICATION_ZIP)); } private void testUrl(String expected, String url, String file) throws IOException{ InputStream in = getClass().getResourceAsStream(file); testStream(expected, url, in); } private void testFile(String expected, String filename) throws IOException { InputStream in = getClass().getResourceAsStream(filename); testStream(expected, filename, in); } private void testStream(String expected, String urlOrFileName, InputStream in) throws IOException{ assertNotNull("Test stream: ["+urlOrFileName+"] is null!", in); if (!in.markSupported()) { in = new java.io.BufferedInputStream(in); } try { Metadata metadata = new Metadata(); String mime = this.mimeTypes.detect(in, metadata).toString(); assertEquals(urlOrFileName + " is not properly detected: detected.", expected, mime); //Add resource name and test again metadata.set(Metadata.RESOURCE_NAME_KEY, urlOrFileName); mime = this.mimeTypes.detect(in, metadata).toString(); assertEquals(urlOrFileName + " is not properly detected after adding resource name.", expected, mime); } finally { in.close(); } } /** * Test for type detection of empty documents. * * @see <a href="https://issues.apache.org/jira/browse/TIKA-483">TIKA-483</a> */ public void testEmptyDocument() throws IOException { assertEquals(MediaType.OCTET_STREAM, mimeTypes.detect( new ByteArrayInputStream(new byte[0]), new Metadata())); Metadata namehint = new Metadata(); namehint.set(Metadata.RESOURCE_NAME_KEY, "test.txt"); assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream(new byte[0]), namehint)); Metadata typehint = new Metadata(); typehint.set(Metadata.CONTENT_TYPE, "text/plain"); assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream(new byte[0]), typehint)); } /** * Test for things like javascript files whose content is enclosed in XML * comment delimiters, but that aren't actually XML. * * @see <a href="https://issues.apache.org/jira/browse/TIKA-426">TIKA-426</a> */ public void testNotXML() throws IOException { assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream("<!-- test -->".getBytes("UTF-8")), new Metadata())); } /** * Tests that when we repeatedly test the detection of a document * that can be detected with Mime Magic, that we consistently * detect it correctly. See TIKA-391 for more details. */ public void testMimeMagicStability() throws IOException { for(int i=0; i<100; i++) { testFile("application/vnd.ms-excel", "test.xls"); } } }
tika-core/src/test/java/org/apache/tika/mime/MimeDetectionTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tika.mime; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import junit.framework.TestCase; import org.apache.tika.config.TikaConfig; import org.apache.tika.metadata.Metadata; public class MimeDetectionTest extends TestCase { private MimeTypes mimeTypes; private MediaTypeRegistry registry; /** @inheritDoc */ @Override protected void setUp() throws Exception { super.setUp(); this.mimeTypes = TikaConfig.getDefaultConfig().getMimeRepository(); this.registry = mimeTypes.getMediaTypeRegistry(); } public void testDetection() throws Exception { testFile("image/svg+xml", "circles.svg"); testFile("image/svg+xml", "circles-with-prefix.svg"); testFile("image/png", "datamatrix.png"); testFile("text/html", "test.html"); testFile("application/xml", "test-iso-8859-1.xml"); testFile("application/xml", "test-utf8.xml"); testFile("application/xml", "test-utf16le.xml"); testFile("application/xml", "test-utf16be.xml"); testFile("application/xml", "test-long-comment.xml"); testFile("application/xslt+xml", "stylesheet.xsl"); testUrl( "application/rdf+xml", "http://www.ai.sri.com/daml/services/owl-s/1.2/Process.owl", "test-difficult-rdf1.xml"); testUrl( "application/rdf+xml", "http://www.w3.org/2002/07/owl#", "test-difficult-rdf2.xml"); // add evil test from TIKA-327 testFile("text/html", "evilhtml.html"); // add another evil html test from TIKA-357 testFile("text/html", "testlargerbuffer.html"); } public void testByteOrderMark() throws Exception { assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream("\ufffetest".getBytes("UTF-16LE")), new Metadata())); assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream("\ufffetest".getBytes("UTF-16BE")), new Metadata())); assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream("\ufffetest".getBytes("UTF-8")), new Metadata())); } public void testSuperTypes() { assertTrue(registry.isSpecializationOf( MediaType.parse("text/something; charset=UTF-8"), MediaType.parse("text/something"))); assertTrue(registry.isSpecializationOf( MediaType.parse("text/something; charset=UTF-8"), MediaType.TEXT_PLAIN)); assertTrue(registry.isSpecializationOf( MediaType.parse("text/something; charset=UTF-8"), MediaType.OCTET_STREAM)); assertTrue(registry.isSpecializationOf( MediaType.parse("text/something"), MediaType.TEXT_PLAIN)); assertTrue(registry.isSpecializationOf( MediaType.parse("application/something+xml"), MediaType.APPLICATION_XML)); assertTrue(registry.isSpecializationOf( MediaType.parse("application/something+zip"), MediaType.APPLICATION_ZIP)); assertTrue(registry.isSpecializationOf( MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN)); assertTrue(registry.isSpecializationOf( 
MediaType.parse("application/vnd.apple.iwork"), MediaType.APPLICATION_ZIP)); } private void testUrl(String expected, String url, String file) throws IOException{ InputStream in = getClass().getResourceAsStream(file); testStream(expected, url, in); } private void testFile(String expected, String filename) throws IOException { InputStream in = getClass().getResourceAsStream(filename); testStream(expected, filename, in); } private void testStream(String expected, String urlOrFileName, InputStream in) throws IOException{ assertNotNull("Test stream: ["+urlOrFileName+"] is null!", in); if (!in.markSupported()) { in = new java.io.BufferedInputStream(in); } try { Metadata metadata = new Metadata(); String mime = this.mimeTypes.detect(in, metadata).toString(); assertEquals(urlOrFileName + " is not properly detected: detected.", expected, mime); //Add resource name and test again metadata.set(Metadata.RESOURCE_NAME_KEY, urlOrFileName); mime = this.mimeTypes.detect(in, metadata).toString(); assertEquals(urlOrFileName + " is not properly detected after adding resource name.", expected, mime); } finally { in.close(); } } /** * Test for type detection of empty documents. * * @see <a href="https://issues.apache.org/jira/browse/TIKA-483">TIKA-483</a> */ public void testEmptyDocument() throws IOException { assertEquals(MediaType.OCTET_STREAM, mimeTypes.detect( new ByteArrayInputStream(new byte[0]), new Metadata())); Metadata namehint = new Metadata(); namehint.set(Metadata.RESOURCE_NAME_KEY, "test.txt"); assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream(new byte[0]), namehint)); Metadata typehint = new Metadata(); typehint.set(Metadata.CONTENT_TYPE, "text/plain"); assertEquals(MediaType.TEXT_PLAIN, mimeTypes.detect( new ByteArrayInputStream(new byte[0]), typehint)); } /** * Tests that when we repeatedly test the detection of a document * that can be detected with Mime Magic, that we consistently * detect it correctly. See TIKA-391 for more details. */ public void testMimeMagicStability() throws IOException { for(int i=0; i<100; i++) { testFile("application/vnd.ms-excel", "test.xls"); } } }
TIKA-426: Parsing javascript as XML

Add a test case

git-svn-id: de575e320ab8ef6bd6941acfb783cdb8d8307cc1@1004051 13f79535-47bb-0310-9956-ffa450edef68
tika-core/src/test/java/org/apache/tika/mime/MimeDetectionTest.java
TIKA-426: Parsing javascript as XML
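The MimeDetectionTest above drives Tika's detector through TikaConfig.getDefaultConfig().getMimeRepository() and detect(InputStream, Metadata), both with and without a resource-name hint. The following standalone sketch shows the same call pattern outside the test harness; it is an illustration under stated assumptions, not code from the Tika sources, and the class name DetectionSketch and the file name notes.txt are made up.

import java.io.ByteArrayInputStream;
import java.io.InputStream;

import org.apache.tika.config.TikaConfig;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;
import org.apache.tika.mime.MimeTypes;

// Illustrative sketch only (class name and "notes.txt" are assumptions): it exercises the
// same detect(InputStream, Metadata) calls the test uses. TIKA-426's motivating case is
// javascript wrapped in XML comment delimiters, which should detect as text/plain, not XML.
public class DetectionSketch {
  public static void main(String[] args) throws Exception {
    MimeTypes mimeTypes = TikaConfig.getDefaultConfig().getMimeRepository();

    // A document that merely starts with an XML comment is not XML (TIKA-426).
    InputStream in = new ByteArrayInputStream("<!-- test -->".getBytes("UTF-8"));
    MediaType withoutHint = mimeTypes.detect(in, new Metadata());
    System.out.println(withoutHint); // expected: text/plain

    // With a resource-name hint, even an empty stream resolves to text/plain (TIKA-483).
    Metadata hint = new Metadata();
    hint.set(Metadata.RESOURCE_NAME_KEY, "notes.txt");
    MediaType withHint = mimeTypes.detect(new ByteArrayInputStream(new byte[0]), hint);
    System.out.println(withHint); // expected: text/plain
  }
}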
Java
apache-2.0
08ecd8ca9bac18f7f5d0790f8dd7ac7f18e26bc5
0
senseidb/sensei,javasoze/sensei
package com.senseidb.search.node; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.log4j.Logger; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.SortField; import org.apache.lucene.util.PriorityQueue; import proj.zoie.api.ZoieIndexReader; import com.browseengine.bobo.api.BoboIndexReader; import com.browseengine.bobo.api.BrowseFacet; import com.browseengine.bobo.api.BrowseSelection; import com.browseengine.bobo.api.FacetAccessible; import com.browseengine.bobo.api.FacetIterator; import com.browseengine.bobo.api.FacetSpec; import com.browseengine.bobo.api.FacetSpec.FacetSortSpec; import com.browseengine.bobo.facets.CombinedFacetAccessible; import com.browseengine.bobo.facets.FacetHandler; import com.browseengine.bobo.facets.data.FacetDataCache; import com.browseengine.bobo.facets.data.PrimitiveLongArrayWrapper; import com.browseengine.bobo.sort.DocComparator; import com.browseengine.bobo.sort.DocIDPriorityQueue; import com.browseengine.bobo.sort.SortCollector; import com.browseengine.bobo.sort.SortCollector.CollectorContext; import com.browseengine.bobo.util.ListMerger; import com.senseidb.search.req.ErrorType; import com.senseidb.search.req.SenseiError; import com.senseidb.search.req.SenseiHit; import com.senseidb.search.req.SenseiRequest; import com.senseidb.search.req.SenseiResult; import com.senseidb.search.req.mapred.impl.SenseiReduceFunctionWrapper; public class ResultMerger { private final static Logger logger = Logger.getLogger(ResultMerger.class.getName()); private final static class MyScoreDoc extends ScoreDoc { private static final long serialVersionUID = 1L; private BoboIndexReader reader; private int finalDoc; public int groupPos; public Object rawGroupValue; public Comparable sortValue; public MyScoreDoc(int docid, float score, int finalDoc, BoboIndexReader reader) { super(docid, score); this.finalDoc = finalDoc; this.reader = reader; } SenseiHit getSenseiHit(SenseiRequest req) { SenseiHit hit = new SenseiHit(); if (req.isFetchStoredFields() || req.isFetchStoredValue()) { if (req.isFetchStoredFields()) { try { hit.setStoredFields(reader.document(doc)); } catch(Exception e) { logger.error(e.getMessage(),e); } } try { IndexReader innerReader = reader.getInnerReader(); if (innerReader instanceof ZoieIndexReader) { hit.setStoredValue( ((ZoieIndexReader)innerReader).getStoredValue( ((ZoieIndexReader)innerReader).getUID(doc))); } } catch(Exception e) { } } List<FacetHandler<?>> facetHandlers= new ArrayList<FacetHandler<?>>(reader.getFacetHandlerMap().values()); if (reader.getRuntimeFacetHandlerMap() != null) { facetHandlers.addAll(reader.getRuntimeFacetHandlerMap().values()); } Map<String,String[]> map = new HashMap<String,String[]>(); Map<String,Object[]> rawMap = new HashMap<String,Object[]>(); Set<String> selectSet = req.getSelectSet(); for (FacetHandler<?> facetHandler : facetHandlers) { if (selectSet == null || selectSet.size() == 0 || selectSet.contains(facetHandler.getName())) { map.put(facetHandler.getName(),facetHandler.getFieldValues(reader,doc)); rawMap.put(facetHandler.getName(),facetHandler.getRawFieldValues(reader,doc)); } } hit.setFieldValues(map); 
hit.setRawFieldValues(rawMap); hit.setUID(((ZoieIndexReader<BoboIndexReader>)reader.getInnerReader()).getUID(doc)); hit.setDocid(finalDoc); hit.setScore(score); hit.setComparable(sortValue); hit.setGroupPosition(groupPos); String[] groupBy = req.getGroupBy(); if (groupBy != null && groupBy.length > groupPos && groupBy[groupPos] != null) { hit.setGroupField(groupBy[groupPos]); hit.setGroupValue(hit.getField(groupBy[groupPos])); hit.setRawGroupValue(hit.getRawField(groupBy[groupPos])); } return hit; } } private final static class HitWithGroupQueue { public SenseiHit hit; public PriorityQueue<MyScoreDoc> queue; public ArrayList<Iterator<SenseiHit>> iterList = new ArrayList<Iterator<SenseiHit>>(); public HitWithGroupQueue(SenseiHit hit, PriorityQueue<MyScoreDoc> queue) { this.hit = hit; this.queue = queue; } } private static Map<String, FacetAccessible> mergeFacetContainer(Collection<Map<String, FacetAccessible>> subMaps, SenseiRequest req) { Map<String, Map<String, Integer>> counts = new HashMap<String, Map<String, Integer>>(); for (Map<String, FacetAccessible> subMap : subMaps) { for (Map.Entry<String, FacetAccessible> entry : subMap.entrySet()) { String facetname = entry.getKey(); Map<String, Integer> count = counts.get(facetname); if(count == null) { count = new HashMap<String, Integer>(); counts.put(facetname, count); } Set<String> values = new HashSet<String>(); String[] rawvalues = null; BrowseSelection selection = req.getSelection(facetname); if (selection!=null&&(rawvalues = selection.getValues())!=null) { values.addAll(Arrays.asList(rawvalues)); } FacetAccessible facetAccessible = entry.getValue(); for(BrowseFacet facet : facetAccessible.getFacets()) { if (facet == null) continue; String val = facet.getValue(); int oldValue = count.containsKey(val) ? count.get(val) : 0; count.put(val, oldValue + facet.getFacetValueHitCount()); values.remove(val); } if (!values.isEmpty()) { for(String val : values) { int oldValue = count.containsKey(val) ? 
count.get(val) : 0; BrowseFacet facet = facetAccessible.getFacet(val); int delta = 0; if (facet!=null) { delta = facet.getFacetValueHitCount(); count.put(val, oldValue + delta); } } } facetAccessible.close(); } } Map<String, FacetAccessible> mergedFacetMap = new HashMap<String, FacetAccessible>(); for (Entry<String,Map<String, Integer>> entry : counts.entrySet()) { String facet = entry.getKey(); Map<String, Integer> facetValueCounts = entry.getValue(); List<BrowseFacet> facets = new ArrayList<BrowseFacet>(facetValueCounts.size()); for (Entry<String, Integer> subEntry : facetValueCounts.entrySet()) { facets.add(new BrowseFacet(subEntry.getKey(), subEntry.getValue())); } FacetSpec fspec = null; Set<String> values = new HashSet<String>(); String[] rawvalues = null; if (req != null) { fspec = req.getFacetSpec(facet); BrowseSelection selection = req.getSelection(facet); if (selection!=null&&(rawvalues = selection.getValues())!=null) { values.addAll(Arrays.asList(rawvalues)); } } Comparator<BrowseFacet> facetComp = getComparator(fspec); Collections.sort(facets, facetComp); if (fspec != null) { int maxCount = fspec.getMaxCount(); int numToShow = facets.size(); if (maxCount > 0) { numToShow = Math.min(maxCount, numToShow); } for(int i = facets.size() - 1; i >= numToShow; i--) { if (!values.contains(facets.get(i).getValue())) { facets.remove(i); } } } MappedFacetAccessible mergedFacetAccessible = new MappedFacetAccessible(facets.toArray(new BrowseFacet[facets.size()])); mergedFacetMap.put(facet, mergedFacetAccessible); } return mergedFacetMap; } private static Map<String, FacetAccessible> mergeFacetContainerServerSide(Collection<Map<String, FacetAccessible>> subMaps, SenseiRequest req) { Map<String, List<FacetAccessible>> counts = new HashMap<String, List<FacetAccessible>>(); for (Map<String, FacetAccessible> subMap : subMaps) { for (Map.Entry<String, FacetAccessible> entry : subMap.entrySet()) { String facetname = entry.getKey(); List<FacetAccessible> count = counts.get(facetname); if(count == null) { count = new LinkedList<FacetAccessible>(); counts.put(facetname, count); } count.add(entry.getValue()); } } // create combinedFacetAccessibles Map<String, FacetAccessible> fieldMap = new HashMap<String, FacetAccessible>(); for(Entry<String,List<FacetAccessible>> entry : counts.entrySet()) { String fieldname = entry.getKey(); List<FacetAccessible> facetAccs = entry.getValue(); if (facetAccs.size() == 1) { fieldMap.put(fieldname, facetAccs.get(0)); } else { fieldMap.put(fieldname, new CombinedFacetAccessible(req.getFacetSpec(fieldname), facetAccs)); } } Map<String, FacetAccessible> mergedFacetMap = new HashMap<String, FacetAccessible>(); for(Entry<String,FacetAccessible> entry : fieldMap.entrySet()) { String fieldname = entry.getKey(); FacetAccessible facetAcc = entry.getValue(); FacetSpec fspec = req.getFacetSpec(fieldname); BrowseSelection sel = req.getSelection(fieldname); Set<String> values = new HashSet<String>(); String[] rawvalues = null; if (sel!=null&&(rawvalues = sel.getValues())!=null) { values.addAll(Arrays.asList(rawvalues)); } List<BrowseFacet> facets = new ArrayList<BrowseFacet>(); facets.addAll(facetAcc.getFacets()); for(BrowseFacet bf : facets) { values.remove(bf.getValue()); } if (values.size()>0) { for(String value : values) { facets.add(facetAcc.getFacet(value)); } } facetAcc.close(); // sorting Comparator<BrowseFacet> facetComp = getComparator(fspec); Collections.sort(facets, facetComp); MappedFacetAccessible mergedFacetAccessible = new MappedFacetAccessible(facets.toArray(new 
BrowseFacet[facets.size()])); mergedFacetMap.put(fieldname, mergedFacetAccessible); } return mergedFacetMap; } private static Comparator<BrowseFacet> getComparator(FacetSpec fspec) { Comparator<BrowseFacet> facetComp; if ((fspec == null) || fspec.getOrderBy() == FacetSortSpec.OrderHitsDesc) { facetComp = new BrowseFacetHitsDescComparator(); } else { if (fspec.getOrderBy() == FacetSortSpec.OrderValueAsc) { facetComp = new BrowseFacetValueAscComparator(); } else { facetComp = fspec.getCustomComparatorFactory().newComparator(); } } return facetComp; } private static final class BrowseFacetValueAscComparator implements Comparator<BrowseFacet> { public int compare(BrowseFacet f1, BrowseFacet f2) { if (f1==null && f2==null){ return 0; } if (f1==null){ return -1; } if (f2==null){ return 1; } int ret = f1.getValue().compareTo(f2.getValue()); if (f1.getValue().startsWith("-") && f2.getValue().startsWith("-")) { ret *= -1; } return ret; } } private static final class BrowseFacetHitsDescComparator implements Comparator<BrowseFacet> { public int compare(BrowseFacet f1, BrowseFacet f2) { if (f1==null && f2==null){ return 0; } if (f1==null){ return -1; } if (f2==null){ return 1; } int h1 = f1.getFacetValueHitCount(); int h2 = f2.getFacetValueHitCount(); int val = h2 - h1; if (val == 0) { val = f1.getValue().compareTo(f2.getValue()); } return val; } } private static final class SenseiHitComparator implements Comparator<SenseiHit> { SortField[] _sortFields; public SenseiHitComparator(SortField[] sortFields) { _sortFields = sortFields; } public int compare(SenseiHit o1, SenseiHit o2) { if (_sortFields.length == 0) { return o1.getDocid() - o2.getDocid(); } else { int equalCount = 0; for (int i = 0; i < _sortFields.length; ++i) { String field = _sortFields[i].getField(); int reverse = _sortFields[i].getReverse() ? -1 : 1; if (_sortFields[i].getType() == SortField.SCORE) { float score1 = o1.getScore(); float score2 = o2.getScore(); if (score1 == score2) { equalCount++; continue; } else { return (score1 > score2) ? 
-reverse : reverse; } } else if (_sortFields[i].getType() == SortField.DOC) { return o1.getDocid() - o2.getDocid(); } else // A regular sort field { String value1 = o1.getField(field); String value2 = o2.getField(field); if (value1 == null && value2 == null) { equalCount++; continue; } else if (value1 == null) return -reverse; else if (value2 == null) return reverse; else { int comp = value1.compareTo(value2); if (value1.startsWith("-") && value2.startsWith("-")) { comp *= -1; } if (comp != 0) { return comp * reverse; } else { equalCount++; continue; } } } // A regular sort field } if (equalCount == _sortFields.length) { return o1.getDocid() - o2.getDocid(); } else { return 0; } } } } private static class MappedFacetAccessible implements FacetAccessible, Serializable { /** * */ private static final long serialVersionUID = 1L; private final HashMap<String, BrowseFacet> _facetMap; private final BrowseFacet[] _facets; public MappedFacetAccessible(BrowseFacet[] facets) { _facetMap = new HashMap<String, BrowseFacet>(); for (BrowseFacet facet : facets) { if (facet!=null){ _facetMap.put(facet.getValue(), facet); } } _facets = facets; } public BrowseFacet getFacet(String value) { return _facetMap.get(value); } public int getFacetHitsCount(Object value) { BrowseFacet facet = _facetMap.get(value); if (facet != null) return facet.getHitCount(); return 0; } public List<BrowseFacet> getFacets() { return Arrays.asList(_facets); } @Override public void close() { // TODO Auto-generated method stub } @Override public FacetIterator iterator() { throw new IllegalStateException("FacetIterator should not be obtained at merge time"); } } public static int getNumHits(Collection<SenseiResult> results) { int numHits = 0; for(SenseiResult res : results) { numHits += res.getNumHits(); } return numHits; } public static int getTotalDocs(Collection<SenseiResult> results) { int totalDocs = 0; for(SenseiResult res : results) { totalDocs += res.getTotalDocs(); } return totalDocs; } public static int getNumGroups(Collection<SenseiResult> results) { int numGroups = 0; for(SenseiResult res : results) { numGroups += res.getNumGroups(); } return numGroups; } public static long findLongestTime(Collection<SenseiResult> results) { long time = 0L; for (SenseiResult res : results) { time = Math.max(time,res.getTime()); } return time; } public static String findParsedQuery(Collection<SenseiResult> results) { for(SenseiResult res : results) { String parsedQuery = res.getParsedQuery(); if(parsedQuery != null && parsedQuery.length()>0) return parsedQuery; } return ""; } public static boolean hasSortCollector(Collection<SenseiResult> results) { for(SenseiResult res : results) { if (res.getSortCollector() != null && res.getSortCollector().contextList != null) { return true; } } return false; } public static void createUniqueDocIds(Collection<SenseiResult> results) { int totalDocs= 0; for (SenseiResult res : results) { SenseiHit[] hits = res.getSenseiHits(); if (hits != null) { for (SenseiHit hit : hits) { hit.setDocid(hit.getDocid() + totalDocs); } } totalDocs += res.getTotalDocs(); } } public static List<Iterator<SenseiHit>> flattenHits(Collection<SenseiResult> results) { List<Iterator<SenseiHit>> hitList = new ArrayList<Iterator<SenseiHit>>(results.size()); for (SenseiResult res : results) { hitList.add(Arrays.asList(res.getSenseiHits()).iterator()); } return hitList; } private static final int UNKNOWN_GROUP_VALUE_TYPE = 0; private static final int NORMAL_GROUP_VALUE_TYPE = 1; private static final int LONG_ARRAY_GROUP_VALUE_TYPE = 2; 
public static SenseiResult merge(final SenseiRequest req, Collection<SenseiResult> results, boolean onSearchNode) { long start = System.currentTimeMillis(); List<Map<String, FacetAccessible>> facetList = new ArrayList<Map<String, FacetAccessible>>(results.size()); // Compute the size of hits priority queue final int topHits = req.getOffset() + req.getCount(); // Sum the hits, groups, totalDocs, etc from all the results final int numHits = getNumHits(results); final int numGroups = getNumGroups(results); int totalDocs = getTotalDocs(results); final long longestTime = findLongestTime(results); final String parsedQuery = findParsedQuery(results); final boolean hasSortCollector = hasSortCollector(results); // Assign each hit document a unique "document id" createUniqueDocIds(results); // Extract the hits from the results List<Iterator<SenseiHit>> hitLists = flattenHits(results); List<FacetAccessible>[] groupAccessibles = extractFacetAccessible(results); // Merge your facets for (SenseiResult res : results) { Map<String, FacetAccessible> facetMap = res.getFacetMap(); if (facetMap != null) { facetList.add(facetMap); } } Map<String, FacetAccessible> mergedFacetMap = null; if (onSearchNode) { mergedFacetMap = mergeFacetContainerServerSide(facetList, req); } else { mergedFacetMap = mergeFacetContainer(facetList, req); } Comparator<SenseiHit> comparator = new SenseiHitComparator(req.getSort()); SenseiHit[] hits; if (req.getGroupBy() == null || req.getGroupBy().length == 0) { List<SenseiHit> mergedList = ListMerger.mergeLists(req.getOffset(), req.getCount(), hitLists .toArray(new Iterator[hitLists.size()]), comparator); hits = mergedList.toArray(new SenseiHit[mergedList.size()]); } else { int[] rawGroupValueType = new int[req.getGroupBy().length]; // 0: unknown, 1: normal, 2: long[] PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp = new PrimitiveLongArrayWrapper(null); Iterator<SenseiHit> mergedIter = ListMerger.mergeLists(hitLists, comparator); List<SenseiHit> hitsList = null; if (!hasSortCollector) { hitsList = buildHitsListNoSortCollector(req, topHits, rawGroupValueType, mergedIter, req.getOffset()); //numGroups = (int)(numGroups*(groupHitMap.size()/(float)preGroups)); } else { int offsetLeft = req.getOffset(); MyScoreDoc pre = null; if (topHits > 0 && groupAccessibles != null && groupAccessibles.length != 0) { hitsList = buildHitsList(req, results, topHits, groupAccessibles, rawGroupValueType, primitiveLongArrayWrapperTmp); } else { hitsList = buildHitsListNoGroupAccessibles(req, topHits, rawGroupValueType, primitiveLongArrayWrapperTmp, mergedIter, offsetLeft); } //for (int i=0; i<combinedFacetAccessibles.length; ++i) combinedFacetAccessibles[i].close(); } hits = hitsList.toArray(new SenseiHit[hitsList.size()]); PrepareGroupMappings prepareGroupMappings = new PrepareGroupMappings(req, results, hasSortCollector, hits, rawGroupValueType, primitiveLongArrayWrapperTmp).invoke(); Map<Object, HitWithGroupQueue>[] groupMaps = prepareGroupMappings.getGroupMaps(); totalDocs = prepareGroupMappings.getTotalDocs(); if (hasSortCollector) { // Fix group position for (SenseiHit hit : hits) { if (hit.getGroupHitsCount() <= 1) { hit.setGroupPosition(0); hit.setGroupField(req.getGroupBy()[0]); hit.setGroupValue(hit.getField(req.getGroupBy()[0])); hit.setRawGroupValue(hit.getRawField(req.getGroupBy()[0])); } } for (Map<Object, HitWithGroupQueue> map : groupMaps) { for (HitWithGroupQueue hwg : map.values()) { int index = hwg.queue.size() - 1; if (index >= 0) { SenseiHit[] groupHits = new SenseiHit[index+1]; 
while (index >=0) { groupHits[index] = hwg.queue.pop().getSenseiHit(req); --index; } hwg.hit.setGroupHits(groupHits); } } } } else { for (Map<Object, HitWithGroupQueue> map : groupMaps) { for (HitWithGroupQueue hwg : map.values()) { List<SenseiHit> mergedList = ListMerger.mergeLists(0, req.getMaxPerGroup(), hwg.iterList .toArray(new Iterator[hwg.iterList.size()]), comparator); SenseiHit[] groupHits = mergedList.toArray(new SenseiHit[mergedList.size()]); hwg.hit.setGroupHits(groupHits); } } } } if (groupAccessibles != null) { for (List<FacetAccessible> list : groupAccessibles) { if (list != null) { for (FacetAccessible acc : list) { if (acc != null) acc.close(); } } } } SenseiResult merged = new SenseiResult(); merged.setHits(hits); merged.setNumHits(numHits); merged.setNumGroups(numGroups); merged.setTotalDocs(totalDocs); merged.addAll(mergedFacetMap); long end = System.currentTimeMillis(); merged.setTime(longestTime + end - start); mergerErrors(merged, req, results, parsedQuery); if (req.getMapReduceFunction() != null) { if (onSearchNode) { merged.setMapReduceResult(SenseiReduceFunctionWrapper.combine(req.getMapReduceFunction(), SenseiReduceFunctionWrapper.extractMapReduceResults(results))); } else { //on broker level merged.setMapReduceResult(SenseiReduceFunctionWrapper.reduce(req.getMapReduceFunction(), SenseiReduceFunctionWrapper.extractMapReduceResults(results))); } } return merged; } private static List<SenseiHit> buildHitsListNoGroupAccessibles(SenseiRequest req, int topHits, int[] rawGroupValueType, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp, Iterator<SenseiHit> mergedIter, int offsetLeft) { List<SenseiHit> hitsList = new ArrayList<SenseiHit>(req.getCount()); Object rawGroupValue = null; Object firstRawGroupValue = null; Set<Object>[] groupSets = new Set[1]; groupSets[0] = new HashSet<Object>(topHits); while(mergedIter.hasNext()) { SenseiHit hit = mergedIter.next(); firstRawGroupValue = null; int i=0; for (; i<groupSets.length; ++i) { //rawGroupValue = hit.getRawField(req.getGroupBy()[i]); rawGroupValue = extractRawGroupValue(rawGroupValueType, i, primitiveLongArrayWrapperTmp, hit); if (firstRawGroupValue == null) firstRawGroupValue = rawGroupValue; if (groupSets[i].contains(rawGroupValue)) { i = -1; break; } } if (i >= 0) { if (i >= groupSets.length) { i = 0; rawGroupValue = firstRawGroupValue; } if (offsetLeft > 0) --offsetLeft; else { //hit.setGroupHitsCount(combinedFacetAccessibles[i].getFacetHitsCount(hit.getRawGroupValue())); hitsList.add(hit); if (hitsList.size() >= req.getCount()) break; } if (rawGroupValueType[i] == LONG_ARRAY_GROUP_VALUE_TYPE) groupSets[i].add(new PrimitiveLongArrayWrapper(primitiveLongArrayWrapperTmp.data)); else groupSets[i].add(rawGroupValue); } } return hitsList; } private static List<SenseiHit> buildHitsList(SenseiRequest req, Collection<SenseiResult> results, int topHits, List<FacetAccessible>[] groupAccessibles, int[] rawGroupValueType, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp) { List<SenseiHit> hitsList = new ArrayList<SenseiHit>(req.getCount()); MyScoreDoc pre = null; Object rawGroupValue = null; CombinedFacetAccessible[] combinedFacetAccessibles = new CombinedFacetAccessible[groupAccessibles.length]; for(int i = 0; i < groupAccessibles.length; i++) { combinedFacetAccessibles[i] = new CombinedFacetAccessible(new FacetSpec(), groupAccessibles[i]); } Set<Object>[] groupSets = new Set[rawGroupValueType.length]; for (int i = 0; i < rawGroupValueType.length; ++i) { groupSets[i] = new HashSet<Object>(topHits); } Map<Object, 
MyScoreDoc>[] valueDocMaps = new Map[rawGroupValueType.length]; for (int i = 0; i < rawGroupValueType.length; ++i) { valueDocMaps[i] = new HashMap<Object, MyScoreDoc>(topHits); } int totalDocs = 0; MyScoreDoc tmpScoreDoc = new MyScoreDoc(0, 0.0f, 0, null); MyScoreDoc bottom = null; boolean queueFull = false; DocIDPriorityQueue docQueue = new DocIDPriorityQueue(new DocComparator() { public int compare(ScoreDoc doc1, ScoreDoc doc2) { return ((MyScoreDoc)doc1).sortValue.compareTo(((MyScoreDoc)doc2).sortValue); } public Comparable value(ScoreDoc doc) { return ((MyScoreDoc)doc).sortValue; } }, topHits, 0); // Sort all the documents???? for (SenseiResult res : results) { SortCollector sortCollector = res.getSortCollector(); if (sortCollector == null) continue; Iterator<CollectorContext> contextIter = sortCollector.contextList.iterator(); // Populate dataCaches and contextLeft CollectorContext currentContext = null; int contextLeft = 0; FacetDataCache[] dataCaches = new FacetDataCache[sortCollector.groupByMulti.length]; while (contextIter.hasNext()) { currentContext = contextIter.next(); currentContext.restoreRuntimeFacets(); contextLeft = currentContext.length; if (contextLeft > 0) { for (int j=0; j<sortCollector.groupByMulti.length; ++j) dataCaches[j] = (FacetDataCache)sortCollector.groupByMulti[j].getFacetData(currentContext.reader); break; } } Iterator<float[]> scoreArrayIter = sortCollector.scorearraylist != null ? sortCollector.scorearraylist.iterator():null; if (contextLeft > 0) { for (int[] docs : sortCollector.docidarraylist) { float[] scores = scoreArrayIter != null ? scoreArrayIter.next():null; for (int i=0; i<SortCollector.BLOCK_SIZE; ++i) { tmpScoreDoc.doc = docs[i]; tmpScoreDoc.score = scores != null ? scores[i] : 0.0f; tmpScoreDoc.finalDoc = currentContext.base + totalDocs + tmpScoreDoc.doc; tmpScoreDoc.reader = currentContext.reader; tmpScoreDoc.sortValue = currentContext.comparator.value(tmpScoreDoc); int j=0; if (!queueFull || tmpScoreDoc.sortValue.compareTo(bottom.sortValue) < 0) { for (;; ++j) { rawGroupValue = dataCaches[j].valArray.getRawValue(dataCaches[j].orderArray.get(tmpScoreDoc.doc)); rawGroupValue = extractRawGroupValue(rawGroupValueType, j, primitiveLongArrayWrapperTmp, rawGroupValue); pre = valueDocMaps[j].get(rawGroupValue); if (pre != null) { j = -1; break; } if (j >= combinedFacetAccessibles.length) break; if (rawGroupValueType[j] == LONG_ARRAY_GROUP_VALUE_TYPE) { if (combinedFacetAccessibles[j].getCappedFacetCount(primitiveLongArrayWrapperTmp.data, 2) != 1) break; } else { if (combinedFacetAccessibles[j].getCappedFacetCount(rawGroupValue, 2) != 1) break; } } if (j < 0) { if (tmpScoreDoc.sortValue.compareTo(pre.sortValue) < 0) { tmpScoreDoc.groupPos = pre.groupPos; tmpScoreDoc.rawGroupValue = rawGroupValue; MyScoreDoc tmp = pre; // Pre has a higher score. Pop it in the queue! 
bottom = (MyScoreDoc)docQueue.replace(tmpScoreDoc, pre); valueDocMaps[tmpScoreDoc.groupPos].put(rawGroupValue, tmpScoreDoc); tmpScoreDoc = tmp; } } else { if (queueFull) { tmpScoreDoc.groupPos = j; tmpScoreDoc.rawGroupValue = rawGroupValue; MyScoreDoc tmp = bottom; valueDocMaps[tmp.groupPos].remove(tmp.rawGroupValue); bottom = (MyScoreDoc)docQueue.replace(tmpScoreDoc); valueDocMaps[j].put(rawGroupValue, tmpScoreDoc); tmpScoreDoc = tmp; } else { MyScoreDoc tmp = new MyScoreDoc(tmpScoreDoc.doc, tmpScoreDoc.score, currentContext.base + totalDocs + tmpScoreDoc.doc, currentContext.reader); tmp.groupPos = j; tmp.rawGroupValue = rawGroupValue; tmp.sortValue = tmpScoreDoc.sortValue; bottom = (MyScoreDoc)docQueue.add(tmp); valueDocMaps[j].put(rawGroupValue, tmp); queueFull = (docQueue.size >= topHits); } } } --contextLeft; if (contextLeft <= 0) { while (contextIter.hasNext()) { currentContext = contextIter.next(); currentContext.restoreRuntimeFacets(); contextLeft = currentContext.length; if (contextLeft > 0) { for (j=0; j<sortCollector.groupByMulti.length; ++j) dataCaches[j] = (FacetDataCache)sortCollector.groupByMulti[j].getFacetData(currentContext.reader); break; } } if (contextLeft <= 0) // No more docs left. break; } } } } totalDocs += res.getTotalDocs(); } int len = docQueue.size() - req.getOffset(); if (len < 0) len = 0; SenseiHit[] hitArray = new SenseiHit[len]; for (int i = hitArray.length-1; i>=0; --i) { tmpScoreDoc = (MyScoreDoc)docQueue.pop(); hitArray[i] = tmpScoreDoc.getSenseiHit(req); } for (int i=0; i<hitArray.length; ++i) hitsList.add(hitArray[i]); return hitsList; } private static List<SenseiHit> buildHitsListNoSortCollector(SenseiRequest req, int topHits, int[] rawGroupValueType, Iterator<SenseiHit> mergedIter, int offsetLeft) { List<SenseiHit> hitsList = new ArrayList<SenseiHit>(req.getCount()); // TODO: Pull out the sensei hits extraction from this function PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp = new PrimitiveLongArrayWrapper(null); Map<Object, SenseiHit>[] groupHitMaps = new Map[req.getGroupBy().length]; for (int i=0; i < groupHitMaps.length; ++i) { groupHitMaps[i] = new HashMap<Object, SenseiHit>(topHits); } while(mergedIter.hasNext()) { SenseiHit hit = mergedIter.next(); Object rawGroupValue = extractRawGroupValue(rawGroupValueType, hit.getGroupPosition(), primitiveLongArrayWrapperTmp, hit); SenseiHit pre = groupHitMaps[hit.getGroupPosition()].get(rawGroupValue); if (pre != null) { if (offsetLeft <= 0) { pre.setGroupHitsCount(pre.getGroupHitsCount()+hit.getGroupHitsCount()); } } else { if (offsetLeft > 0) --offsetLeft; else if (hitsList.size() < req.getCount()) hitsList.add(hit); if (rawGroupValueType[0] == 2) groupHitMaps[hit.getGroupPosition()].put(new PrimitiveLongArrayWrapper(primitiveLongArrayWrapperTmp.data), hit); else groupHitMaps[hit.getGroupPosition()].put(rawGroupValue, hit); } } return hitsList; } private static Object extractRawGroupValue(int[] rawGroupValueType, int groupPosition, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp, SenseiHit hit) { return extractRawGroupValue(rawGroupValueType, groupPosition, primitiveLongArrayWrapperTmp, hit.getRawGroupValue()); } private static Object extractRawGroupValue(int[] rawGroupValueType, int groupPosition, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp, Object rawGroupValue) { if (rawGroupValueType[groupPosition] == LONG_ARRAY_GROUP_VALUE_TYPE) { // We already know this group position is a long[] primitiveLongArrayWrapperTmp.data = (long[])rawGroupValue; rawGroupValue = 
primitiveLongArrayWrapperTmp; } else if (rawGroupValueType[groupPosition] == UNKNOWN_GROUP_VALUE_TYPE) { // Unknown if (rawGroupValue != null) { if (rawGroupValue instanceof long[]) { // It's a long array, so set the position rawGroupValueType[groupPosition] = LONG_ARRAY_GROUP_VALUE_TYPE; primitiveLongArrayWrapperTmp.data = (long[])rawGroupValue; rawGroupValue = primitiveLongArrayWrapperTmp; } else rawGroupValueType[groupPosition] = NORMAL_GROUP_VALUE_TYPE; } } return rawGroupValue; } private static List<FacetAccessible>[] extractFacetAccessible(Collection<SenseiResult> results) { List<FacetAccessible>[] groupAccessibles = null; for (SenseiResult res : results) { if (res.getGroupAccessibles() != null) { if (groupAccessibles == null) { groupAccessibles = new List[res.getGroupAccessibles().length]; for (int i=0; i<groupAccessibles.length; ++i) { groupAccessibles[i] = new ArrayList<FacetAccessible>(results.size()); } } for (int i=0; i<groupAccessibles.length; ++i) { groupAccessibles[i].add(res.getGroupAccessibles()[i]); } } } return groupAccessibles; } public static class PrepareGroupMappings { private final SenseiRequest req; private final Collection<SenseiResult> results; private final boolean hasSortCollector; private final SenseiHit[] hits; private final int[] rawGroupValueType; private final PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp; private int totalDocs; private Map<Object, HitWithGroupQueue>[] groupMaps; public PrepareGroupMappings(SenseiRequest req, Collection<SenseiResult> results, boolean hasSortCollector, SenseiHit[] hits, int[] rawGroupValueType, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp) { this.req = req; this.results = results; this.hasSortCollector = hasSortCollector; this.hits = hits; this.rawGroupValueType = rawGroupValueType; this.primitiveLongArrayWrapperTmp = primitiveLongArrayWrapperTmp; groupMaps = new Map[req.getGroupBy().length]; for (int i=0; i< groupMaps.length; ++i) { groupMaps[i] = new HashMap<Object, HitWithGroupQueue>(hits.length*2); } } public int getTotalDocs() { return totalDocs; } public Map<Object, HitWithGroupQueue>[] getGroupMaps() { return groupMaps; } public PrepareGroupMappings invoke() { Object rawGroupValue; for (SenseiHit hit : hits) { rawGroupValue = hit.getRawField(req.getGroupBy()[hit.getGroupPosition()]); rawGroupValue = extractRawGroupValue(rawGroupValueType, hit.getGroupPosition(), primitiveLongArrayWrapperTmp, rawGroupValue); groupMaps[hit.getGroupPosition()].put(rawGroupValue, new HitWithGroupQueue(hit, new PriorityQueue<MyScoreDoc>() { private int r; { this.initialize(req.getMaxPerGroup() <= 1? 
0: req.getMaxPerGroup()); } protected boolean lessThan(MyScoreDoc a, MyScoreDoc b) { r = a.sortValue.compareTo(b.sortValue); if (r>0) return true; else if (r<0) return false; else return (a.finalDoc > b.finalDoc); } } )); } MyScoreDoc tmpScoreDoc = null; int doc = 0; float score = 0.0f; HitWithGroupQueue hitWithGroupQueue = null; totalDocs = 0; for (SenseiResult res : results) { if (hasSortCollector) { SortCollector sortCollector = res.getSortCollector(); if (sortCollector == null) continue; Iterator<CollectorContext> contextIter = sortCollector.contextList.iterator(); CollectorContext currentContext = null; int contextLeft = 0; FacetDataCache[] dataCaches = new FacetDataCache[sortCollector.groupByMulti.length]; while (contextIter.hasNext()) { currentContext = contextIter.next(); currentContext.restoreRuntimeFacets(); contextLeft = currentContext.length; if (contextLeft > 0) { for (int j=0; j<sortCollector.groupByMulti.length; ++j) dataCaches[j] = (FacetDataCache)sortCollector.groupByMulti[j].getFacetData(currentContext.reader); break; } } Iterator<float[]> scoreArrayIter = sortCollector.scorearraylist != null ? sortCollector.scorearraylist.iterator():null; if (contextLeft > 0) { for (int[] docs : sortCollector.docidarraylist) { float[] scores = scoreArrayIter != null ? scoreArrayIter.next():null; for (int i=0; i<SortCollector.BLOCK_SIZE; ++i) { doc = docs[i]; score = scores != null ? scores[i]:0.0f; int j=0; for (; j<sortCollector.groupByMulti.length; ++j) { rawGroupValue = extractRawGroupValue(rawGroupValueType, j, primitiveLongArrayWrapperTmp, dataCaches[j].valArray.getRawValue(dataCaches[j].orderArray.get(doc))); hitWithGroupQueue = groupMaps[j].get(rawGroupValue); if (hitWithGroupQueue != null) { hitWithGroupQueue.hit.setGroupHitsCount(hitWithGroupQueue.hit.getGroupHitsCount() + 1); // Collect this hit. if (tmpScoreDoc == null) tmpScoreDoc = new MyScoreDoc(doc, score, currentContext.base + totalDocs + doc, currentContext.reader); else { tmpScoreDoc.doc = doc; tmpScoreDoc.score = score; tmpScoreDoc.finalDoc = currentContext.base + totalDocs + doc; tmpScoreDoc.reader = currentContext.reader; } tmpScoreDoc.sortValue = currentContext.comparator.value(tmpScoreDoc); tmpScoreDoc.groupPos = j; tmpScoreDoc.rawGroupValue = rawGroupValue; tmpScoreDoc = hitWithGroupQueue.queue.insertWithOverflow(tmpScoreDoc); break; } } --contextLeft; if (contextLeft <= 0) { while (contextIter.hasNext()) { currentContext = contextIter.next(); currentContext.restoreRuntimeFacets(); contextLeft = currentContext.length; if (contextLeft > 0) { for (j=0; j<sortCollector.groupByMulti.length; ++j) dataCaches[j] = (FacetDataCache)sortCollector.groupByMulti[j].getFacetData(currentContext.reader); break; } } if (contextLeft <= 0) // No more docs left. 
break; } } } } } else { if (res.getSenseiHits() != null) { for (SenseiHit hit : res.getSenseiHits()) { if (hit.getGroupHits() != null) { rawGroupValue = hit.getRawGroupValue(); if (rawGroupValueType[hit.getGroupPosition()] == LONG_ARRAY_GROUP_VALUE_TYPE) { primitiveLongArrayWrapperTmp.data = (long[])rawGroupValue; rawGroupValue = primitiveLongArrayWrapperTmp; } hitWithGroupQueue = groupMaps[hit.getGroupPosition()].get(rawGroupValue); if (hitWithGroupQueue != null) hitWithGroupQueue.iterList.add(Arrays.asList(hit.getSenseiGroupHits()).iterator()); } } } } totalDocs += res.getTotalDocs(); } return this; } } private static void mergerErrors(SenseiResult merged, final SenseiRequest req, Collection<SenseiResult> results, String parsedQuery) { merged.setParsedQuery(parsedQuery); merged.getErrors().addAll(req.getErrors()); for (SenseiResult res : results) { merged.getErrors().addAll(res.getErrors()); if (res.getBoboErrors().size() > 0) { for (String boboError : res.getBoboErrors()) { merged.addError(new SenseiError(boboError, ErrorType.BoboExecutionError)); } } } } }
sensei-core/src/main/java/com/senseidb/search/node/ResultMerger.java
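ResultMerger above combines the per-partition SenseiHit lists with ListMerger.mergeLists, relying on each partition's list already being sorted by the shared SenseiHitComparator and then applying the request's offset and count. The sketch below illustrates that k-way merge idea using only java.util classes; it is a simplified stand-in written for this note, not Bobo's actual ListMerger implementation, and the class and method names are made up for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

// Illustrative k-way merge sketch: each iterator is already sorted by the same comparator,
// a small heap holds the current head of every iterator, and offset/count trim the merged
// stream down to a single result page.
public final class KWayMergeSketch {

  private static final class Head<T> {
    final T value;
    final Iterator<T> source;
    Head(T value, Iterator<T> source) { this.value = value; this.source = source; }
  }

  public static <T> List<T> mergePage(int offset, int count,
                                      List<Iterator<T>> sortedIterators,
                                      Comparator<? super T> comparator) {
    PriorityQueue<Head<T>> heap = new PriorityQueue<Head<T>>(
        Math.max(1, sortedIterators.size()),
        (a, b) -> comparator.compare(a.value, b.value));
    for (Iterator<T> it : sortedIterators) {
      if (it.hasNext()) heap.add(new Head<T>(it.next(), it));
    }
    List<T> page = new ArrayList<T>(count);
    int skipped = 0;
    while (!heap.isEmpty() && page.size() < count) {
      Head<T> top = heap.poll();            // global minimum across all partitions
      if (skipped < offset) skipped++;      // consume the offset first
      else page.add(top.value);
      if (top.source.hasNext()) heap.add(new Head<T>(top.source.next(), top.source));
    }
    return page;
  }

  public static void main(String[] args) {
    List<Iterator<Integer>> partitions = new ArrayList<Iterator<Integer>>();
    partitions.add(Arrays.asList(1, 4, 9).iterator());
    partitions.add(Arrays.asList(2, 3, 8).iterator());
    partitions.add(Arrays.asList(5, 6, 7).iterator());
    // Skip 2, take 4 from the merged order 1,2,...,9 -> prints [3, 4, 5, 6]
    System.out.println(mergePage(2, 4, partitions, Comparator.<Integer>naturalOrder()));
  }
}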
package com.senseidb.search.node; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.log4j.Logger; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.SortField; import org.apache.lucene.util.PriorityQueue; import proj.zoie.api.ZoieIndexReader; import com.browseengine.bobo.api.BoboIndexReader; import com.browseengine.bobo.api.BrowseFacet; import com.browseengine.bobo.api.BrowseSelection; import com.browseengine.bobo.api.FacetAccessible; import com.browseengine.bobo.api.FacetIterator; import com.browseengine.bobo.api.FacetSpec; import com.browseengine.bobo.api.FacetSpec.FacetSortSpec; import com.browseengine.bobo.facets.CombinedFacetAccessible; import com.browseengine.bobo.facets.FacetHandler; import com.browseengine.bobo.facets.data.FacetDataCache; import com.browseengine.bobo.facets.data.PrimitiveLongArrayWrapper; import com.browseengine.bobo.sort.DocComparator; import com.browseengine.bobo.sort.DocIDPriorityQueue; import com.browseengine.bobo.sort.SortCollector; import com.browseengine.bobo.sort.SortCollector.CollectorContext; import com.browseengine.bobo.util.ListMerger; import com.senseidb.search.req.ErrorType; import com.senseidb.search.req.SenseiError; import com.senseidb.search.req.SenseiHit; import com.senseidb.search.req.SenseiRequest; import com.senseidb.search.req.SenseiResult; import com.senseidb.search.req.mapred.impl.SenseiReduceFunctionWrapper; public class ResultMerger { private final static Logger logger = Logger.getLogger(ResultMerger.class.getName()); private final static class MyScoreDoc extends ScoreDoc { private static final long serialVersionUID = 1L; private BoboIndexReader reader; private int finalDoc; public int groupPos; public Object rawGroupValue; public Comparable sortValue; public MyScoreDoc(int docid, float score, int finalDoc, BoboIndexReader reader) { super(docid, score); this.finalDoc = finalDoc; this.reader = reader; } SenseiHit getSenseiHit(SenseiRequest req) { SenseiHit hit = new SenseiHit(); if (req.isFetchStoredFields() || req.isFetchStoredValue()) { if (req.isFetchStoredFields()) { try { hit.setStoredFields(reader.document(doc)); } catch(Exception e) { logger.error(e.getMessage(),e); } } try { IndexReader innerReader = reader.getInnerReader(); if (innerReader instanceof ZoieIndexReader) { hit.setStoredValue( ((ZoieIndexReader)innerReader).getStoredValue( ((ZoieIndexReader)innerReader).getUID(doc))); } } catch(Exception e) { } } List<FacetHandler<?>> facetHandlers= new ArrayList<FacetHandler<?>>(reader.getFacetHandlerMap().values()); if (reader.getRuntimeFacetHandlerMap() != null) { facetHandlers.addAll(reader.getRuntimeFacetHandlerMap().values()); } Map<String,String[]> map = new HashMap<String,String[]>(); Map<String,Object[]> rawMap = new HashMap<String,Object[]>(); Set<String> selectSet = req.getSelectSet(); for (FacetHandler<?> facetHandler : facetHandlers) { if (selectSet == null || selectSet.size() == 0 || selectSet.contains(facetHandler.getName())) { map.put(facetHandler.getName(),facetHandler.getFieldValues(reader,doc)); rawMap.put(facetHandler.getName(),facetHandler.getRawFieldValues(reader,doc)); } } hit.setFieldValues(map); 
hit.setRawFieldValues(rawMap); hit.setUID(((ZoieIndexReader<BoboIndexReader>)reader.getInnerReader()).getUID(doc)); hit.setDocid(finalDoc); hit.setScore(score); hit.setComparable(sortValue); hit.setGroupPosition(groupPos); String[] groupBy = req.getGroupBy(); if (groupBy != null && groupBy.length > groupPos && groupBy[groupPos] != null) { hit.setGroupField(groupBy[groupPos]); hit.setGroupValue(hit.getField(groupBy[groupPos])); hit.setRawGroupValue(hit.getRawField(groupBy[groupPos])); } return hit; } } private final static class HitWithGroupQueue { public SenseiHit hit; public PriorityQueue<MyScoreDoc> queue; public ArrayList<Iterator<SenseiHit>> iterList = new ArrayList<Iterator<SenseiHit>>(); public HitWithGroupQueue(SenseiHit hit, PriorityQueue<MyScoreDoc> queue) { this.hit = hit; this.queue = queue; } } private static Map<String, FacetAccessible> mergeFacetContainer(Collection<Map<String, FacetAccessible>> subMaps, SenseiRequest req) { Map<String, Map<String, Integer>> counts = new HashMap<String, Map<String, Integer>>(); for (Map<String, FacetAccessible> subMap : subMaps) { for (Map.Entry<String, FacetAccessible> entry : subMap.entrySet()) { String facetname = entry.getKey(); Map<String, Integer> count = counts.get(facetname); if(count == null) { count = new HashMap<String, Integer>(); counts.put(facetname, count); } Set<String> values = new HashSet<String>(); String[] rawvalues = null; BrowseSelection selection = req.getSelection(facetname); if (selection!=null&&(rawvalues = selection.getValues())!=null) { values.addAll(Arrays.asList(rawvalues)); } FacetAccessible facetAccessible = entry.getValue(); for(BrowseFacet facet : facetAccessible.getFacets()) { if (facet == null) continue; String val = facet.getValue(); int oldValue = count.containsKey(val) ? count.get(val) : 0; count.put(val, oldValue + facet.getFacetValueHitCount()); values.remove(val); } if (!values.isEmpty()) { for(String val : values) { int oldValue = count.containsKey(val) ? 
count.get(val) : 0; BrowseFacet facet = facetAccessible.getFacet(val); int delta = 0; if (facet!=null) { delta = facet.getFacetValueHitCount(); count.put(val, oldValue + delta); } } } facetAccessible.close(); } } Map<String, FacetAccessible> mergedFacetMap = new HashMap<String, FacetAccessible>(); for (Entry<String,Map<String, Integer>> entry : counts.entrySet()) { String facet = entry.getKey(); Map<String, Integer> facetValueCounts = entry.getValue(); List<BrowseFacet> facets = new ArrayList<BrowseFacet>(facetValueCounts.size()); for (Entry<String, Integer> subEntry : facetValueCounts.entrySet()) { facets.add(new BrowseFacet(subEntry.getKey(), subEntry.getValue())); } FacetSpec fspec = null; Set<String> values = new HashSet<String>(); String[] rawvalues = null; if (req != null) { fspec = req.getFacetSpec(facet); BrowseSelection selection = req.getSelection(facet); if (selection!=null&&(rawvalues = selection.getValues())!=null) { values.addAll(Arrays.asList(rawvalues)); } } Comparator<BrowseFacet> facetComp = getComparator(fspec); Collections.sort(facets, facetComp); if (fspec != null) { int maxCount = fspec.getMaxCount(); int numToShow = facets.size(); if (maxCount > 0) { numToShow = Math.min(maxCount, numToShow); } for(int i = facets.size() - 1; i >= numToShow; i--) { if (!values.contains(facets.get(i).getValue())) { facets.remove(i); } } } MappedFacetAccessible mergedFacetAccessible = new MappedFacetAccessible(facets.toArray(new BrowseFacet[facets.size()])); mergedFacetMap.put(facet, mergedFacetAccessible); } return mergedFacetMap; } private static Map<String, FacetAccessible> mergeFacetContainerServerSide(Collection<Map<String, FacetAccessible>> subMaps, SenseiRequest req) { Map<String, List<FacetAccessible>> counts = new HashMap<String, List<FacetAccessible>>(); for (Map<String, FacetAccessible> subMap : subMaps) { for (Map.Entry<String, FacetAccessible> entry : subMap.entrySet()) { String facetname = entry.getKey(); List<FacetAccessible> count = counts.get(facetname); if(count == null) { count = new LinkedList<FacetAccessible>(); counts.put(facetname, count); } count.add(entry.getValue()); } } // create combinedFacetAccessibles Map<String, FacetAccessible> fieldMap = new HashMap<String, FacetAccessible>(); for(Entry<String,List<FacetAccessible>> entry : counts.entrySet()) { String fieldname = entry.getKey(); List<FacetAccessible> facetAccs = entry.getValue(); if (facetAccs.size() == 1) { fieldMap.put(fieldname, facetAccs.get(0)); } else { fieldMap.put(fieldname, new CombinedFacetAccessible(req.getFacetSpec(fieldname), facetAccs)); } } Map<String, FacetAccessible> mergedFacetMap = new HashMap<String, FacetAccessible>(); for(Entry<String,FacetAccessible> entry : fieldMap.entrySet()) { String fieldname = entry.getKey(); FacetAccessible facetAcc = entry.getValue(); FacetSpec fspec = req.getFacetSpec(fieldname); BrowseSelection sel = req.getSelection(fieldname); Set<String> values = new HashSet<String>(); String[] rawvalues = null; if (sel!=null&&(rawvalues = sel.getValues())!=null) { values.addAll(Arrays.asList(rawvalues)); } List<BrowseFacet> facets = new ArrayList<BrowseFacet>(); facets.addAll(facetAcc.getFacets()); for(BrowseFacet bf : facets) { values.remove(bf.getValue()); } if (values.size()>0) { for(String value : values) { facets.add(facetAcc.getFacet(value)); } } facetAcc.close(); // sorting Comparator<BrowseFacet> facetComp = getComparator(fspec); Collections.sort(facets, facetComp); MappedFacetAccessible mergedFacetAccessible = new MappedFacetAccessible(facets.toArray(new 
BrowseFacet[facets.size()])); mergedFacetMap.put(fieldname, mergedFacetAccessible); } return mergedFacetMap; } private static Comparator<BrowseFacet> getComparator(FacetSpec fspec) { Comparator<BrowseFacet> facetComp; if ((fspec == null) || fspec.getOrderBy() == FacetSortSpec.OrderHitsDesc) { facetComp = new BrowseFacetHitsDescComparator(); } else { if (fspec.getOrderBy() == FacetSortSpec.OrderValueAsc) { facetComp = new BrowseFacetValueAscComparator(); } else { facetComp = fspec.getCustomComparatorFactory().newComparator(); } } return facetComp; } private static final class BrowseFacetValueAscComparator implements Comparator<BrowseFacet> { public int compare(BrowseFacet f1, BrowseFacet f2) { if (f1==null && f2==null){ return 0; } if (f1==null){ return -1; } if (f2==null){ return 1; } int ret = f1.getValue().compareTo(f2.getValue()); if (f1.getValue().startsWith("-") && f2.getValue().startsWith("-")) { ret *= -1; } return ret; } } private static final class BrowseFacetHitsDescComparator implements Comparator<BrowseFacet> { public int compare(BrowseFacet f1, BrowseFacet f2) { if (f1==null && f2==null){ return 0; } if (f1==null){ return -1; } if (f2==null){ return 1; } int h1 = f1.getFacetValueHitCount(); int h2 = f2.getFacetValueHitCount(); int val = h2 - h1; if (val == 0) { val = f1.getValue().compareTo(f2.getValue()); } return val; } } private static final class SenseiHitComparator implements Comparator<SenseiHit> { SortField[] _sortFields; public SenseiHitComparator(SortField[] sortFields) { _sortFields = sortFields; } public int compare(SenseiHit o1, SenseiHit o2) { if (_sortFields.length == 0) { return o1.getDocid() - o2.getDocid(); } else { int equalCount = 0; for (int i = 0; i < _sortFields.length; ++i) { String field = _sortFields[i].getField(); int reverse = _sortFields[i].getReverse() ? -1 : 1; if (_sortFields[i].getType() == SortField.SCORE) { float score1 = o1.getScore(); float score2 = o2.getScore(); if (score1 == score2) { equalCount++; continue; } else { return (score1 > score2) ? 
-reverse : reverse; } } else if (_sortFields[i].getType() == SortField.DOC) { return o1.getDocid() - o2.getDocid(); } else // A regular sort field { String value1 = o1.getField(field); String value2 = o2.getField(field); if (value1 == null && value2 == null) { equalCount++; continue; } else if (value1 == null) return -reverse; else if (value2 == null) return reverse; else { int comp = value1.compareTo(value2); if (value1.startsWith("-") && value2.startsWith("-")) { comp *= -1; } if (comp != 0) { return comp * reverse; } else { equalCount++; continue; } } } // A regular sort field } if (equalCount == _sortFields.length) { return o1.getDocid() - o2.getDocid(); } else { return 0; } } } } private static class MappedFacetAccessible implements FacetAccessible, Serializable { /** * */ private static final long serialVersionUID = 1L; private final HashMap<String, BrowseFacet> _facetMap; private final BrowseFacet[] _facets; public MappedFacetAccessible(BrowseFacet[] facets) { _facetMap = new HashMap<String, BrowseFacet>(); for (BrowseFacet facet : facets) { if (facet!=null){ _facetMap.put(facet.getValue(), facet); } } _facets = facets; } public BrowseFacet getFacet(String value) { return _facetMap.get(value); } public int getFacetHitsCount(Object value) { BrowseFacet facet = _facetMap.get(value); if (facet != null) return facet.getHitCount(); return 0; } public List<BrowseFacet> getFacets() { return Arrays.asList(_facets); } @Override public void close() { // TODO Auto-generated method stub } @Override public FacetIterator iterator() { throw new IllegalStateException("FacetIterator should not be obtained at merge time"); } } public static int getNumHits(Collection<SenseiResult> results) { int numHits = 0; for(SenseiResult res : results) { numHits += res.getNumHits(); } return numHits; } public static int getTotalDocs(Collection<SenseiResult> results) { int totalDocs = 0; for(SenseiResult res : results) { totalDocs += res.getTotalDocs(); } return totalDocs; } public static int getNumGroups(Collection<SenseiResult> results) { int numGroups = 0; for(SenseiResult res : results) { numGroups += res.getNumGroups(); } return numGroups; } public static long findLongestTime(Collection<SenseiResult> results) { long time = 0L; for (SenseiResult res : results) { time = Math.max(time,res.getTime()); } return time; } public static String findParsedQuery(Collection<SenseiResult> results) { for(SenseiResult res : results) { String parsedQuery = res.getParsedQuery(); if(parsedQuery != null && parsedQuery.length()>0) return parsedQuery; } return ""; } public static boolean hasSortCollector(Collection<SenseiResult> results) { for(SenseiResult res : results) { if (res.getSortCollector() != null && res.getSortCollector().contextList != null) { return true; } } return false; } public static void createUniqueDocIds(Collection<SenseiResult> results) { int totalDocs= 0; for (SenseiResult res : results) { SenseiHit[] hits = res.getSenseiHits(); if (hits != null) { for (SenseiHit hit : hits) { hit.setDocid(hit.getDocid() + totalDocs); } } totalDocs += res.getTotalDocs(); } } public static List<Iterator<SenseiHit>> flattenHits(Collection<SenseiResult> results) { List<Iterator<SenseiHit>> hitList = new ArrayList<Iterator<SenseiHit>>(results.size()); for (SenseiResult res : results) { hitList.add(Arrays.asList(res.getSenseiHits()).iterator()); } return hitList; } private static final int UNKNOWN_GROUP_VALUE_TYPE = 0; private static final int NORMAL_GROUP_VALUE_TYPE = 1; private static final int LONG_ARRAY_GROUP_VALUE_TYPE = 2; 
public static SenseiResult merge(final SenseiRequest req, Collection<SenseiResult> results, boolean onSearchNode) { long start = System.currentTimeMillis(); List<Map<String, FacetAccessible>> facetList = new ArrayList<Map<String, FacetAccessible>>(results.size()); // Compute the size of hits priority queue final int topHits = req.getOffset() + req.getCount(); // Sum the hits, groups, totalDocs, etc from all the results final int numHits = getNumHits(results); final int numGroups = getNumGroups(results); int totalDocs = getTotalDocs(results); final long longestTime = findLongestTime(results); final String parsedQuery = findParsedQuery(results); final boolean hasSortCollector = hasSortCollector(results); // Assign each hit document a unique "document id" createUniqueDocIds(results); // Extract the hits from the results List<Iterator<SenseiHit>> hitLists = flattenHits(results); List<FacetAccessible>[] groupAccessibles = extractFacetAccessible(results); // Merge your facets for (SenseiResult res : results) { Map<String, FacetAccessible> facetMap = res.getFacetMap(); if (facetMap != null) { facetList.add(facetMap); } } Map<String, FacetAccessible> mergedFacetMap = null; if (onSearchNode) { mergedFacetMap = mergeFacetContainerServerSide(facetList, req); } else { mergedFacetMap = mergeFacetContainer(facetList, req); } Comparator<SenseiHit> comparator = new SenseiHitComparator(req.getSort()); SenseiHit[] hits; if (req.getGroupBy() == null || req.getGroupBy().length == 0) { List<SenseiHit> mergedList = ListMerger.mergeLists(req.getOffset(), req.getCount(), hitLists .toArray(new Iterator[hitLists.size()]), comparator); hits = mergedList.toArray(new SenseiHit[mergedList.size()]); } else { int[] rawGroupValueType = new int[req.getGroupBy().length]; // 0: unknown, 1: normal, 2: long[] PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp = new PrimitiveLongArrayWrapper(null); Iterator<SenseiHit> mergedIter = ListMerger.mergeLists(hitLists, comparator); List<SenseiHit> hitsList = null; if (!hasSortCollector) { hitsList = buildHitsListNoSortCollector(req, topHits, rawGroupValueType, mergedIter, req.getOffset()); //numGroups = (int)(numGroups*(groupHitMap.size()/(float)preGroups)); } else { int offsetLeft = req.getOffset(); MyScoreDoc pre = null; if (topHits > 0 && groupAccessibles != null && groupAccessibles.length > 1) { hitsList = buildHitsList(req, results, topHits, groupAccessibles, rawGroupValueType, primitiveLongArrayWrapperTmp); } else { hitsList = buildHitsListNoGroupAccessibles(req, topHits, rawGroupValueType, primitiveLongArrayWrapperTmp, mergedIter, offsetLeft); } //for (int i=0; i<combinedFacetAccessibles.length; ++i) combinedFacetAccessibles[i].close(); } hits = hitsList.toArray(new SenseiHit[hitsList.size()]); PrepareGroupMappings prepareGroupMappings = new PrepareGroupMappings(req, results, hasSortCollector, hits, rawGroupValueType, primitiveLongArrayWrapperTmp).invoke(); Map<Object, HitWithGroupQueue>[] groupMaps = prepareGroupMappings.getGroupMaps(); totalDocs = prepareGroupMappings.getTotalDocs(); if (hasSortCollector) { for (Map<Object, HitWithGroupQueue> map : groupMaps) { for (HitWithGroupQueue hwg : map.values()) { int index = hwg.queue.size() - 1; if (index >= 0) { SenseiHit[] groupHits = new SenseiHit[index+1]; while (index >=0) { groupHits[index] = hwg.queue.pop().getSenseiHit(req); --index; } hwg.hit.setGroupHits(groupHits); } } } } else { for (Map<Object, HitWithGroupQueue> map : groupMaps) { for (HitWithGroupQueue hwg : map.values()) { List<SenseiHit> mergedList = 
ListMerger.mergeLists(0, req.getMaxPerGroup(), hwg.iterList .toArray(new Iterator[hwg.iterList.size()]), comparator); SenseiHit[] groupHits = mergedList.toArray(new SenseiHit[mergedList.size()]); hwg.hit.setGroupHits(groupHits); } } } } if (groupAccessibles != null) { for (List<FacetAccessible> list : groupAccessibles) { if (list != null) { for (FacetAccessible acc : list) { if (acc != null) acc.close(); } } } } SenseiResult merged = new SenseiResult(); merged.setHits(hits); merged.setNumHits(numHits); merged.setNumGroups(numGroups); merged.setTotalDocs(totalDocs); merged.addAll(mergedFacetMap); long end = System.currentTimeMillis(); merged.setTime(longestTime + end - start); mergerErrors(merged, req, results, parsedQuery); if (req.getMapReduceFunction() != null) { if (onSearchNode) { merged.setMapReduceResult(SenseiReduceFunctionWrapper.combine(req.getMapReduceFunction(), SenseiReduceFunctionWrapper.extractMapReduceResults(results))); } else { //on broker level merged.setMapReduceResult(SenseiReduceFunctionWrapper.reduce(req.getMapReduceFunction(), SenseiReduceFunctionWrapper.extractMapReduceResults(results))); } } return merged; } private static List<SenseiHit> buildHitsListNoGroupAccessibles(SenseiRequest req, int topHits, int[] rawGroupValueType, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp, Iterator<SenseiHit> mergedIter, int offsetLeft) { List<SenseiHit> hitsList = new ArrayList<SenseiHit>(req.getCount()); Object rawGroupValue = null; Object firstRawGroupValue = null; Set<Object>[] groupSets = new Set[1]; groupSets[0] = new HashSet<Object>(topHits); while(mergedIter.hasNext()) { SenseiHit hit = mergedIter.next(); firstRawGroupValue = null; int i=0; for (; i<groupSets.length; ++i) { //rawGroupValue = hit.getRawField(req.getGroupBy()[i]); rawGroupValue = extractRawGroupValue(rawGroupValueType, i, primitiveLongArrayWrapperTmp, hit); if (firstRawGroupValue == null) firstRawGroupValue = rawGroupValue; if (groupSets[i].contains(rawGroupValue)) { i = -1; break; } } if (i >= 0) { if (i >= groupSets.length) { i = 0; rawGroupValue = firstRawGroupValue; } if (offsetLeft > 0) --offsetLeft; else { //hit.setGroupHitsCount(combinedFacetAccessibles[i].getFacetHitsCount(hit.getRawGroupValue())); hitsList.add(hit); if (hitsList.size() >= req.getCount()) break; } if (rawGroupValueType[i] == LONG_ARRAY_GROUP_VALUE_TYPE) groupSets[i].add(new PrimitiveLongArrayWrapper(primitiveLongArrayWrapperTmp.data)); else groupSets[i].add(rawGroupValue); } } return hitsList; } private static List<SenseiHit> buildHitsList(SenseiRequest req, Collection<SenseiResult> results, int topHits, List<FacetAccessible>[] groupAccessibles, int[] rawGroupValueType, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp) { List<SenseiHit> hitsList = new ArrayList<SenseiHit>(req.getCount()); MyScoreDoc pre = null; Object rawGroupValue = null; Object firstRawGroupValue = null; CombinedFacetAccessible[] combinedFacetAccessibles = new CombinedFacetAccessible[groupAccessibles.length]; for(int i = 0; i < groupAccessibles.length; i++) { combinedFacetAccessibles[i] = new CombinedFacetAccessible(new FacetSpec(), groupAccessibles[i]); } Set<Object>[] groupSets = new Set[groupAccessibles.length]; for (int i = 0; i < groupAccessibles.length; ++i) { groupSets[i] = new HashSet<Object>(topHits); } Map<Object, MyScoreDoc>[] valueDocMaps = new Map[groupAccessibles.length]; for (int i = 0; i < groupAccessibles.length; ++i) { valueDocMaps[i] = new HashMap<Object, MyScoreDoc>(topHits); } int totalDocs = 0; MyScoreDoc tmpScoreDoc = new 
MyScoreDoc(0, 0.0f, 0, null); MyScoreDoc bottom = null; boolean queueFull = false; DocIDPriorityQueue docQueue = new DocIDPriorityQueue(new DocComparator() { public int compare(ScoreDoc doc1, ScoreDoc doc2) { return ((MyScoreDoc)doc1).sortValue.compareTo(((MyScoreDoc)doc2).sortValue); } public Comparable value(ScoreDoc doc) { return ((MyScoreDoc)doc).sortValue; } }, topHits, 0); // Sort all the documents???? for (SenseiResult res : results) { SortCollector sortCollector = res.getSortCollector(); if (sortCollector == null) continue; Iterator<CollectorContext> contextIter = sortCollector.contextList.iterator(); // Populate dataCaches and contextLeft CollectorContext currentContext = null; int contextLeft = 0; FacetDataCache[] dataCaches = new FacetDataCache[sortCollector.groupByMulti.length]; while (contextIter.hasNext()) { currentContext = contextIter.next(); currentContext.restoreRuntimeFacets(); contextLeft = currentContext.length; if (contextLeft > 0) { for (int j=0; j<sortCollector.groupByMulti.length; ++j) dataCaches[j] = (FacetDataCache)sortCollector.groupByMulti[j].getFacetData(currentContext.reader); break; } } Iterator<float[]> scoreArrayIter = sortCollector.scorearraylist != null ? sortCollector.scorearraylist.iterator():null; if (contextLeft > 0) { for (int[] docs : sortCollector.docidarraylist) { float[] scores = scoreArrayIter != null ? scoreArrayIter.next():null; for (int i=0; i<SortCollector.BLOCK_SIZE; ++i) { tmpScoreDoc.doc = docs[i]; tmpScoreDoc.score = scores != null ? scores[i] : 0.0f; tmpScoreDoc.finalDoc = currentContext.base + totalDocs + tmpScoreDoc.doc; tmpScoreDoc.reader = currentContext.reader; tmpScoreDoc.sortValue = currentContext.comparator.value(tmpScoreDoc); firstRawGroupValue = null; int j=0; for (; j<sortCollector.groupByMulti.length; ++j) { rawGroupValue = dataCaches[j].valArray.getRawValue(dataCaches[j].orderArray.get(tmpScoreDoc.doc)); rawGroupValue = extractRawGroupValue(rawGroupValueType, j, primitiveLongArrayWrapperTmp, rawGroupValue); if (firstRawGroupValue == null) firstRawGroupValue = rawGroupValue; pre = valueDocMaps[j].get(rawGroupValue); if (pre != null) { j = -1; break; } if (rawGroupValueType[j] == LONG_ARRAY_GROUP_VALUE_TYPE) { if (combinedFacetAccessibles[j].getCappedFacetCount(primitiveLongArrayWrapperTmp.data, 2) != 1) break; } else { if (combinedFacetAccessibles[j].getCappedFacetCount(rawGroupValue, 2) != 1) break; } } if (j < 0) { if (tmpScoreDoc.sortValue.compareTo(pre.sortValue) < 0) { tmpScoreDoc.groupPos = pre.groupPos; tmpScoreDoc.rawGroupValue = rawGroupValue; MyScoreDoc tmp = pre; // Pre has a higher score. Pop it in the queue! 
bottom = (MyScoreDoc)docQueue.replace(tmpScoreDoc, pre); valueDocMaps[tmpScoreDoc.groupPos].put(rawGroupValue, tmpScoreDoc); tmpScoreDoc = tmp; } } else { if (j >= sortCollector.groupByMulti.length) { j = 0; rawGroupValue = firstRawGroupValue; } if (!queueFull || tmpScoreDoc.sortValue.compareTo(bottom.sortValue) < 0) { if (queueFull) { tmpScoreDoc.groupPos = j; tmpScoreDoc.rawGroupValue = rawGroupValue; MyScoreDoc tmp = bottom; valueDocMaps[tmp.groupPos].remove(tmp.rawGroupValue); bottom = (MyScoreDoc)docQueue.replace(tmpScoreDoc); valueDocMaps[j].put(rawGroupValue, tmpScoreDoc); tmpScoreDoc = tmp; } else { MyScoreDoc tmp = new MyScoreDoc(tmpScoreDoc.doc, tmpScoreDoc.score, currentContext.base + totalDocs + tmpScoreDoc.doc, currentContext.reader); tmp.groupPos = j; tmp.rawGroupValue = rawGroupValue; tmp.sortValue = tmpScoreDoc.sortValue; bottom = (MyScoreDoc)docQueue.add(tmp); valueDocMaps[j].put(rawGroupValue, tmp); queueFull = (docQueue.size >= topHits); } } } --contextLeft; if (contextLeft <= 0) { while (contextIter.hasNext()) { currentContext = contextIter.next(); currentContext.restoreRuntimeFacets(); contextLeft = currentContext.length; if (contextLeft > 0) { for (j=0; j<sortCollector.groupByMulti.length; ++j) dataCaches[j] = (FacetDataCache)sortCollector.groupByMulti[j].getFacetData(currentContext.reader); break; } } if (contextLeft <= 0) // No more docs left. break; } } } } totalDocs += res.getTotalDocs(); } int len = docQueue.size() - req.getOffset(); if (len < 0) len = 0; SenseiHit[] hitArray = new SenseiHit[len]; for (int i = hitArray.length-1; i>=0; --i) { tmpScoreDoc = (MyScoreDoc)docQueue.pop(); hitArray[i] = tmpScoreDoc.getSenseiHit(req); } for (int i=0; i<hitArray.length; ++i) hitsList.add(hitArray[i]); return hitsList; } private static List<SenseiHit> buildHitsListNoSortCollector(SenseiRequest req, int topHits, int[] rawGroupValueType, Iterator<SenseiHit> mergedIter, int offsetLeft) { List<SenseiHit> hitsList = new ArrayList<SenseiHit>(req.getCount()); // TODO: Pull out the sensei hits extraction from this function PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp = new PrimitiveLongArrayWrapper(null); Map<Object, SenseiHit>[] groupHitMaps = new Map[req.getGroupBy().length]; for (int i=0; i < groupHitMaps.length; ++i) { groupHitMaps[i] = new HashMap<Object, SenseiHit>(topHits); } while(mergedIter.hasNext()) { SenseiHit hit = mergedIter.next(); Object rawGroupValue = extractRawGroupValue(rawGroupValueType, hit.getGroupPosition(), primitiveLongArrayWrapperTmp, hit); SenseiHit pre = groupHitMaps[hit.getGroupPosition()].get(rawGroupValue); if (pre != null) { if (offsetLeft <= 0) { pre.setGroupHitsCount(pre.getGroupHitsCount()+hit.getGroupHitsCount()); } } else { if (offsetLeft > 0) --offsetLeft; else if (hitsList.size() < req.getCount()) hitsList.add(hit); if (rawGroupValueType[0] == 2) groupHitMaps[hit.getGroupPosition()].put(new PrimitiveLongArrayWrapper(primitiveLongArrayWrapperTmp.data), hit); else groupHitMaps[hit.getGroupPosition()].put(rawGroupValue, hit); } } return hitsList; } private static Object extractRawGroupValue(int[] rawGroupValueType, int groupPosition, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp, SenseiHit hit) { return extractRawGroupValue(rawGroupValueType, groupPosition, primitiveLongArrayWrapperTmp, hit.getRawGroupValue()); } private static Object extractRawGroupValue(int[] rawGroupValueType, int groupPosition, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp, Object rawGroupValue) { if (rawGroupValueType[groupPosition] == 
LONG_ARRAY_GROUP_VALUE_TYPE) { // We already know this group position is a long[] primitiveLongArrayWrapperTmp.data = (long[])rawGroupValue; rawGroupValue = primitiveLongArrayWrapperTmp; } else if (rawGroupValueType[groupPosition] == UNKNOWN_GROUP_VALUE_TYPE) { // Unknown if (rawGroupValue != null) { if (rawGroupValue instanceof long[]) { // It's a long array, so set the position rawGroupValueType[groupPosition] = LONG_ARRAY_GROUP_VALUE_TYPE; primitiveLongArrayWrapperTmp.data = (long[])rawGroupValue; rawGroupValue = primitiveLongArrayWrapperTmp; } else rawGroupValueType[groupPosition] = NORMAL_GROUP_VALUE_TYPE; } } return rawGroupValue; } private static List<FacetAccessible>[] extractFacetAccessible(Collection<SenseiResult> results) { List<FacetAccessible>[] groupAccessibles = null; for (SenseiResult res : results) { if (res.getGroupAccessibles() != null) { if (groupAccessibles == null) { groupAccessibles = new List[res.getGroupAccessibles().length]; for (int i=0; i<groupAccessibles.length; ++i) { groupAccessibles[i] = new ArrayList<FacetAccessible>(results.size()); } } for (int i=0; i<groupAccessibles.length; ++i) { groupAccessibles[i].add(res.getGroupAccessibles()[i]); } } } return groupAccessibles; } public static class PrepareGroupMappings { private final SenseiRequest req; private final Collection<SenseiResult> results; private final boolean hasSortCollector; private final SenseiHit[] hits; private final int[] rawGroupValueType; private final PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp; private int totalDocs; private Map<Object, HitWithGroupQueue>[] groupMaps; public PrepareGroupMappings(SenseiRequest req, Collection<SenseiResult> results, boolean hasSortCollector, SenseiHit[] hits, int[] rawGroupValueType, PrimitiveLongArrayWrapper primitiveLongArrayWrapperTmp) { this.req = req; this.results = results; this.hasSortCollector = hasSortCollector; this.hits = hits; this.rawGroupValueType = rawGroupValueType; this.primitiveLongArrayWrapperTmp = primitiveLongArrayWrapperTmp; groupMaps = new Map[req.getGroupBy().length]; for (int i=0; i< groupMaps.length; ++i) { groupMaps[i] = new HashMap<Object, HitWithGroupQueue>(hits.length*2); } } public int getTotalDocs() { return totalDocs; } public Map<Object, HitWithGroupQueue>[] getGroupMaps() { return groupMaps; } public PrepareGroupMappings invoke() { Object rawGroupValue; for (SenseiHit hit : hits) { rawGroupValue = hit.getRawField(req.getGroupBy()[hit.getGroupPosition()]); rawGroupValue = extractRawGroupValue(rawGroupValueType, hit.getGroupPosition(), primitiveLongArrayWrapperTmp, rawGroupValue); groupMaps[hit.getGroupPosition()].put(rawGroupValue, new HitWithGroupQueue(hit, new PriorityQueue<MyScoreDoc>() { private int r; { this.initialize(req.getMaxPerGroup() <= 1? 
0: req.getMaxPerGroup()); } protected boolean lessThan(MyScoreDoc a, MyScoreDoc b) { r = a.sortValue.compareTo(b.sortValue); if (r>0) return true; else if (r<0) return false; else return (a.finalDoc > b.finalDoc); } } )); } MyScoreDoc tmpScoreDoc = null; int doc = 0; float score = 0.0f; HitWithGroupQueue hitWithGroupQueue = null; totalDocs = 0; for (SenseiResult res : results) { if (hasSortCollector) { SortCollector sortCollector = res.getSortCollector(); if (sortCollector == null) continue; Iterator<CollectorContext> contextIter = sortCollector.contextList.iterator(); CollectorContext currentContext = null; int contextLeft = 0; FacetDataCache[] dataCaches = new FacetDataCache[sortCollector.groupByMulti.length]; while (contextIter.hasNext()) { currentContext = contextIter.next(); currentContext.restoreRuntimeFacets(); contextLeft = currentContext.length; if (contextLeft > 0) { for (int j=0; j<sortCollector.groupByMulti.length; ++j) dataCaches[j] = (FacetDataCache)sortCollector.groupByMulti[j].getFacetData(currentContext.reader); break; } } Iterator<float[]> scoreArrayIter = sortCollector.scorearraylist != null ? sortCollector.scorearraylist.iterator():null; if (contextLeft > 0) { for (int[] docs : sortCollector.docidarraylist) { float[] scores = scoreArrayIter != null ? scoreArrayIter.next():null; for (int i=0; i<SortCollector.BLOCK_SIZE; ++i) { doc = docs[i]; score = scores != null ? scores[i]:0.0f; int j=0; for (; j<sortCollector.groupByMulti.length; ++j) { rawGroupValue = extractRawGroupValue(rawGroupValueType, j, primitiveLongArrayWrapperTmp, dataCaches[j].valArray.getRawValue(dataCaches[j].orderArray.get(doc))); hitWithGroupQueue = groupMaps[j].get(rawGroupValue); if (hitWithGroupQueue != null) { hitWithGroupQueue.hit.setGroupHitsCount(hitWithGroupQueue.hit.getGroupHitsCount() + 1); // Collect this hit. if (tmpScoreDoc == null) tmpScoreDoc = new MyScoreDoc(doc, score, currentContext.base + totalDocs + doc, currentContext.reader); else { tmpScoreDoc.doc = doc; tmpScoreDoc.score = score; tmpScoreDoc.finalDoc = currentContext.base + totalDocs + doc; tmpScoreDoc.reader = currentContext.reader; } tmpScoreDoc.sortValue = currentContext.comparator.value(tmpScoreDoc); tmpScoreDoc.groupPos = j; tmpScoreDoc.rawGroupValue = rawGroupValue; tmpScoreDoc = hitWithGroupQueue.queue.insertWithOverflow(tmpScoreDoc); break; } } --contextLeft; if (contextLeft <= 0) { while (contextIter.hasNext()) { currentContext = contextIter.next(); currentContext.restoreRuntimeFacets(); contextLeft = currentContext.length; if (contextLeft > 0) { for (j=0; j<sortCollector.groupByMulti.length; ++j) dataCaches[j] = (FacetDataCache)sortCollector.groupByMulti[j].getFacetData(currentContext.reader); break; } } if (contextLeft <= 0) // No more docs left. 
break; } } } } } else { if (res.getSenseiHits() != null) { for (SenseiHit hit : res.getSenseiHits()) { if (hit.getGroupHits() != null) { rawGroupValue = hit.getRawGroupValue(); if (rawGroupValueType[hit.getGroupPosition()] == LONG_ARRAY_GROUP_VALUE_TYPE) { primitiveLongArrayWrapperTmp.data = (long[])rawGroupValue; rawGroupValue = primitiveLongArrayWrapperTmp; } hitWithGroupQueue = groupMaps[hit.getGroupPosition()].get(rawGroupValue); if (hitWithGroupQueue != null) hitWithGroupQueue.iterList.add(Arrays.asList(hit.getSenseiGroupHits()).iterator()); } } } } totalDocs += res.getTotalDocs(); } return this; } } private static void mergerErrors(SenseiResult merged, final SenseiRequest req, Collection<SenseiResult> results, String parsedQuery) { merged.setParsedQuery(parsedQuery); merged.getErrors().addAll(req.getErrors()); for (SenseiResult res : results) { merged.getErrors().addAll(res.getErrors()); if (res.getBoboErrors().size() > 0) { for (String boboError : res.getBoboErrors()) { merged.addError(new SenseiError(boboError, ErrorType.BoboExecutionError)); } } } } }
get counts for good hits only.
sensei-core/src/main/java/com/senseidb/search/node/ResultMerger.java
get counts for good hits only.
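The ResultMerger code above repeatedly leans on ListMerger.mergeLists(offset, count, iterators, comparator): every partition returns its hits already sorted, so the broker only needs a k-way merge that honours the request's offset and count. A self-contained sketch of that pattern follows; it is illustrative only, not the Sensei ListMerger implementation.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

// Sketch of a k-way merge over already-sorted iterators, with offset/count handling.
public class KWayMergeSketch {

    static <T> List<T> mergeLists(int offset, int count,
                                  List<Iterator<T>> sortedIterators,
                                  Comparator<? super T> comparator) {
        // The queue holds one "head" element per iterator, ordered by the comparator.
        PriorityQueue<Entry<T>> heads =
                new PriorityQueue<>((a, b) -> comparator.compare(a.value, b.value));
        for (Iterator<T> it : sortedIterators) {
            if (it.hasNext()) heads.add(new Entry<>(it.next(), it));
        }

        List<T> result = new ArrayList<>(count);
        int skipped = 0;
        while (!heads.isEmpty() && result.size() < count) {
            Entry<T> smallest = heads.poll();
            if (skipped < offset) {
                skipped++;                      // still inside the requested offset
            } else {
                result.add(smallest.value);
            }
            if (smallest.source.hasNext()) {    // refill from the same partition
                heads.add(new Entry<>(smallest.source.next(), smallest.source));
            }
        }
        return result;
    }

    private static final class Entry<T> {
        final T value;
        final Iterator<T> source;
        Entry(T value, Iterator<T> source) { this.value = value; this.source = source; }
    }

    public static void main(String[] args) {
        List<Iterator<Integer>> parts = Arrays.asList(
                Arrays.asList(1, 4, 9).iterator(),
                Arrays.asList(2, 3, 10).iterator(),
                Arrays.asList(5, 6, 7).iterator());
        // offset=1, count=4 over the merged order 1,2,3,4,5,... -> [2, 3, 4, 5]
        System.out.println(mergeLists(1, 4, parts, Comparator.naturalOrder()));
    }
}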
Java
apache-2.0
fb7c2efd3f502052e95159984fa75b76a31eb941
0
crashoverwide/java-a-to-z
package ru.shestakov.services;

import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.assertArrayEquals;

public class IteratorStrangeTest {
    @Test
    public void whenConvertedThenConverted() {
        List<Integer> list1 = new ArrayList<Integer>();
        list1.add(1);
        list1.add(2);
        List<Integer> list2 = new ArrayList<Integer>();
        list2.add(3);
        list2.add(4);
        List<List<Integer>> listBig = new ArrayList<List<Integer>>();
        listBig.add(list1);
        listBig.add(list2);
        IteratorStrangeArrayList iteratorStrange = new IteratorStrangeArrayList();
        List<Integer> listSmall = iteratorStrange.convert(listBig);
        List<Integer> expected = new ArrayList<Integer>();
        expected.add(1);
        expected.add(2);
        expected.add(3);
        expected.add(4);
        assertArrayEquals(expected.toArray(), listSmall.toArray());
    }
}
Tracker/src/test/java/ru/shestakov/services/IteratorStrangeTest.java
package ru.shestakov.services;

import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

public class IteratorStrangeTest {
    @Test
    public void whenThen() {
        List<Integer> list1 = new ArrayList<Integer>();
        list1.add(1);
        list1.add(2);
        List<Integer> list2 = new ArrayList<Integer>();
        list2.add(3);
        list2.add(4);
        List<List<Integer>> listBig = new ArrayList<List<Integer>>();
        listBig.add(list1);
        listBig.add(list2);
        IteratorStrangeArrayList iteratorStrange = new IteratorStrangeArrayList();
        List<Integer> listSmall = iteratorStrange.convert(listBig);
    }
}
add structure with folder
Tracker/src/test/java/ru/shestakov/services/IteratorStrangeTest.java
add structure with folder
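The IteratorStrangeTest commit above ships only the test; the class under test, IteratorStrangeArrayList, is not part of the diff. A minimal convert implementation that would satisfy the test, purely as a hypothetical sketch:

package ru.shestakov.services;

import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch of the class under test above; the real implementation
// is not included in the commit shown here.
public class IteratorStrangeArrayList {

    /** Flattens a list of lists into a single list, preserving order. */
    public List<Integer> convert(List<List<Integer>> source) {
        List<Integer> flat = new ArrayList<Integer>();
        for (List<Integer> inner : source) {
            flat.addAll(inner);
        }
        return flat;
    }
}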
Java
apache-2.0
ef007796562205e24b8318c6cb68b5df996cf439
0
benmfaul/XRTB,benmfaul/XRTB,benmfaul/XRTB,benmfaul/XRTB
package com.xrtb.commands; import com.xrtb.common.Configuration; import com.xrtb.exchanges.adx.AdxWinObject; import com.xrtb.exchanges.google.GoogleWinObject; /** * A class for logging pixel loads. (ad loads in user web page) * @author Ben M. Faul. * */ public class PixelLog extends PixelClickConvertLog { /** * Default constructor */ public PixelLog() { super(); type = PIXEL; } /** * Create a Click log, the payload is the URI. * @param payload String. The URI. */ public PixelLog(String payload) { type = PIXEL; this.payload = payload; String [] parts = payload.split("/"); for (int i=0;i<parts.length;i++) { if (parts[i].indexOf("=") > -1) { String [] items = parts[i].split("="); switch(items[0]) { case "lat": try { lat = Double.parseDouble(items[1]); } catch (Exception error) { } break; case "lon": try { lon = Double.parseDouble(items[1]); } catch (Exception error) { } break; case "price": try { price = Double.parseDouble(items[1]); } catch (Exception error) { price = 0; String ctext = items[1].trim(); if (exchange.equals("google") || exchange.equals("adx")) { try { if (exchange.equals("google")) price = GoogleWinObject.decrypt(ctext, System.currentTimeMillis()); else price = AdxWinObject.decrypt(ctext, System.currentTimeMillis()); } catch (Exception e) { } } } break; case "bid_id": bid_id = items[1]; break; case "ad_id": ad_id=items[1]; break; case "creative_id": creative_id=items[1]; break; case "exchange": exchange = items[1]; break; } } } type = PIXEL; instance = Configuration.getInstance().instanceName; time = System.currentTimeMillis(); instance = Configuration.getInstance().instanceName; } /** * Create a pixel log from the payload and the bidder instance name. * @param payload String. The data to convert. * @param instance String. The instance name. */ public PixelLog(String payload, String instance) { type = PIXEL; this.payload = payload; String [] parts = payload.split("/"); lat = Double.parseDouble(parts[8]); lon = Double.parseDouble(parts[9]); price = Double.parseDouble(parts[7]); bid_id = parts[6]; ad_id=parts[4]; creative_id=parts[5]; exchange = parts[3]; type = PIXEL; time = System.currentTimeMillis(); this.instance = instance; } }
src/com/xrtb/commands/PixelLog.java
package com.xrtb.commands; import com.xrtb.common.Configuration; /** * A class for logging pixel loads. (ad loads in user web page) * @author Ben M. Faul. * */ public class PixelLog extends PixelClickConvertLog { /** * Default constructor */ public PixelLog() { super(); type = PIXEL; } /** * Create a Click log, the payload is the URI. * @param payload String. The URI. */ public PixelLog(String payload) { type = PIXEL; this.payload = payload; String [] parts = payload.split("/"); for (int i=0;i<parts.length;i++) { if (parts[i].indexOf("=") > -1) { String [] items = parts[i].split("="); switch(items[0]) { case "lat": try { lat = Double.parseDouble(items[1]); } catch (Exception error) { } break; case "lon": try { lon = Double.parseDouble(items[1]); } catch (Exception error) { } break; case "price": price = Double.parseDouble(items[1]); break; case "bid_id": bid_id = items[1]; break; case "ad_id": ad_id=items[1]; break; case "creative_id": creative_id=items[1]; break; case "exchange": exchange = items[1]; break; } } } type = PIXEL; instance = Configuration.getInstance().instanceName; time = System.currentTimeMillis(); instance = Configuration.getInstance().instanceName; } /** * Create a pixel log from the payload and the bidder instance name. * @param payload String. The data to convert. * @param instance String. The instance name. */ public PixelLog(String payload, String instance) { type = PIXEL; this.payload = payload; String [] parts = payload.split("/"); lat = Double.parseDouble(parts[8]); lon = Double.parseDouble(parts[9]); price = Double.parseDouble(parts[7]); bid_id = parts[6]; ad_id=parts[4]; creative_id=parts[5]; exchange = parts[3]; type = PIXEL; time = System.currentTimeMillis(); this.instance = instance; } }
Don't give up if price is encrypted
src/com/xrtb/commands/PixelLog.java
Don't give up if price is encrypted
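The interesting part of the PixelLog diff above is the price case: instead of giving up when Double.parseDouble fails, the new code treats the value as an encrypted win price and, for the google and adx exchanges, runs it through the exchange's decryptor. The fallback in isolation looks like the sketch below, where PriceDecryptor is a toy stand-in for GoogleWinObject.decrypt / AdxWinObject.decrypt rather than the real API.

// Self-contained sketch of the "parse, else decrypt" fallback used above.
public class EncryptedPriceFallbackSketch {

    interface PriceDecryptor {                       // stand-in for the exchange decryptor
        double decrypt(String cipherText) throws Exception;
    }

    static double resolvePrice(String raw, String exchange, PriceDecryptor decryptor) {
        try {
            return Double.parseDouble(raw);          // plain-text price: done
        } catch (NumberFormatException notANumber) {
            // Not a number: only google/adx send encrypted win prices, so only
            // those exchanges get the decryption attempt; anything else stays 0.
            if ("google".equals(exchange) || "adx".equals(exchange)) {
                try {
                    return decryptor.decrypt(raw.trim());
                } catch (Exception decryptFailed) {
                    // swallow, mirroring the original code's behaviour
                }
            }
            return 0;
        }
    }

    public static void main(String[] args) {
        // Toy decryptor: pretend the cipher text encodes cents as its length.
        PriceDecryptor toy = cipherText -> cipherText.length() / 100.0;
        System.out.println(resolvePrice("1.25", "google", toy));       // 1.25 (plain parse)
        System.out.println(resolvePrice("WAvQbEnc==", "google", toy)); // 0.1  (decrypted by the toy)
        System.out.println(resolvePrice("WAvQbEnc==", "nexage", toy)); // 0.0  (no decryptor for this exchange)
    }
}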
Java
apache-2.0
0dff6e6baa2370809a5115e3f4ab4647e2c9abf5
0
bertilmuth/requirementsascode
package org.requirementsascode; import java.io.Serializable; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.requirementsascode.flowposition.FlowPosition; /** * A flow defines a sequence of steps that lead the user through a use case. * * <p> * A flow either ends with the user reaching her/his goal, or terminates before, * usually because of an exception that occurred. * * @author b_muth */ public class Flow extends ModelElement implements Serializable { private static final long serialVersionUID = -2448742413260609615L; private UseCase useCase; /** * Creates a flow with the specified name that belongs to the specified use * case. * * @param name the name of the flow to be created * @param useCase the use case that will contain the new flow */ Flow(String name, UseCase useCase) { super(name, useCase.getModel()); this.useCase = useCase; } /** * Returns the use case this flow is part of. * * @return the containing use case */ public UseCase getUseCase() { return useCase; } /** * Returns the steps contained in this flow. Do not modify the returned * collection directly. * * @return a collection of the steps */ public List<FlowStep> getSteps() { List<FlowStep> steps = getUseCase().getModifiableSteps().stream().filter(step -> step instanceof FlowStep) .map(step -> (FlowStep) step).filter(step -> this.equals(step.getFlow())).collect(Collectors.toList()); return Collections.unmodifiableList(steps); } /** * Returns the first step of the flow * * @return the first step of the flow, or an empty optional if the flow has no * steps. */ public Optional<FlowStep> getFirstStep() { List<FlowStep> steps = getSteps(); return steps.size() > 0 ? Optional.of(steps.get(0)) : Optional.empty(); } /** * Convenience method that returns the position of the flow (as defined e.g. by * "InsteadOf"). * * <p> * Internally this calls the method of the same name of the first step in the * flow. * * @return the flow position, or null if the flow is empty. */ public FlowPosition getFlowPosition() { FlowPosition flowPosition = getFirstStep().map(step -> step.getFlowPosition()).orElse(null); return flowPosition; } /** * Convenience method that returns the condition of the flow. * * <p> * Internally this calls the method of the same name of the first step in the * flow. * * @return the condition */ public Optional<Condition> getCondition() { Optional<Condition> condition = getFirstStep().flatMap(step -> step.getCondition()); return condition; } }
requirementsascodecore/src/main/java/org/requirementsascode/Flow.java
package org.requirementsascode; import java.io.Serializable; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.requirementsascode.flowposition.FlowPosition; /** * A flow defines a sequence of steps that lead the user through a * use case. * * <p> * A flow either ends with the user reaching her/his goal, or terminates before, * usually because of an exception that occurred. * * @author b_muth */ public class Flow extends ModelElement implements Serializable { private static final long serialVersionUID = -2448742413260609615L; private UseCase useCase; /** * Creates a flow with the specified name that belongs to the specified * use case. * * @param name * the name of the flow to be created * @param useCase * the use case that will contain the new flow */ Flow(String name, UseCase useCase) { super(name, useCase.getModel()); this.useCase = useCase; } /** * Returns the use case this flow is part of. * * @return the containing use case */ public UseCase getUseCase() { return useCase; } /** * Returns the steps contained in this flow. Do not modify the returned * collection directly. * * @return a collection of the steps */ public List<FlowStep> getSteps() { List<FlowStep> steps = getUseCase().getModifiableSteps().stream() .filter(step -> step instanceof FlowStep) .map(step -> (FlowStep)step) .filter(step -> this.equals(step.getFlow())) .collect(Collectors.toList()); return Collections.unmodifiableList(steps); } /** * Returns the first step of the flow * * @return the first step of the flow, or an empty optional if the flow has no * steps. */ public Optional<FlowStep> getFirstStep() { List<FlowStep> steps = getSteps(); return steps.size() > 0 ? Optional.of(steps.get(0)) : Optional.empty(); } /** * Convenience method that returns the position of the flow (as defined e.g. by * "InsteadOf"). * * <p> * Internally this calls the method of the same name of the first step in the * flow. * * @return the flow position, or null if the flow is empty. */ public FlowPosition getFlowPosition() { FlowPosition flowPosition = getFirstStep().map(step -> step.getFlowPosition()).orElse(null); return flowPosition; } /** * Convenience method that returns the condition of the flow. * * <p> * Internally this calls the method of the same name of the first step in the * flow. * * @return the condition */ public Optional<Condition> getCondition() { Optional<Condition> condition = getFirstStep().flatMap(step -> step.getCondition()); return condition; } }
Layout Flow class
requirementsascodecore/src/main/java/org/requirementsascode/Flow.java
Layout Flow class
Java
apache-2.0
cfb64a7c9329075b8a89b7230bae8abf66b9a9de
0
apache/zeppelin,VipinRathor/zeppelin,fogbeam/zeppelin_mirror,jongyoul/incubator-zeppelin,apache/zeppelin,zjffdu/zeppelin,jongyoul/incubator-zeppelin,prabhjyotsingh/zeppelin,apache/zeppelin,joroKr21/incubator-zeppelin,joroKr21/incubator-zeppelin,prabhjyotsingh/incubator-zeppelin,jongyoul/zeppelin,joroKr21/incubator-zeppelin,prabhjyotsingh/zeppelin,VipinRathor/zeppelin,fogbeam/zeppelin_mirror,joroKr21/incubator-zeppelin,jongyoul/incubator-zeppelin,VipinRathor/zeppelin,joroKr21/incubator-zeppelin,VipinRathor/zeppelin,jongyoul/zeppelin,apache/zeppelin,fogbeam/zeppelin_mirror,apache/incubator-zeppelin,apache/incubator-zeppelin,prabhjyotsingh/incubator-zeppelin,hammertank/zeppelin,prabhjyotsingh/zeppelin,jongyoul/zeppelin,zjffdu/zeppelin,hammertank/zeppelin,VipinRathor/zeppelin,jongyoul/zeppelin,joroKr21/incubator-zeppelin,apache/incubator-zeppelin,zjffdu/zeppelin,joroKr21/incubator-zeppelin,prabhjyotsingh/zeppelin,jongyoul/zeppelin,hammertank/zeppelin,hammertank/zeppelin,jongyoul/zeppelin,prabhjyotsingh/zeppelin,zjffdu/zeppelin,prabhjyotsingh/incubator-zeppelin,apache/incubator-zeppelin,apache/zeppelin,VipinRathor/zeppelin,apache/incubator-zeppelin,zjffdu/zeppelin,fogbeam/zeppelin_mirror,prabhjyotsingh/zeppelin,joroKr21/incubator-zeppelin,jongyoul/incubator-zeppelin,prabhjyotsingh/incubator-zeppelin,fogbeam/zeppelin_mirror,jongyoul/zeppelin,apache/incubator-zeppelin,prabhjyotsingh/zeppelin,prabhjyotsingh/incubator-zeppelin,apache/zeppelin,jongyoul/incubator-zeppelin,VipinRathor/zeppelin,zjffdu/zeppelin,jongyoul/incubator-zeppelin,prabhjyotsingh/incubator-zeppelin,hammertank/zeppelin,zjffdu/zeppelin,fogbeam/zeppelin_mirror,hammertank/zeppelin,apache/zeppelin,hammertank/zeppelin,fogbeam/zeppelin_mirror
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.flink.sql; import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment; import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.types.Row; import org.apache.flink.util.StringUtils; import org.apache.zeppelin.flink.FlinkShims; import org.apache.zeppelin.flink.JobManager; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.tabledata.TableDataUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.List; public class UpdateStreamSqlJob extends AbstractStreamSqlJob { private static Logger LOGGER = LoggerFactory.getLogger(UpdateStreamSqlJob.class); private List<Row> materializedTable = new ArrayList<>(); private List<Row> lastSnapshot = new ArrayList<>(); public UpdateStreamSqlJob(StreamExecutionEnvironment senv, TableEnvironment stEnv, JobManager jobManager, InterpreterContext context, int defaultParallelism, FlinkShims flinkShims) { super(senv, stEnv, jobManager, context, defaultParallelism, flinkShims); } @Override protected String getType() { return "retract"; } protected void processInsert(Row row) { enableToRefresh = true; resultLock.notify(); LOGGER.debug("processInsert: " + row.toString()); materializedTable.add(row); } protected void processDelete(Row row) { enableToRefresh = false; LOGGER.debug("processDelete: " + row.toString()); for (int i = 0; i < materializedTable.size(); i++) { if (flinkShims.rowEquals(materializedTable.get(i), row)) { LOGGER.debug("real processDelete: " + row.toString()); materializedTable.remove(i); break; } } } @Override protected String buildResult() { StringBuilder builder = new StringBuilder(); builder.append("%table\n"); for (int i = 0; i < schema.getFieldCount(); ++i) { String field = schema.getFieldNames()[i]; builder.append(field); if (i != (schema.getFieldCount() - 1)) { builder.append("\t"); } } builder.append("\n"); // sort it by the first column materializedTable.sort((r1, r2) -> { String f1 = TableDataUtils.normalizeColumn(StringUtils.arrayAwareToString(r1.getField(0))); String f2 = TableDataUtils.normalizeColumn(StringUtils.arrayAwareToString(r2.getField(0))); return f1.compareTo(f2); }); for (Row row : materializedTable) { for (int i = 0; i < row.getArity(); ++i) { Object field = row.getField(i); builder.append(TableDataUtils.normalizeColumn(StringUtils.arrayAwareToString(field))); if (i != (row.getArity() - 1)) { builder.append("\t"); } } builder.append("\n"); } builder.append("\n%text\n"); return builder.toString(); } @Override protected void refresh(InterpreterContext context) { context.out().clear(false); try { String result = buildResult(); context.out.write(result); context.out.flush(); 
LOGGER.debug("Refresh with data: " + result); this.lastSnapshot.clear(); for (Row row : materializedTable) { this.lastSnapshot.add(row); } } catch (IOException e) { LOGGER.error("Fail to refresh data", e); } } }
flink/interpreter/src/main/java/org/apache/zeppelin/flink/sql/UpdateStreamSqlJob.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.flink.sql; import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment; import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.types.Row; import org.apache.flink.util.StringUtils; import org.apache.zeppelin.flink.FlinkShims; import org.apache.zeppelin.flink.JobManager; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.tabledata.TableDataUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.List; public class UpdateStreamSqlJob extends AbstractStreamSqlJob { private static Logger LOGGER = LoggerFactory.getLogger(UpdateStreamSqlJob.class); private List<Row> materializedTable = new ArrayList<>(); private List<Row> lastSnapshot = new ArrayList<>(); public UpdateStreamSqlJob(StreamExecutionEnvironment senv, TableEnvironment stEnv, JobManager jobManager, InterpreterContext context, int defaultParallelism, FlinkShims flinkShims) { super(senv, stEnv, jobManager, context, defaultParallelism, flinkShims); } @Override protected String getType() { return "retract"; } protected void processInsert(Row row) { enableToRefresh = true; resultLock.notify(); LOGGER.debug("processInsert: " + row.toString()); materializedTable.add(row); } protected void processDelete(Row row) { enableToRefresh = false; LOGGER.debug("processDelete: " + row.toString()); for (int i = 0; i < materializedTable.size(); i++) { if (materializedTable.get(i).equals(row)) { LOGGER.debug("real processDelete: " + row.toString()); materializedTable.remove(i); break; } } } @Override protected String buildResult() { StringBuilder builder = new StringBuilder(); builder.append("%table\n"); for (int i = 0; i < schema.getFieldCount(); ++i) { String field = schema.getFieldNames()[i]; builder.append(field); if (i != (schema.getFieldCount() - 1)) { builder.append("\t"); } } builder.append("\n"); // sort it by the first column materializedTable.sort((r1, r2) -> { String f1 = TableDataUtils.normalizeColumn(StringUtils.arrayAwareToString(r1.getField(0))); String f2 = TableDataUtils.normalizeColumn(StringUtils.arrayAwareToString(r2.getField(0))); return f1.compareTo(f2); }); for (Row row : materializedTable) { for (int i = 0; i < row.getArity(); ++i) { Object field = row.getField(i); builder.append(TableDataUtils.normalizeColumn(StringUtils.arrayAwareToString(field))); if (i != (row.getArity() - 1)) { builder.append("\t"); } } builder.append("\n"); } builder.append("\n%text\n"); return builder.toString(); } @Override protected void refresh(InterpreterContext context) { context.out().clear(false); try { String result = buildResult(); context.out.write(result); context.out.flush(); LOGGER.debug("Refresh with 
data: " + result); this.lastSnapshot.clear(); for (Row row : materializedTable) { this.lastSnapshot.add(row); } } catch (IOException e) { LOGGER.error("Fail to refresh data", e); } } }
[hotfix] use proper row equals for flink 1.11
flink/interpreter/src/main/java/org/apache/zeppelin/flink/sql/UpdateStreamSqlJob.java
[hotfix] use proper row equals for flink 1.11
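The Zeppelin fix above replaces a direct Row.equals call with flinkShims.rowEquals(...), deferring the comparison to a version-specific shim: from Flink 1.11 on, Row also carries a change kind (RowKind) that takes part in equality, so matching a retraction against the previously inserted row needs a field-only comparison. The sketch below illustrates the idea with plain Java; FakeRow and rowEquals are illustrative names, not the Flink or FlinkShims API.

import java.util.Arrays;

// Illustrative sketch of why retraction matching goes through a shim instead of
// calling equals directly. FakeRow mimics a row type whose equals is kind-aware;
// the shim compares data fields only.
public class RowEqualsShimSketch {

    static final class FakeRow {
        final String kind;        // e.g. "INSERT" or "DELETE"
        final Object[] fields;

        FakeRow(String kind, Object... fields) {
            this.kind = kind;
            this.fields = fields;
        }

        @Override
        public boolean equals(Object other) {
            if (!(other instanceof FakeRow)) return false;
            FakeRow that = (FakeRow) other;
            // Kind-aware equality, similar in spirit to a 1.11-style row.
            return kind.equals(that.kind) && Arrays.deepEquals(fields, that.fields);
        }

        @Override
        public int hashCode() {
            return Arrays.deepHashCode(fields);
        }
    }

    /** The "shim": equality on the data fields only, ignoring the change kind. */
    static boolean rowEquals(FakeRow a, FakeRow b) {
        return Arrays.deepEquals(a.fields, b.fields);
    }

    public static void main(String[] args) {
        FakeRow inserted = new FakeRow("INSERT", "url_1", 42L);
        FakeRow retraction = new FakeRow("DELETE", "url_1", 42L);

        System.out.println(inserted.equals(retraction));    // false: kinds differ
        System.out.println(rowEquals(inserted, retraction)); // true: the retraction finds its row
    }
}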
Java
apache-2.0
faa6258597891f0e51900a021f8a07373d693eb6
0
vladmm/intellij-community,dslomov/intellij-community,allotria/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,jexp/idea2,clumsy/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,kdwink/intellij-community,consulo/consulo,blademainer/intellij-community,holmes/intellij-community,diorcety/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,signed/intellij-community,ryano144/intellij-community,da1z/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,signed/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,kool79/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,kool79/intellij-community,petteyg/intellij-community,diorcety/intellij-community,da1z/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,supersven/intellij-community,ahb0327/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,supersven/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,allotria/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,ibinti/intellij-community,semonte/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,ernestp/consulo
,MichaelNedzelsky/intellij-community,izonder/intellij-community,xfournet/intellij-community,blademainer/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,supersven/intellij-community,jagguli/intellij-community,diorcety/intellij-community,fnouama/intellij-community,slisson/intellij-community,retomerz/intellij-community,allotria/intellij-community,kool79/intellij-community,vladmm/intellij-community,samthor/intellij-community,amith01994/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,asedunov/intellij-community,jagguli/intellij-community,caot/intellij-community,semonte/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,ernestp/consulo,vvv1559/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,fnouama/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,diorcety/intellij-community,FHannes/intellij-community,clumsy/intellij-community,slisson/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,adedayo/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,blademainer/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,robovm/robovm-studio,kool79/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,samthor/intellij-community,izonder/intellij-community,ernestp/consulo,ibinti/intellij-community,vladmm/intellij-community,holmes/intellij-community,hurricup/intellij-community,vladmm/intellij-community,ernestp/consulo,pwoodworth/intellij-community,consulo/consulo,TangHao1987/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,petteyg/intellij-community,supersven/intellij-community,samthor/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,allotria/intellij-community,supersven/intellij-community,slisson/intellij-community,joewalnes/idea-community,supersven/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,semonte/intellij-community,kdwink/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,ol-loginov/in
tellij-community,asedunov/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,robovm/robovm-studio,clumsy/intellij-community,retomerz/intellij-community,amith01994/intellij-community,fitermay/intellij-community,supersven/intellij-community,ryano144/intellij-community,consulo/consulo,jexp/idea2,da1z/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,slisson/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,ryano144/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,holmes/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,clumsy/intellij-community,kdwink/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,ahb0327/intellij-community,retomerz/intellij-community,consulo/consulo,idea4bsd/idea4bsd,FHannes/intellij-community,signed/intellij-community,ibinti/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,izonder/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,jexp/idea2,ol-loginov/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,supersven/intellij-community,retomerz/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,signed/intellij-community,fnouama/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,signed/intellij-
community,caot/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,apixandru/intellij-community,allotria/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,samthor/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,apixandru/intellij-community,fnouama/intellij-community,jexp/idea2,petteyg/intellij-community,dslomov/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,caot/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,adedayo/intellij-community,kdwink/intellij-community,dslomov/intellij-community,xfournet/intellij-community,joewalnes/idea-community,jexp/idea2,consulo/consulo,akosyakov/intellij-community,vladmm/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,apixandru/intellij-community,diorcety/intellij-community,jexp/idea2,joewalnes/idea-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,da1z/intellij-community,fitermay/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,hurricup/intellij-community,kdwink/intellij-community,joewalnes/idea-community,michaelgallacher/intellij-community,da1z/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,xfournet/intellij-community,vladmm/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,izonder/intellij-community,nicolargo/intellij-community,holmes/intellij-community,fnouama/intellij-community,slisson/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,clumsy/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,allotria/intellij-community,kdwink/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,da1z/intellij-community,petteyg/intellij-community,amith01994/intellij-
community,clumsy/intellij-community,lucafavatella/intellij-community,jexp/idea2,da1z/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,slisson/intellij-community,ryano144/intellij-community,diorcety/intellij-community,vladmm/intellij-community,amith01994/intellij-community,fitermay/intellij-community,asedunov/intellij-community,da1z/intellij-community,da1z/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,allotria/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,caot/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,joewalnes/idea-community,xfournet/intellij-community,wreckJ/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,semonte/intellij-community,supersven/intellij-community,dslomov/intellij-community,samthor/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,clumsy/intellij-community,retomerz/intellij-community,blademainer/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,signed/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,izonder/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,adedayo/intellij-community,clumsy/intellij-community,kool79/intellij-community,holmes/intellij-community,samthor/intellij-community,hurricup/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,asedunov/intellij-community,kool79/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,pwoodworth/intellij-community,Lekanich/intellij-community,fnouama/intellij-community,apixandru/intellij-community,joewalnes/idea-community,orekyuu/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,nicol
argo/intellij-community,fitermay/intellij-community,vladmm/intellij-community,dslomov/intellij-community,caot/intellij-community,holmes/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,blademainer/intellij-community,robovm/robovm-studio,robovm/robovm-studio,petteyg/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,joewalnes/idea-community,da1z/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,semonte/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,FHannes/intellij-community,diorcety/intellij-community,joewalnes/idea-community,gnuhub/intellij-community,ernestp/consulo,ftomassetti/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,slisson/intellij-community,xfournet/intellij-community,allotria/intellij-community,slisson/intellij-community,caot/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,consulo/consulo,muntasirsyed/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,ernestp/consulo,FHannes/intellij-community,fitermay/intellij-community,ryano144/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,slisson/intellij-community,hurricup/intellij-community,allotria/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,slisson/intellij-community,signed/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,signed/intellij-community,SerCeMan/intellij-community,signed/intellij-community,FHannes/intellij-community,xfournet/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,caot/intellij-community,alphafoobar/intellij-community,signed/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,Lekanich/intellij-community,caot/intellij-community,da1z/intellij-community,holmes/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,adedayo/intellij-community,petteyg/intellij-community,semonte/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,FHannes/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,jexp/idea2,suncycheng/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,adedayo/intellij-community,kool79/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,signed/intellij-community,michaelgall
acher/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,ibinti/intellij-community
/* * Copyright (c) 2000-2006 JetBrains s.r.o. All Rights Reserved. */ package com.intellij.util.xml.ui.actions; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationBundle; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.ui.popup.ListPopup; import com.intellij.util.xml.*; import com.intellij.util.xml.reflect.DomCollectionChildDescription; import com.intellij.util.xml.ui.DomCollectionControl; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.awt.event.KeyEvent; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.List; /** * User: Sergey.Vasiliev */ public abstract class AddDomElementAction extends AnAction { private final static ShortcutSet shortcutSet = new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_INSERT, 0)); public AddDomElementAction() { super(ApplicationBundle.message("action.add"), null, DomCollectionControl.ADD_ICON); } public void update(AnActionEvent e) { if (!isEnabled(e)) return; final AnAction[] actions = getChildren(e); for (final AnAction action : actions) { e.getPresentation().setEnabled(true); action.update(e); if (e.getPresentation().isEnabled()) { break; } } if (actions.length == 1) { e.getPresentation().setText(actions[0].getTemplatePresentation().getText()); } else { final String actionText = getActionText(e); if (!actionText.endsWith("...")) { e.getPresentation().setText(actionText + (actions.length > 1 ? "..." : "")); } } e.getPresentation().setIcon(DomCollectionControl.ADD_ICON); super.update(e); } public void actionPerformed(AnActionEvent e) { final AnAction[] actions = getChildren(e); if (actions.length > 1) { final DefaultActionGroup group = new DefaultActionGroup(); for (final AnAction action : actions) { group.add(action); } final DataContext dataContext = e.getDataContext(); final ListPopup groupPopup = JBPopupFactory.getInstance().createActionGroupPopup(null,//J2EEBundle.message("label.menu.title.add.activation.config.property"), group, dataContext, JBPopupFactory.ActionSelectionAid.NUMBERING, true); showPopup(groupPopup, e); } else if (actions.length == 1) { actions[0].actionPerformed(e); } } protected String getActionText(final AnActionEvent e) { return e.getPresentation().getText(); } protected boolean isEnabled(final AnActionEvent e) { return true; } protected void showPopup(final ListPopup groupPopup, final AnActionEvent e) { final Component component = e.getInputEvent().getComponent(); if (component instanceof JMenuItem) { groupPopup.showInBestPositionFor(e.getDataContext()); } else { groupPopup.showUnderneathOf(component); } } @NotNull public AnAction[] getChildren(final AnActionEvent e) { Project project = (Project)e.getDataContext().getData(DataConstants.PROJECT); if (project == null) return AnAction.EMPTY_ARRAY; DomCollectionChildDescription[] descriptions = getDomCollectionChildDescriptions(e); final List<AnAction> actions = new ArrayList<AnAction>(); for (DomCollectionChildDescription description : descriptions) { final TypeChooser chooser = DomManager.getDomManager(project).getTypeChooserManager().getTypeChooser(description.getType()); for (Type type : chooser.getChooserTypes()) { final Class<?> rawType = DomReflectionUtil.getRawType(type); String name = ElementPresentationManager.getTypeName(rawType); Icon icon = null; if (!showAsPopup() || descriptions.length == 1) { // if (descriptions.length > 1) { icon = 
ElementPresentationManager.getIconForClass(rawType); // } } actions.add(createAddingAction(e, ApplicationBundle.message("action.add") + " " + name, icon, type, description)); } } if (actions.size() > 1 && showAsPopup()) { ActionGroup group = new ActionGroup() { public AnAction[] getChildren(@Nullable AnActionEvent e) { return actions.toArray(AnAction.EMPTY_ARRAY); } }; return new AnAction[]{new ShowPopupAction(group)}; } else { if (actions.size() > 1) { actions.add(Separator.getInstance()); } else if (actions.size() == 1) { } } return actions.toArray(AnAction.EMPTY_ARRAY); } protected abstract AnAction createAddingAction(final AnActionEvent e, final String name, final Icon icon, final Type type, final DomCollectionChildDescription description); @NotNull protected abstract DomCollectionChildDescription[] getDomCollectionChildDescriptions(final AnActionEvent e); @Nullable protected abstract DomElement getParentDomElement(final AnActionEvent e); protected abstract JComponent getComponent(AnActionEvent e); protected boolean showAsPopup() { return true; } protected class ShowPopupAction extends AnAction { protected final ActionGroup myGroup; protected ShowPopupAction(ActionGroup group) { super(ApplicationBundle.message("action.add"), null, DomCollectionControl.ADD_ICON); myGroup = group; setShortcutSet(shortcutSet); } public void actionPerformed(AnActionEvent e) { final ListPopup groupPopup = JBPopupFactory.getInstance().createActionGroupPopup(null,//J2EEBundle.message("label.menu.title.add.activation.config.property"), myGroup, e.getDataContext(), JBPopupFactory.ActionSelectionAid.NUMBERING, true); showPopup(groupPopup, e); } } }
dom/openapi/src/com/intellij/util/xml/ui/actions/AddDomElementAction.java
/* * Copyright (c) 2000-2006 JetBrains s.r.o. All Rights Reserved. */ package com.intellij.util.xml.ui.actions; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationBundle; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.ui.popup.ListPopup; import com.intellij.util.xml.*; import com.intellij.util.xml.reflect.DomCollectionChildDescription; import com.intellij.util.xml.ui.DomCollectionControl; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.awt.*; import java.awt.event.KeyEvent; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.List; import javax.swing.*; /** * User: Sergey.Vasiliev */ public abstract class AddDomElementAction extends AnAction { private final static ShortcutSet shortcutSet = new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_INSERT, 0)); public AddDomElementAction() { super(ApplicationBundle.message("action.add"), null, DomCollectionControl.ADD_ICON); } public void update(AnActionEvent e) { if (!isEnabled(e)) return; final AnAction[] actions = getChildren(e); for (final AnAction action : actions) { e.getPresentation().setEnabled(true); action.update(e); if (e.getPresentation().isEnabled()) { break; } } if (actions.length == 1) { e.getPresentation().setText(actions[0].getTemplatePresentation().getText()); } else { final String actionText = getActionText(e); if (!actionText.endsWith("...")) { e.getPresentation().setText(actionText + (actions.length > 1 ? "..." : "")); } } e.getPresentation().setIcon(DomCollectionControl.ADD_ICON); super.update(e); } public void actionPerformed(AnActionEvent e) { final AnAction[] actions = getChildren(e); if (actions.length > 1) { final DefaultActionGroup group = new DefaultActionGroup(); for (final AnAction action : actions) { group.add(action); } final DataContext dataContext = e.getDataContext(); final ListPopup groupPopup = JBPopupFactory.getInstance().createActionGroupPopup(null,//J2EEBundle.message("label.menu.title.add.activation.config.property"), group, dataContext, JBPopupFactory.ActionSelectionAid.NUMBERING, true); showPopup(groupPopup, e); } else { actions[0].actionPerformed(e); } } protected String getActionText(final AnActionEvent e) { return e.getPresentation().getText(); } protected boolean isEnabled(final AnActionEvent e) { return true; } protected void showPopup(final ListPopup groupPopup, final AnActionEvent e) { final Component component = e.getInputEvent().getComponent(); if (component instanceof JMenuItem) { groupPopup.showInBestPositionFor(e.getDataContext()); } else { groupPopup.showUnderneathOf(component); } } @NotNull public AnAction[] getChildren(final AnActionEvent e) { Project project = (Project)e.getDataContext().getData(DataConstants.PROJECT); if (project == null) return AnAction.EMPTY_ARRAY; DomCollectionChildDescription[] descriptions = getDomCollectionChildDescriptions(e); final List<AnAction> actions = new ArrayList<AnAction>(); for (DomCollectionChildDescription description : descriptions) { final TypeChooser chooser = DomManager.getDomManager(project).getTypeChooserManager().getTypeChooser(description.getType()); for (Type type : chooser.getChooserTypes()) { final Class<?> rawType = DomReflectionUtil.getRawType(type); String name = ElementPresentationManager.getTypeName(rawType); Icon icon = null; if (!showAsPopup() || descriptions.length == 1) { // if (descriptions.length > 1) { icon = 
ElementPresentationManager.getIconForClass(rawType); // } } actions.add(createAddingAction(e, ApplicationBundle.message("action.add") + " " + name, icon, type, description)); } } if (actions.size() > 1 && showAsPopup()) { ActionGroup group = new ActionGroup() { public AnAction[] getChildren(@Nullable AnActionEvent e) { return actions.toArray(AnAction.EMPTY_ARRAY); } }; return new AnAction[]{new ShowPopupAction(group)}; } else { if (actions.size() > 1) { actions.add(Separator.getInstance()); } else if (actions.size() == 1) { } } return actions.toArray(AnAction.EMPTY_ARRAY); } protected abstract AnAction createAddingAction(final AnActionEvent e, final String name, final Icon icon, final Type type, final DomCollectionChildDescription description); @NotNull protected abstract DomCollectionChildDescription[] getDomCollectionChildDescriptions(final AnActionEvent e); @Nullable protected abstract DomElement getParentDomElement(final AnActionEvent e); protected abstract JComponent getComponent(AnActionEvent e); protected boolean showAsPopup() { return true; } protected class ShowPopupAction extends AnAction { protected final ActionGroup myGroup; protected ShowPopupAction(ActionGroup group) { super(ApplicationBundle.message("action.add"), null, DomCollectionControl.ADD_ICON); myGroup = group; setShortcutSet(shortcutSet); } public void actionPerformed(AnActionEvent e) { final ListPopup groupPopup = JBPopupFactory.getInstance().createActionGroupPopup(null,//J2EEBundle.message("label.menu.title.add.activation.config.property"), myGroup, e.getDataContext(), JBPopupFactory.ActionSelectionAid.NUMBERING, true); showPopup(groupPopup, e); } } }
AIOOBE (ArrayIndexOutOfBoundsException)
dom/openapi/src/com/intellij/util/xml/ui/actions/AddDomElementAction.java
AIOOBE (ArrayIndexOutOfBoundsException)
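The difference between the new and old versions above is a length guard in actionPerformed: the old code indexed actions[0] in a bare else branch, which throws ArrayIndexOutOfBoundsException when getChildren returns an empty array, while the new code only runs that branch when actions.length == 1. A minimal, self-contained sketch of the same guard in plain Java (not the IntelliJ API; the names below are illustrative):

public class LengthGuardSketch {
    // Stands in for AddDomElementAction.actionPerformed: run the single
    // action directly only when exactly one child exists.
    static void perform(Runnable[] actions) {
        if (actions.length > 1) {
            for (Runnable action : actions) action.run(); // stands in for the popup branch
        } else if (actions.length == 1) {                 // the old code used a bare "else" here
            actions[0].run();
        }
        // actions.length == 0: do nothing instead of indexing an empty array
    }

    public static void main(String[] args) {
        perform(new Runnable[0]);                                       // no AIOOBE
        perform(new Runnable[] { () -> System.out.println("single") }); // runs the one action
    }
}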
Java
apache-2.0
1fe0c31304683e4efd50d9efbd01d190bc9f15f8
0
jrimum/texgit,braully/bopepo,braully/bopepo,braully/bopepo
package org.jrimum.texgit; import static org.apache.commons.lang.StringUtils.isNotBlank; import static org.jrimum.utilix.Objects.isNotNull; import java.io.File; import org.jrimum.texgit.engine.TexgitManager; public final class Texgit { public static final FlatFile<Record> createFlatFile(File xmlDef) throws TexgitException { if (isNotNull(xmlDef)){ return TexgitManager.buildFlatFile(xmlDef); } return null; } public static final FlatFile<Record> createFlatFile(String xmlDef) throws TexgitException { try { if (isNotBlank(xmlDef)) return createFlatFile(new File(xmlDef)); } catch (Exception e) { throw new TexgitException(e); } return null; } }
src/main/java/org/jrimum/texgit/Texgit.java
package org.jrimum.texgit; import static org.apache.commons.lang.StringUtils.isNotBlank; import static org.jrimum.utilix.Objects.isNotNull; import java.io.File; import org.jrimum.texgit.engine.TexgitManager; public final class Texgit { public static final FlatFile<Record> createFlatFile(File xmlDef)throws TexgitException{ FlatFile<Record> iFlatFile = null; if (isNotNull(xmlDef)) iFlatFile = TexgitManager.buildFlatFile(xmlDef); return iFlatFile; } public static final FlatFile<Record> createFlatFile(String xmlDef)throws TexgitException{ FlatFile<Record> iFlatFile = null; try{ if(isNotBlank(xmlDef)) iFlatFile = createFlatFile(new File(xmlDef)); }catch (Exception e) { throw new TexgitException(e); } return iFlatFile; } }
Code review
src/main/java/org/jrimum/texgit/Texgit.java
Code review
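The change between the two versions above is a small readability refactor: the null-initialized local iFlatFile that was assigned and returned at the end of each factory method is replaced with direct returns, plus an explicit return null when the argument is absent. A self-contained sketch of the pattern, with illustrative names rather than the Texgit API:

public class EarlyReturnSketch {
    // Before: T result = null; if (valid) result = build(...); return result;
    // After: return directly from each branch.
    static String createOrNull(String xmlDef) {
        if (xmlDef != null && !xmlDef.trim().isEmpty()) {
            return "flat-file:" + xmlDef; // stands in for TexgitManager.buildFlatFile(...)
        }
        return null;
    }

    public static void main(String[] args) {
        System.out.println(createOrNull("layout.xml")); // flat-file:layout.xml
        System.out.println(createOrNull(null));         // null
    }
}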
Java
apache-2.0
80e93c700fed3f247a883454a5ac7fe4b1115689
0
projectbuendia/client,projectbuendia/client,luisfdeandrade/client,christianrafael/client,G1DR4/client,llvasconcellos/client,llvasconcellos/client,pedromarins/client,viniciusboson/client,luisfdeandrade/client,projectbuendia/client,christianrafael/client,projectbuendia/client,viniciusboson/client,llvasconcellos/client,projectbuendia/client,luisfdeandrade/client,christianrafael/client,fcruz/client,viniciusboson/client,jvanz/client
package org.msf.records.net; import android.content.Context; import android.support.annotation.Nullable; import android.util.Log; import com.android.volley.Response; import com.android.volley.VolleyError; import com.google.gson.Gson; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.msf.records.model.Patient; import org.msf.records.model.PatientAge; import org.msf.records.model.PatientLocation; import org.msf.records.utils.Utils; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * Implementation of Server RPCs that will talk * Created by nfortescue on 11/3/14. */ public class OpenMrsServer implements Server { // private static final String USERNAME = "buendiatest1"; // private static final String PASSWORD = "Buendia123"; private final Gson gson = new Gson(); private final VolleySingleton mVolley; private final String mRootUrl; private final String mUserName; private final String mPassword; public OpenMrsServer(Context context, @Nullable String rootUrl, @Nullable String userName, @Nullable String password) { mRootUrl = (rootUrl == null) ? Constants.API_URL : rootUrl; mUserName = (userName == null) ? Constants.LOCAL_ADMIN_USERNAME : userName; mPassword = (password == null) ? Constants.LOCAL_ADMIN_PASSWORD : password; this.mVolley = VolleySingleton.getInstance(context.getApplicationContext()); } @Override public void addPatient(final Map<String, String> patientArguments, final Response.Listener<Patient> patientListener, final Response.ErrorListener errorListener, final String logTag) { JSONObject requestBody = new JSONObject(); try { putIfSet(patientArguments, Server.PATIENT_ID_KEY, requestBody, Server.PATIENT_ID_KEY); putIfSet(patientArguments, Server.PATIENT_GIVEN_NAME_KEY, requestBody, Server.PATIENT_GIVEN_NAME_KEY); putIfSet(patientArguments, Server.PATIENT_FAMILY_NAME_KEY, requestBody, Server.PATIENT_FAMILY_NAME_KEY); putIfSet(patientArguments, Server.PATIENT_GENDER_KEY, requestBody, Server.PATIENT_GENDER_KEY); } catch (JSONException e) { // This is almost never recoverable, and should not happen in correctly functioning code // So treat like NPE and rethrow. 
throw new RuntimeException(e); } OpenMrsJsonRequest request = new OpenMrsJsonRequest( mUserName, mPassword, mRootUrl + "/patient", requestBody, new Response.Listener<JSONObject>() { @Override public void onResponse(JSONObject response) { try { patientListener.onResponse(parsePatientJson(response)); } catch (JSONException e) { Log.e(logTag, "Failed to parse response", e); errorListener.onErrorResponse( new VolleyError("Failed to parse response", e)); } } }, errorListener); mVolley.addToRequestQueue(request, logTag); } private void putIfSet(Map<String, String> patientArguments, String key, JSONObject name, String param) throws JSONException { String value = patientArguments.get(key); if (value != null) { name.put(param, value); } } @Override public void getPatient(String patientId, final Response.Listener<Patient> patientListener, final Response.ErrorListener errorListener, final String logTag) { OpenMrsJsonRequest request = new OpenMrsJsonRequest( mUserName, mPassword, mRootUrl + "/patient/" + patientId, null, new Response.Listener<JSONObject>() { @Override public void onResponse(JSONObject response) { try { patientListener.onResponse(parsePatientJson(response)); } catch (JSONException e) { Log.e(logTag, "Failed to parse response", e); errorListener.onErrorResponse( new VolleyError("Failed to parse response", e)); } } }, errorListener); mVolley.addToRequestQueue(request, logTag); } @Override public void updatePatient(String patientId, Map<String, String> patientArguments, Response.Listener<Patient> patientListener, Response.ErrorListener errorListener, String logTag) { errorListener.onErrorResponse(new VolleyError("Not yet implemented")); } @Override public void listPatients(@Nullable String filterState, @Nullable String filterLocation, @Nullable String filterQueryTerm, final Response.Listener<List<Patient>> patientListener, Response.ErrorListener errorListener, final String logTag) { String query = filterQueryTerm != null ? filterQueryTerm : ""; OpenMrsJsonRequest request = new OpenMrsJsonRequest( mUserName, mPassword, mRootUrl + "/patient?q=" + Utils.urlEncode(query), null, new Response.Listener<JSONObject>() { @Override public void onResponse(JSONObject response) { ArrayList<Patient> result = new ArrayList<>(); try { JSONArray results = response.getJSONArray("results"); for (int i=0; i<results.length(); i++) { Patient patient = parsePatientJson(results.getJSONObject(i)); result.add(patient); } } catch (JSONException e) { Log.e(logTag, "Failed to parse response", e); } patientListener.onResponse(result); } }, errorListener); mVolley.addToRequestQueue(request, logTag); } private Patient parsePatientJson(JSONObject object) throws JSONException { Patient patient = gson.fromJson(object.toString(), Patient.class); // TODO(nfortescue): fill these in properly patient.assigned_location = new PatientLocation(); patient.assigned_location.zone = 1; patient.assigned_location.bed = 2; patient.assigned_location.tent = 3; if (patient.age == null) { patient.age = new PatientAge(); patient.age.type = "years"; patient.age.years = 24; } patient.first_showed_symptoms_timestamp_utc = 0L; if (patient.created_timestamp_utc != null) { patient.created_timestamp_utc /= 1000; // UI wants it in seconds, not millis } return patient; } @Override public void cancelPendingRequests(String logTag) { mVolley.cancelPendingRequests(logTag); } }
app/src/main/java/org/msf/records/net/OpenMrsServer.java
package org.msf.records.net; import android.content.Context; import android.support.annotation.Nullable; import android.util.Log; import com.android.volley.Response; import com.android.volley.VolleyError; import com.google.gson.Gson; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.msf.records.model.Patient; import org.msf.records.model.PatientAge; import org.msf.records.model.PatientLocation; import org.msf.records.utils.Utils; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * Implementation of Server RPCs that will talk * Created by nfortescue on 11/3/14. */ public class OpenMrsServer implements Server { // private static final String USERNAME = "buendiatest1"; // private static final String PASSWORD = "Buendia123"; private final Gson gson = new Gson(); private final VolleySingleton mVolley; private final String mRootUrl; private final String mUserName; private final String mPassword; public OpenMrsServer(Context context, @Nullable String rootUrl, @Nullable String userName, @Nullable String password) { mRootUrl = (rootUrl == null) ? Constants.API_URL : rootUrl; mUserName = (userName == null) ? Constants.LOCAL_ADMIN_USERNAME : userName; mPassword = (password == null) ? Constants.LOCAL_ADMIN_PASSWORD : password; this.mVolley = VolleySingleton.getInstance(context.getApplicationContext()); } @Override public void addPatient(final Map<String, String> patientArguments, final Response.Listener<Patient> patientListener, final Response.ErrorListener errorListener, final String logTag) { JSONObject requestBody = new JSONObject(); try { putIfSet(patientArguments, Server.PATIENT_ID_KEY, requestBody, Server.PATIENT_ID_KEY); putIfSet(patientArguments, Server.PATIENT_GIVEN_NAME_KEY, requestBody, Server.PATIENT_GIVEN_NAME_KEY); putIfSet(patientArguments, Server.PATIENT_FAMILY_NAME_KEY, requestBody, Server.PATIENT_FAMILY_NAME_KEY); putIfSet(patientArguments, Server.PATIENT_GENDER_KEY, requestBody, Server.PATIENT_GENDER_KEY); } catch (JSONException e) { // This is almost never recoverable, and should not happen in correctly functioning code // So treat like NPE and rethrow. 
throw new RuntimeException(e); } OpenMrsJsonRequest request = new OpenMrsJsonRequest( mUserName, mPassword, mRootUrl + "/patient", requestBody, new Response.Listener<JSONObject>() { @Override public void onResponse(JSONObject response) { try { patientListener.onResponse(parsePatientJson(response)); } catch (JSONException e) { Log.e(logTag, "Failed to parse response", e); errorListener.onErrorResponse( new VolleyError("Failed to parse response", e)); } } }, errorListener); mVolley.addToRequestQueue(request, logTag); } private void putIfSet(Map<String, String> patientArguments, String key, JSONObject name, String param) throws JSONException { String value = patientArguments.get(key); if (value != null) { name.put(param, value); } } @Override public void getPatient(String patientId, final Response.Listener<Patient> patientListener, final Response.ErrorListener errorListener, final String logTag) { OpenMrsJsonRequest request = new OpenMrsJsonRequest( mUserName, mPassword, mRootUrl + "/patient/" + patientId, null, new Response.Listener<JSONObject>() { @Override public void onResponse(JSONObject response) { try { patientListener.onResponse(parsePatientJson(response)); } catch (JSONException e) { Log.e(logTag, "Failed to parse response", e); errorListener.onErrorResponse( new VolleyError("Failed to parse response", e)); } } }, errorListener); mVolley.addToRequestQueue(request, logTag); } @Override public void updatePatient(String patientId, Map<String, String> patientArguments, Response.Listener<Patient> patientListener, Response.ErrorListener errorListener, String logTag) { errorListener.onErrorResponse(new VolleyError("Not yet implemented")); } @Override public void listPatients(@Nullable String filterState, @Nullable String filterLocation, @Nullable String filterQueryTerm, final Response.Listener<List<Patient>> patientListener, Response.ErrorListener errorListener, final String logTag) { String query = filterQueryTerm != null ? filterQueryTerm : ""; OpenMrsJsonRequest request = new OpenMrsJsonRequest( mUserName, mPassword, mRootUrl + "/patient?q=" + Utils.urlEncode(query), null, new Response.Listener<JSONObject>() { @Override public void onResponse(JSONObject response) { ArrayList<Patient> result = new ArrayList<>(); try { JSONArray results = response.getJSONArray("results"); for (int i=0; i<results.length(); i++) { Patient patient = parsePatientJson(results.getJSONObject(i)); result.add(patient); } } catch (JSONException e) { Log.e(logTag, "Failed to parse response", e); } patientListener.onResponse(result); } }, errorListener); mVolley.addToRequestQueue(request, logTag); } private Patient parsePatientJson(JSONObject object) throws JSONException { Patient patient = gson.fromJson(object.toString(), Patient.class); // TODO(nfortescue): fill these in properly patient.assigned_location = new PatientLocation(); patient.assigned_location.zone = 1; patient.assigned_location.bed = 2; patient.assigned_location.tent = 3; patient.age = new PatientAge(); patient.age.type = "years"; patient.age.years = 24; patient.first_showed_symptoms_timestamp_utc = 0L; if (patient.created_timestamp_utc != null) { patient.created_timestamp_utc /= 1000; // UI wants it in seconds, not millis } return patient; } @Override public void cancelPendingRequests(String logTag) { mVolley.cancelPendingRequests(logTag); } }
Now only write 24 for the age if the server hasn't sent it.
app/src/main/java/org/msf/records/net/OpenMrsServer.java
Now only write 24 for the age if the server hasn't sent it.
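As the commit message says, the placeholder age of 24 years is now written only when the server omitted the field: the new version wraps the hard-coded PatientAge block in a patient.age == null check instead of always overwriting whatever was parsed. A self-contained sketch of that null-guarded default (the tiny Patient/Age classes below are illustrative, not the real model):

public class AgeDefaultSketch {
    static final class Age { String type; int years; }
    static final class Patient { Age age; }

    static void applyDefaults(Patient p) {
        if (p.age == null) {         // previously this block ran unconditionally,
            p.age = new Age();       // overwriting the value sent by the server
            p.age.type = "years";
            p.age.years = 24;
        }
    }

    public static void main(String[] args) {
        Patient fromServer = new Patient();
        fromServer.age = new Age();
        fromServer.age.years = 31;
        applyDefaults(fromServer);
        System.out.println(fromServer.age.years); // 31, not the placeholder 24
    }
}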
Java
bsd-3-clause
303ceb060da7ddb0f5cee472a82b1093ceded1b2
0
yegor256/rexsl,krzyk/rexsl,krzyk/rexsl,yegor256/rexsl
/** * Copyright (c) 2011-2012, ReXSL.com * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. 3) Neither the name of the ReXSL.com nor * the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.rexsl.trap; import com.ymock.util.Logger; import java.io.IOException; import java.util.Date; import java.util.Iterator; import java.util.Properties; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import javax.mail.BodyPart; import javax.mail.Message; import javax.mail.Multipart; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMultipart; /** * Notifier by SMTP, with pre-packaging into bulks. * * <p>Configuration of this notifier is similar to {@link SmtpNotifier}, but * requires one more parameter: {@code interval}. This parameter should contain * an integer number of minutes we should wait before actual delivery of * emails. This option may be critically important if you expect high volume * of exceptions and don't want to receive many individual emails. * * @author Yegor Bugayenko (yegor@rexsl.com) * @version $Id$ * @since 0.3.6 */ @SuppressWarnings("PMD.DoNotUseThreads") public final class SmtpBulkNotifier extends AbstractSmtpNotifier { /** * Maximum allowed interval in minutes. */ private static final long MAX_INTERVAL = 180L; /** * Minimum allowed interval in minutes. */ private static final long MIN_INTERVAL = 5L; /** * Running thread. */ private final transient ScheduledFuture future; /** * List of reported defect. */ private final transient Queue<Defect> defects = new ConcurrentLinkedQueue<Defect>(); /** * Public ctor. 
* @param props The properties */ public SmtpBulkNotifier(final Properties props) { super(props); final long interval = this.interval(); this.future = Executors .newSingleThreadScheduledExecutor() .scheduleAtFixedRate( new Runnable() { @Override public void run() { SmtpBulkNotifier.this.background(); } }, 0L, interval, TimeUnit.MINUTES ); Logger.info( this, "#SmtpBulkNotifier(): started with %dmin interval", interval ); } /** * {@inheritDoc} */ @Override public void notify(final String defect) throws IOException { this.defects.add(new Defect(defect)); } /** * {@inheritDoc} */ @Override public void close() throws IOException { this.background(); if (!this.future.cancel(true)) { throw new IOException("Failed to close scheduled future"); } } /** * Run this on background and send emails. */ private void background() { synchronized (this.defects) { if (!this.defects.isEmpty()) { try { this.send(this.compress()); } catch (IOException ex) { Logger.error(this, "#run(): %[exception]s", ex); } } } } /** * Compress all defects into one message. * @return The message * @throws IOException If some problem inside */ private Message compress() throws IOException { final StringBuilder text = new StringBuilder(); text.append( String.format( "During the last few mins there were %d exception(s):%s%2$s", this.defects.size(), AbstractSmtpNotifier.CRLF ) ); final StringBuilder attachment = new StringBuilder(); final Iterator<Defect> iterator = this.defects.iterator(); while (iterator.hasNext()) { final Defect defect = iterator.next(); text.append(defect.date()).append(AbstractSmtpNotifier.CRLF); attachment.append(defect.text()).append("\n\n"); iterator.remove(); } text.append(AbstractSmtpNotifier.CRLF) .append("Detailed information is attached in text file."); return this.mime(text.toString(), attachment.toString()); } /** * Create MIME message with body and attachment. * @param text The body * @param attachment The attachment * @return The message * @throws IOException If some problem inside */ private Message mime(final String text, final String attachment) throws IOException { final Message message = this.message(); try { final Multipart multipart = new MimeMultipart(); final BodyPart body = new MimeBodyPart(); body.setText(text); multipart.addBodyPart(body); final BodyPart file = new MimeBodyPart(); file.setText(attachment); file.setFileName("exceptions.txt"); multipart.addBodyPart(file); message.setContent(multipart); } catch (javax.mail.MessagingException ex) { throw new IOException(ex); } return message; } /** * Calculate interval in minutes. * @return The interval */ private long interval() { long interval = Long.parseLong(this.prop("interval")); if (interval < this.MIN_INTERVAL) { Logger.warn( this, "#interval(): set to %d, while minimum allowed is %d", interval, this.MIN_INTERVAL ); interval = this.MIN_INTERVAL; } if (interval > this.MAX_INTERVAL) { Logger.warn( this, "#interval(): set to %d, while maximum allowed is %d", interval, this.MAX_INTERVAL ); interval = this.MAX_INTERVAL; } return interval; } /** * Single defect reported. */ private static final class Defect { /** * The date. */ private final transient Date when = new Date(); /** * The text. */ private final transient String what; /** * Public ctor. * @param txt The text */ public Defect(final String txt) { this.what = txt; } /** * Get date. * @return The date */ public Date date() { return this.when; } /** * Get text. * @return The text */ public String text() { return this.what; } } }
rexsl/rexsl-core/src/main/java/com/rexsl/trap/SmtpBulkNotifier.java
/** * Copyright (c) 2011-2012, ReXSL.com * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. 3) Neither the name of the ReXSL.com nor * the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.rexsl.trap; import com.ymock.util.Logger; import java.io.IOException; import java.util.Date; import java.util.Iterator; import java.util.Properties; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import javax.mail.BodyPart; import javax.mail.Message; import javax.mail.Multipart; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMultipart; /** * Notifier by SMTP, with pre-packaging into bulks. * * <p>Configuration of this notifier is similar to {@link SmtpNotifier}, but * requires one more parameter: {@code interval}. This parameter should contain * an integer number of minutes we should wait before actual delivery of * emails. This option may be critically important if you expect high volume * of exceptions and don't want to receive many individual emails. * * @author Yegor Bugayenko (yegor@rexsl.com) * @version $Id$ * @since 0.3.6 */ @SuppressWarnings("PMD.DoNotUseThreads") public final class SmtpBulkNotifier extends AbstractSmtpNotifier { /** * Maximum allowed interval in minutes. */ private static final long MAX_INTERVAL = 180L; /** * Minimum allowed interval in minutes. */ private static final long MIN_INTERVAL = 5L; /** * Running thread. */ private final transient ScheduledFuture future; /** * List of reported defect. */ private final transient Queue<Defect> defects = new ConcurrentLinkedQueue<Defect>(); /** * Public ctor. 
* @param props The properties */ public SmtpBulkNotifier(final Properties props) { super(props); final long interval = this.interval(); this.future = Executors .newSingleThreadScheduledExecutor() .scheduleAtFixedRate( new Runnable() { @Override public void run() { SmtpBulkNotifier.this.background(); } }, 0L, interval, TimeUnit.MINUTES ); Logger.info( this, "#SmtpBulkNotifier(): started with %dmin interval", interval ); } /** * {@inheritDoc} */ @Override public void notify(final String defect) throws IOException { synchronized (this.defects) { this.defects.add(new Defect(defect)); } } /** * {@inheritDoc} */ @Override public void close() throws IOException { this.background(); if (!this.future.cancel(true)) { throw new IOException("Failed to close scheduled future"); } } /** * Run this on background and send emails. */ private void background() { synchronized (this.defects) { if (!this.defects.isEmpty()) { try { this.send(this.compress()); } catch (IOException ex) { Logger.error(this, "#run(): %[exception]s", ex); } } } } /** * Compress all defects into one message. * @return The message * @throws IOException If some problem inside */ private Message compress() throws IOException { final StringBuilder text = new StringBuilder(); text.append( String.format( "During the last few mins there were %d exception(s):%s%2$s", this.defects.size(), AbstractSmtpNotifier.CRLF ) ); final StringBuilder attachment = new StringBuilder(); final Iterator<Defect> iterator = this.defects.iterator(); while (iterator.hasNext()) { final Defect defect = iterator.next(); text.append(defect.date()).append(AbstractSmtpNotifier.CRLF); attachment.append(defect.text()).append("\n\n"); iterator.remove(); } text.append(AbstractSmtpNotifier.CRLF) .append("Detailed information is attached in text file."); return this.mime(text.toString(), attachment.toString()); } /** * Create MIME message with body and attachment. * @param text The body * @param attachment The attachment * @return The message * @throws IOException If some problem inside */ private Message mime(final String text, final String attachment) throws IOException { final Message message = this.message(); try { final Multipart multipart = new MimeMultipart(); final BodyPart body = new MimeBodyPart(); body.setText(text); multipart.addBodyPart(body); final BodyPart file = new MimeBodyPart(); file.setText(attachment); file.setFileName("exceptions.txt"); multipart.addBodyPart(file); message.setContent(multipart); } catch (javax.mail.MessagingException ex) { throw new IOException(ex); } return message; } /** * Calculate interval in minutes. * @return The interval */ private long interval() { long interval = Long.parseLong(this.prop("interval")); if (interval < this.MIN_INTERVAL) { Logger.warn( this, "#interval(): set to %d, while minimum allowed is %d", interval, this.MIN_INTERVAL ); interval = this.MIN_INTERVAL; } if (interval > this.MAX_INTERVAL) { Logger.warn( this, "#interval(): set to %d, while maximum allowed is %d", interval, this.MAX_INTERVAL ); interval = this.MAX_INTERVAL; } return interval; } /** * Single defect reported. */ private static final class Defect { /** * The date. */ private final transient Date when = new Date(); /** * The text. */ private final transient String what; /** * Public ctor. * @param txt The text */ public Defect(final String txt) { this.what = txt; } /** * Get date. * @return The date */ public Date date() { return this.when; } /** * Get text. * @return The text */ public String text() { return this.what; } } }
refs #376 - fixed
rexsl/rexsl-core/src/main/java/com/rexsl/trap/SmtpBulkNotifier.java
refs #376 - fixed
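The visible difference between the two versions above is that notify() no longer wraps defects.add(...) in a synchronized block; ConcurrentLinkedQueue is already safe for concurrent adds, so the extra lock on the producer side is unnecessary. A self-contained sketch of that point (class and method names are illustrative, not the SmtpBulkNotifier API):

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

public class QueueAddSketch {
    private final Queue<String> defects = new ConcurrentLinkedQueue<>();

    void record(String defect) {
        defects.add(defect); // no synchronized(defects) needed for a single add
    }

    public static void main(String[] args) throws InterruptedException {
        QueueAddSketch sketch = new QueueAddSketch();
        Thread a = new Thread(() -> sketch.record("from thread A"));
        Thread b = new Thread(() -> sketch.record("from thread B"));
        a.start(); b.start(); a.join(); b.join();
        System.out.println(sketch.defects.size()); // 2: both adds survive without external locking
    }
}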
Java
bsd-3-clause
99e42fea89beac4bc14d9d38c2c21a4437974dfe
0
NCIP/cananolab,NCIP/cananolab,NCIP/cananolab
package gov.nih.nci.cananolab.service.publication; import gov.nih.nci.cananolab.domain.particle.Sample; import gov.nih.nci.cananolab.dto.common.PublicationBean; import gov.nih.nci.cananolab.dto.common.UserBean; import gov.nih.nci.cananolab.exception.DuplicateEntriesException; import gov.nih.nci.cananolab.exception.NoAccessException; import gov.nih.nci.cananolab.exception.PublicationException; import java.util.List; /** * Interface defining methods invovled in submiting and searching publications. * * @author tanq * */ public interface PublicationService { /** * Persist a new publication or update an existing publication * * @param publication * @param sampleNames * @param fileData * @param authors * * @throws Exception */ public void savePublication(PublicationBean publicationBean, UserBean user) throws PublicationException, NoAccessException, DuplicateEntriesException; public List<PublicationBean> findPublicationsBy(String publicationTitle, String publicationCategory, String sampleName, String[] researchAreas, String[] keywords, String pubMedId, String digitalObjectId, String[] authors, String[] nanomaterialEntityClassNames, String[] otherNanomaterialEntityTypes, String[] functionalizingEntityClassNames, String[] otherFunctionalizingEntityTypes, String[] functionClassNames, String[] otherFunctionTypes, UserBean user) throws PublicationException; public PublicationBean findPublicationById(String publicationId, UserBean user) throws PublicationException, NoAccessException; public List<PublicationBean> findPublicationsBySampleId(String sampleId, UserBean user) throws PublicationException; public int getNumberOfPublicPublications() throws PublicationException; public void removePublicationFromSample(Sample particle, Long dataId) throws PublicationException, NoAccessException; }
src/gov/nih/nci/cananolab/service/publication/PublicationService.java
package gov.nih.nci.cananolab.service.publication; import gov.nih.nci.cananolab.domain.particle.Sample; import gov.nih.nci.cananolab.dto.common.PublicationBean; import gov.nih.nci.cananolab.dto.common.UserBean; import gov.nih.nci.cananolab.exception.DuplicateEntriesException; import gov.nih.nci.cananolab.exception.NoAccessException; import gov.nih.nci.cananolab.exception.PublicationException; import java.util.List; /** * Interface defining methods invovled in submiting and searching publications. * * @author tanq * */ public interface PublicationService { /** * Persist a new publication or update an existing publication * * @param publication * @param sampleNames * @param fileData * @param authors * * @throws Exception */ public void savePublication(PublicationBean publicationBean, UserBean user) throws PublicationException, NoAccessException, DuplicateEntriesException; public List<PublicationBean> findPublicationsBy(String publicationTitle, String publicationCategory, String sampleName, String[] researchAreas, String[] keywords, String pubMedId, String digitalObjectId, String[] authors, String[] nanomaterialEntityClassNames, String[] otherNanoparticleTypes, String[] functionalizingEntityClassNames, String[] otherFunctionalizingEntityTypes, String[] functionClassNames, String[] otherFunctionTypes, UserBean user) throws PublicationException; public PublicationBean findPublicationById(String publicationId, UserBean user) throws PublicationException, NoAccessException; public List<PublicationBean> findPublicationsBySampleId(String sampleId, UserBean user) throws PublicationException; public int getNumberOfPublicPublications() throws PublicationException; public void removePublicationFromSample(Sample particle, Long dataId) throws PublicationException, NoAccessException; }
Updated the word "nanoparticle" to "sample" and "nanoparticle entity" to "nanomaterial entity". SVN-Revision: 16053
src/gov/nih/nci/cananolab/service/publication/PublicationService.java
Updated the word "nanoparticle" to "sample" and "nanoparticle entity" to "nanomaterial entity".
Java
mit
8291216ce028a0419e4195e57ed66f8029f28c1d
0
Geforce132/SecurityCraft
package net.geforcemods.securitycraft.screen; import java.util.List; import com.mojang.blaze3d.systems.RenderSystem; import com.mojang.blaze3d.vertex.PoseStack; import net.geforcemods.securitycraft.SCContent; import net.geforcemods.securitycraft.SecurityCraft; import net.geforcemods.securitycraft.inventory.DisguiseModuleMenu; import net.geforcemods.securitycraft.network.server.UpdateNBTTagOnServer; import net.geforcemods.securitycraft.screen.components.StateSelector; import net.geforcemods.securitycraft.util.IHasExtraAreas; import net.geforcemods.securitycraft.util.Utils; import net.minecraft.client.gui.screens.inventory.AbstractContainerScreen; import net.minecraft.client.renderer.Rect2i; import net.minecraft.nbt.CompoundTag; import net.minecraft.nbt.NbtUtils; import net.minecraft.network.chat.Component; import net.minecraft.network.chat.TranslatableComponent; import net.minecraft.resources.ResourceLocation; import net.minecraft.world.entity.player.Inventory; import net.minecraft.world.item.ItemStack; public class DisguiseModuleScreen extends AbstractContainerScreen<DisguiseModuleMenu> implements IHasExtraAreas { private static final ResourceLocation TEXTURE = new ResourceLocation("securitycraft:textures/gui/container/customize1.png"); private final TranslatableComponent disguiseModuleName = Utils.localize(SCContent.DISGUISE_MODULE.get().getDescriptionId()); private StateSelector stateSelector; public DisguiseModuleScreen(DisguiseModuleMenu menu, Inventory inv, Component title) { super(menu, inv, title); } @Override protected void init() { super.init(); leftPos += 90; stateSelector = addRenderableWidget(new StateSelector(menu, title, leftPos - 190, topPos + 7, 0, 200, 15, -2.725F, -1.2F)); stateSelector.init(minecraft, width, height); } @Override public void render(PoseStack pose, int mouseX, int mouseY, float partialTicks) { super.render(pose, mouseX, mouseY, partialTicks); if (getSlotUnderMouse() != null && !getSlotUnderMouse().getItem().isEmpty()) renderTooltip(pose, getSlotUnderMouse().getItem(), mouseX, mouseY); } @Override protected void renderLabels(PoseStack pose, int mouseX, int mouseY) { font.draw(pose, disguiseModuleName, imageWidth / 2 - font.width(disguiseModuleName) / 2, 6, 0x404040); } @Override protected void renderBg(PoseStack pose, float partialTicks, int mouseX, int mouseY) { renderBackground(pose); RenderSystem.setShaderColor(1.0F, 1.0F, 1.0F, 1.0F); RenderSystem._setShaderTexture(0, TEXTURE); blit(pose, leftPos, topPos, 0, 0, imageWidth, imageHeight); } @Override public boolean mouseDragged(double mouseX, double mouseY, int button, double dragX, double dragY) { if (stateSelector != null && stateSelector.mouseDragged(mouseX, mouseY, button, dragX, dragY)) return true; return super.mouseDragged(mouseX, mouseY, button, dragX, dragY); } @Override public void onClose() { super.onClose(); if (!menu.getSlot(0).getItem().isEmpty() && stateSelector.getState() != null) { ItemStack module = menu.getInventory().getModule(); CompoundTag moduleTag = module.getOrCreateTag(); moduleTag.put("SavedState", NbtUtils.writeBlockState(stateSelector.getState())); moduleTag.putInt("StandingOrWall", stateSelector.getStandingOrWallType().ordinal()); SecurityCraft.channel.sendToServer(new UpdateNBTTagOnServer(module)); } } @Override public List<Rect2i> getExtraAreas() { if (stateSelector != null) return stateSelector.getGuiExtraAreas(); else return List.of(); } }
src/main/java/net/geforcemods/securitycraft/screen/DisguiseModuleScreen.java
package net.geforcemods.securitycraft.screen; import java.util.List; import com.mojang.blaze3d.systems.RenderSystem; import com.mojang.blaze3d.vertex.PoseStack; import net.geforcemods.securitycraft.SCContent; import net.geforcemods.securitycraft.SecurityCraft; import net.geforcemods.securitycraft.inventory.DisguiseModuleMenu; import net.geforcemods.securitycraft.network.server.UpdateNBTTagOnServer; import net.geforcemods.securitycraft.screen.components.StateSelector; import net.geforcemods.securitycraft.util.IHasExtraAreas; import net.geforcemods.securitycraft.util.Utils; import net.minecraft.client.gui.screens.inventory.AbstractContainerScreen; import net.minecraft.client.renderer.Rect2i; import net.minecraft.nbt.CompoundTag; import net.minecraft.nbt.NbtUtils; import net.minecraft.network.chat.Component; import net.minecraft.network.chat.TranslatableComponent; import net.minecraft.resources.ResourceLocation; import net.minecraft.world.entity.player.Inventory; import net.minecraft.world.item.ItemStack; public class DisguiseModuleScreen extends AbstractContainerScreen<DisguiseModuleMenu> implements IHasExtraAreas { private static final ResourceLocation TEXTURE = new ResourceLocation("securitycraft:textures/gui/container/customize1.png"); private final TranslatableComponent disguiseModuleName = Utils.localize(SCContent.DISGUISE_MODULE.get().getDescriptionId()); private StateSelector stateSelector; public DisguiseModuleScreen(DisguiseModuleMenu menu, Inventory inv, Component title) { super(menu, inv, title); } @Override protected void init() { super.init(); leftPos += 90; stateSelector = addRenderableWidget(new StateSelector(menu, title, leftPos - 190, topPos + 7, 0, 183, 10, -3.0F, -1.0F)); stateSelector.init(minecraft, width, height); } @Override public void render(PoseStack pose, int mouseX, int mouseY, float partialTicks) { super.render(pose, mouseX, mouseY, partialTicks); if (getSlotUnderMouse() != null && !getSlotUnderMouse().getItem().isEmpty()) renderTooltip(pose, getSlotUnderMouse().getItem(), mouseX, mouseY); } @Override protected void renderLabels(PoseStack pose, int mouseX, int mouseY) { font.draw(pose, disguiseModuleName, imageWidth / 2 - font.width(disguiseModuleName) / 2, 6, 0x404040); } @Override protected void renderBg(PoseStack pose, float partialTicks, int mouseX, int mouseY) { renderBackground(pose); RenderSystem.setShaderColor(1.0F, 1.0F, 1.0F, 1.0F); RenderSystem._setShaderTexture(0, TEXTURE); blit(pose, leftPos, topPos, 0, 0, imageWidth, imageHeight); } @Override public boolean mouseDragged(double mouseX, double mouseY, int button, double dragX, double dragY) { if (stateSelector != null && stateSelector.mouseDragged(mouseX, mouseY, button, dragX, dragY)) return true; return super.mouseDragged(mouseX, mouseY, button, dragX, dragY); } @Override public void onClose() { super.onClose(); if (!menu.getSlot(0).getItem().isEmpty() && stateSelector.getState() != null) { ItemStack module = menu.getInventory().getModule(); CompoundTag moduleTag = module.getOrCreateTag(); moduleTag.put("SavedState", NbtUtils.writeBlockState(stateSelector.getState())); moduleTag.putInt("StandingOrWall", stateSelector.getStandingOrWallType().ordinal()); SecurityCraft.channel.sendToServer(new UpdateNBTTagOnServer(module)); } } @Override public List<Rect2i> getExtraAreas() { if (stateSelector != null) return stateSelector.getGuiExtraAreas(); else return List.of(); } }
change disguise module state preview position
src/main/java/net/geforcemods/securitycraft/screen/DisguiseModuleScreen.java
change disguise module state preview position
Java
mit
a983c824c359795d3b3b8fbc11068ded3d63c6d3
0
CytoDev/FrequencyCalculator
package io.cytodev.freqcalc.fragments; import android.app.Activity; import android.app.AlertDialog; import android.content.ComponentName; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.content.res.Resources; import android.os.Bundle; import android.preference.ListPreference; import android.preference.Preference; import android.preference.PreferenceFragment; import android.preference.PreferenceManager; import android.text.Html; import android.text.method.LinkMovementMethod; import android.util.Log; import android.widget.TextView; import android.widget.Toast; import java.io.FileNotFoundException; import java.io.InputStream; import io.cytodev.freqcalc.R; import io.cytodev.freqcalc.activities.CytoActivity; import io.cytodev.freqcalc.activities.PreferencesActivity; import io.cytodev.freqcalc.activities.TranslationsActivity; /** * io.cytodev.freqcalc.fragments "Frequency Calculator" * 2016/01/14 @ 13:42 * * @author Roel Walraven <cytodev@gmail.com> */ public class NestedPreferenceFragment extends PreferenceFragment { private final static String TAG = NestedPreferenceFragment.class.getSimpleName(); private SharedPreferences.OnSharedPreferenceChangeListener changeListener; private Context context; private static int subTitle = -1; @Override public void onCreate(Bundle savedInstanceState) { Log.v(TAG, "Called onCreate"); super.onCreate(savedInstanceState); addPreferencesFromResource(getArguments().getInt("KEY")); NestedPreferenceFragment.subTitle = getArguments().getInt("NAME"); if(((PreferencesActivity) getActivity()).getSupportActionBar() != null) { if(getArguments().getInt("NAME") != R.string.action_settings) { ((PreferencesActivity) getActivity()).getSupportActionBar().setSubtitle(subTitle); } } setupListeners(); setPlurals(); } @Override public void onPause() { Log.v(TAG, "Called onPause"); super.onPause(); getPreferenceManager().getSharedPreferences() .unregisterOnSharedPreferenceChangeListener(changeListener); } @Override public void onResume() { Log.v(TAG, "Called onResume"); super.onResume(); getPreferenceManager().getSharedPreferences() .registerOnSharedPreferenceChangeListener(changeListener); } @Override public void onAttach(Context context) { Log.v(TAG, "Called onAttach"); super.onAttach(context); this.context = context; } @Override @SuppressWarnings("deprecation") public void onAttach(Activity activity) { Log.v(TAG, "Called onAttach"); Log.w(TAG, "onAttach(Activity activity) is deprecated"); super.onAttach(activity); this.context = activity; } public static NestedPreferenceFragment newInstance(int key, int name) { Log.v(TAG, "Creating new instance"); NestedPreferenceFragment fragment = new NestedPreferenceFragment(); Bundle args = new Bundle(); args.putInt("KEY", key); args.putInt("NAME", name); fragment.setArguments(args); return fragment; } private void attachClickListener(String key, Preference.OnPreferenceClickListener listener) { Log.d(TAG, "Attaching listener to " + key); if(findPreference(key) == null) return; findPreference(key).setOnPreferenceClickListener(listener); } private void setupListeners() { Log.v(TAG, "Setting up listeners"); final Context c = this.context; changeListener = new SharedPreferences.OnSharedPreferenceChangeListener() { @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { Log.v(TAG, "Called onSharedPreferenceChanged"); switch(key) { case 
"pref_appearance_theme": changeTheme(false, false); case "pref_appearance_theme_dark": PreferencesActivity prefs = (PreferencesActivity) getActivity(); Bundle bundle = new Bundle(); Intent restart = prefs.getIntent(); bundle.putInt("pref", R.xml.prefs_appearance); bundle.putInt("name", R.string.pref_cat_appearance); restart.putExtras(bundle); prefs.finish(); startActivity(restart); prefs.overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out); break; case "pref_appearance_icon": changeTheme(false, true); break; case "pref_general_averagenum": if(sharedPreferences.getString(key, "4").equals("-1")) { Toast.makeText(context, R.string.pref_general_averagenum_unlimitedHelp, Toast.LENGTH_SHORT).show(); } break; } } }; final Preference.OnPreferenceClickListener nestedListener = new Preference.OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { Log.v(TAG, "Called onPreferenceClick (nestedListener)"); Log.d(TAG, "Clicked on " + preference.getKey()); int instance = -1; switch(preference.getKey()) { case "pref_cat_about": instance = R.xml.prefs_about; break; case "pref_cat_appearance": instance = R.xml.prefs_appearance; break; case "pref_cat_general": instance = R.xml.prefs_general; break; case "pref_cat_interface": instance = R.xml.prefs_interface; break; default: break; } if(instance != -1) { getFragmentManager() .beginTransaction() .setCustomAnimations(R.animator.push_left_in, R.animator.push_left_out, R.animator.push_right_in, R.animator.push_right_out) .replace(R.id.rootView, newInstance(instance, preference.getTitleRes())) .addToBackStack(preference.getKey()) .commit(); } return true; } }; final Preference.OnPreferenceClickListener cytoLauncher = new Preference.OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { PreferencesActivity prefs = (PreferencesActivity) getActivity(); Intent cytoLauncher = new Intent(prefs, CytoActivity.class); prefs.startActivity(cytoLauncher); return true; } }; final Preference.OnPreferenceClickListener dialogLauncher = new Preference.OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { AlertDialog.Builder dialog = new AlertDialog.Builder(c); dialog.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); } }); try { Resources res = getResources(); InputStream ins; switch(preference.getKey()) { case "pref_about_license": dialog.setTitle(R.string.pref_about_license); ins = res.openRawResource(R.raw.freqcalc); break; case "pref_about_privacy": dialog.setTitle(R.string.pref_about_privacy); ins = res.openRawResource(R.raw.privacy); break; default: throw new FileNotFoundException(); } byte[] b = new byte[ins.available()]; ins.read(b); dialog.setMessage(Html.fromHtml(new String(b))); } catch(Exception e) { e.printStackTrace(); dialog.setMessage(e.getLocalizedMessage()); } finally { AlertDialog alertDialog = dialog.create(); alertDialog.show(); ((TextView) alertDialog.findViewById(android.R.id.message)).setMovementMethod(LinkMovementMethod.getInstance()); } return true; } }; final Preference.OnPreferenceClickListener translationsLauncher = new Preference.OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference){ PreferencesActivity prefs = (PreferencesActivity) getActivity(); Intent translationsLauncher = new Intent(prefs, TranslationsActivity.class); 
translationsLauncher.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NO_ANIMATION); prefs.startActivity(translationsLauncher); prefs.overridePendingTransition(R.anim.push_left_in, R.anim.push_left_out); return true; } }; Log.v(TAG, "Attaching listeners"); attachClickListener("pref_cat_about", nestedListener); attachClickListener("pref_about_developer", cytoLauncher); attachClickListener("pref_about_license", dialogLauncher); attachClickListener("pref_about_privacy", dialogLauncher); attachClickListener("pref_about_translations", translationsLauncher); attachClickListener("pref_cat_appearance", nestedListener); attachClickListener("pref_cat_general", nestedListener); attachClickListener("pref_cat_interface", nestedListener); } private void setPlurals() { ListPreference decimals = (ListPreference) findPreference("pref_general_decimals"); ListPreference average = (ListPreference) findPreference("pref_general_averagenum"); if(decimals != null) { CharSequence[] decimalEntries = { getResources().getQuantityString(R.plurals.plural_decimals, 1, 1), getResources().getQuantityString(R.plurals.plural_decimals, 2, 2), getResources().getQuantityString(R.plurals.plural_decimals, 3, 3), getResources().getQuantityString(R.plurals.plural_decimals, 4, 4), getResources().getQuantityString(R.plurals.plural_decimals, 5, 5), getResources().getQuantityString(R.plurals.plural_decimals, 6, 6), getResources().getQuantityString(R.plurals.plural_decimals, 7, 7) }; decimals.setEntries(decimalEntries); } if(average != null) { CharSequence[] averageEntries = { getResources().getQuantityString(R.plurals.plural_taps, 1, 1), getResources().getQuantityString(R.plurals.plural_taps, 2, 2), getResources().getQuantityString(R.plurals.plural_taps, 3, 3), getResources().getQuantityString(R.plurals.plural_taps, 4, 4), getResources().getString(R.string.unlimited) }; average.setEntries(averageEntries); } } private void changeTheme(boolean revert, boolean iconChange) { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context); if(revert) { context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.WhiteSmoke"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.DodgerBlue"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.SpringBud"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.ElectricPurple"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.OrangePeel"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.HollywoodCerise"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new 
ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.SpringGreen"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.WhiteSmoke"), PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP); return; } if(prefs.getBoolean("pref_appearance_icon", false)) { context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.WhiteSmoke"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.DodgerBlue"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.SpringBud"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.ElectricPurple"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.OrangePeel"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.HollywoodCerise"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.SpringGreen"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity." + prefs.getString("pref_appearance_theme", "WhiteSmoke")), PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP); } else { if(iconChange) { new AlertDialog.Builder(context) .setTitle(R.string.dialog_icon_title) .setMessage(R.string.dialog_icon_message) .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { changeTheme(true, false); } }) .setNegativeButton(android.R.string.no, null) .show(); } } } }
app/src/main/java/io/cytodev/freqcalc/fragments/NestedPreferenceFragment.java
package io.cytodev.freqcalc.fragments; import android.app.Activity; import android.app.AlertDialog; import android.content.ComponentName; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.content.res.Resources; import android.os.Bundle; import android.preference.ListPreference; import android.preference.Preference; import android.preference.PreferenceFragment; import android.preference.PreferenceManager; import android.text.Html; import android.text.method.LinkMovementMethod; import android.util.Log; import android.widget.TextView; import android.widget.Toast; import java.io.FileNotFoundException; import java.io.InputStream; import io.cytodev.freqcalc.R; import io.cytodev.freqcalc.activities.CytoActivity; import io.cytodev.freqcalc.activities.PreferencesActivity; import io.cytodev.freqcalc.activities.TranslationsActivity; /** * io.cytodev.freqcalc.fragments "Frequency Calculator" * 2016/01/14 @ 13:42 * * @author Roel Walraven <cytodev@gmail.com> */ public class NestedPreferenceFragment extends PreferenceFragment { private final static String TAG = NestedPreferenceFragment.class.getSimpleName(); private SharedPreferences.OnSharedPreferenceChangeListener changeListener; private Context context; private static int subTitle = -1; @Override public void onCreate(Bundle savedInstanceState) { Log.v(TAG, "Called onCreate"); super.onCreate(savedInstanceState); addPreferencesFromResource(getArguments().getInt("KEY")); NestedPreferenceFragment.subTitle = getArguments().getInt("NAME"); if(((PreferencesActivity) getActivity()).getSupportActionBar() != null) { if(getArguments().getInt("NAME") != R.string.action_settings) { ((PreferencesActivity) getActivity()).getSupportActionBar().setSubtitle(subTitle); } } setupListeners(); setPlurals(); } @Override public void onPause() { Log.v(TAG, "Called onPause"); super.onPause(); getPreferenceManager().getSharedPreferences() .unregisterOnSharedPreferenceChangeListener(changeListener); } @Override public void onResume() { Log.v(TAG, "Called onResume"); super.onResume(); getPreferenceManager().getSharedPreferences() .registerOnSharedPreferenceChangeListener(changeListener); } @Override public void onAttach(Context context) { Log.v(TAG, "Called onAttach"); super.onAttach(context); this.context = context; } @Override @SuppressWarnings("deprecation") public void onAttach(Activity activity) { Log.v(TAG, "Called onAttach"); Log.w(TAG, "onAttach(Activity activity) is deprecated"); super.onAttach(activity); this.context = activity; } public static NestedPreferenceFragment newInstance(int key, int name) { Log.v(TAG, "Creating new instance"); NestedPreferenceFragment fragment = new NestedPreferenceFragment(); Bundle args = new Bundle(); args.putInt("KEY", key); args.putInt("NAME", name); fragment.setArguments(args); return fragment; } private void attachClickListener(String key, Preference.OnPreferenceClickListener listener) { Log.d(TAG, "Attaching listener to " + key); if(findPreference(key) == null) return; findPreference(key).setOnPreferenceClickListener(listener); } private void setupListeners() { Log.v(TAG, "Setting up listeners"); final Context c = this.context; changeListener = new SharedPreferences.OnSharedPreferenceChangeListener() { @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { Log.v(TAG, "Called onSharedPreferenceChanged"); switch(key) { case 
"pref_appearance_theme": changeTheme(false, false); case "pref_appearance_theme_dark": PreferencesActivity prefs = (PreferencesActivity) getActivity(); Bundle bundle = new Bundle(); Intent restart = prefs.getIntent(); bundle.putInt("pref", R.xml.prefs_appearance); bundle.putInt("name", R.string.pref_cat_appearance); restart.putExtras(bundle); prefs.finish(); startActivity(restart); prefs.overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out); break; case "pref_appearance_icon": changeTheme(false, true); break; case "pref_general_averagenum": if(sharedPreferences.getString(key, "4").equals("-1")) { Toast.makeText(context, R.string.pref_general_averagenum_unlimitedHelp, Toast.LENGTH_SHORT).show(); } break; } } }; final Preference.OnPreferenceClickListener nestedListener = new Preference.OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { Log.v(TAG, "Called onPreferenceClick (nestedListener)"); Log.d(TAG, "Clicked on " + preference.getKey()); int instance = -1; switch(preference.getKey()) { case "pref_cat_about": instance = R.xml.prefs_about; break; case "pref_cat_appearance": instance = R.xml.prefs_appearance; break; case "pref_cat_general": instance = R.xml.prefs_general; break; case "pref_cat_interface": instance = R.xml.prefs_interface; break; default: break; } if(instance != -1) { getFragmentManager() .beginTransaction() .setCustomAnimations(R.animator.push_left_in, R.animator.push_left_out, R.animator.push_right_in, R.animator.push_right_out) .replace(R.id.rootView, newInstance(instance, preference.getTitleRes())) .addToBackStack(preference.getKey()) .commit(); } return true; } }; final Preference.OnPreferenceClickListener cytoLauncher = new Preference.OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { PreferencesActivity prefs = (PreferencesActivity) getActivity(); Intent cytoLauncher = new Intent(prefs, CytoActivity.class); prefs.startActivity(cytoLauncher); return true; } }; final Preference.OnPreferenceClickListener licenseLauncher = new Preference.OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { AlertDialog.Builder licenseDialog = new AlertDialog.Builder(c); licenseDialog.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); } }); try { Resources res = getResources(); InputStream ins; switch(preference.getKey()) { case "pref_about_license": licenseDialog.setTitle(R.string.pref_about_license); ins = res.openRawResource(R.raw.freqcalc); break; default: throw new FileNotFoundException(); } byte[] b = new byte[ins.available()]; ins.read(b); licenseDialog.setMessage(Html.fromHtml(new String(b))); } catch(Exception e) { e.printStackTrace(); licenseDialog.setMessage(e.getLocalizedMessage()); } finally { AlertDialog dialog = licenseDialog.create(); dialog.show(); ((TextView) dialog.findViewById(android.R.id.message)).setMovementMethod(LinkMovementMethod.getInstance()); } return true; } }; final Preference.OnPreferenceClickListener translationsLauncher = new Preference.OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference){ PreferencesActivity prefs = (PreferencesActivity) getActivity(); Intent translationsLauncher = new Intent(prefs, TranslationsActivity.class); translationsLauncher.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NO_ANIMATION); 
prefs.startActivity(translationsLauncher); prefs.overridePendingTransition(R.anim.push_left_in, R.anim.push_left_out); return true; } }; Log.v(TAG, "Attaching listeners"); attachClickListener("pref_cat_about", nestedListener); attachClickListener("pref_about_developer", cytoLauncher); attachClickListener("pref_about_license", licenseLauncher); attachClickListener("pref_about_translations", translationsLauncher); attachClickListener("pref_cat_appearance", nestedListener); attachClickListener("pref_cat_general", nestedListener); attachClickListener("pref_cat_interface", nestedListener); } private void setPlurals() { ListPreference decimals = (ListPreference) findPreference("pref_general_decimals"); ListPreference average = (ListPreference) findPreference("pref_general_averagenum"); if(decimals != null) { CharSequence[] decimalEntries = { getResources().getQuantityString(R.plurals.plural_decimals, 1, 1), getResources().getQuantityString(R.plurals.plural_decimals, 2, 2), getResources().getQuantityString(R.plurals.plural_decimals, 3, 3), getResources().getQuantityString(R.plurals.plural_decimals, 4, 4), getResources().getQuantityString(R.plurals.plural_decimals, 5, 5), getResources().getQuantityString(R.plurals.plural_decimals, 6, 6), getResources().getQuantityString(R.plurals.plural_decimals, 7, 7) }; decimals.setEntries(decimalEntries); } if(average != null) { CharSequence[] averageEntries = { getResources().getQuantityString(R.plurals.plural_taps, 1, 1), getResources().getQuantityString(R.plurals.plural_taps, 2, 2), getResources().getQuantityString(R.plurals.plural_taps, 3, 3), getResources().getQuantityString(R.plurals.plural_taps, 4, 4), getResources().getString(R.string.unlimited) }; average.setEntries(averageEntries); } } private void changeTheme(boolean revert, boolean iconChange) { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context); if(revert) { context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.WhiteSmoke"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.DodgerBlue"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.SpringBud"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.ElectricPurple"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.OrangePeel"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.HollywoodCerise"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.SpringGreen"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, 
PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.WhiteSmoke"), PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP); return; } if(prefs.getBoolean("pref_appearance_icon", false)) { context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.WhiteSmoke"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.DodgerBlue"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.SpringBud"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.ElectricPurple"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.OrangePeel"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.HollywoodCerise"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity.SpringGreen"), PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); context.getPackageManager().setComponentEnabledSetting(new ComponentName("io.cytodev.freqcalc", "io.cytodev.freqcalc.activities.MainActivity." + prefs.getString("pref_appearance_theme", "WhiteSmoke")), PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP); } else { if(iconChange) { new AlertDialog.Builder(context) .setTitle(R.string.dialog_icon_title) .setMessage(R.string.dialog_icon_message) .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { changeTheme(true, false); } }) .setNegativeButton(android.R.string.no, null) .show(); } } } }
Add support for multiple dialog preferences
app/src/main/java/io/cytodev/freqcalc/fragments/NestedPreferenceFragment.java
Add support for multiple dialog preferences
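The change recorded above folds the old single-purpose licenseLauncher into one dialogLauncher that switches on the clicked preference key and is attached to both pref_about_license and pref_about_privacy. A minimal sketch of that key-to-raw-resource dialog pattern, pulled out of the fragment for readability; the helper name showHtmlDialog is illustrative and not part of the commit, while the resource and string ids come from the record, and the fragment's existing imports (AlertDialog, Html, LinkMovementMethod, TextView, InputStream, FileNotFoundException) are assumed:

    // Illustrative helper: maps a preference key to a raw HTML resource and shows it
    // in an AlertDialog with clickable links, mirroring the dialogLauncher added above.
    private void showHtmlDialog(Context context, String preferenceKey) {
        AlertDialog.Builder builder = new AlertDialog.Builder(context);
        builder.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                dialog.dismiss();
            }
        });
        try {
            int rawId;
            switch (preferenceKey) {
                case "pref_about_license":
                    builder.setTitle(R.string.pref_about_license);
                    rawId = R.raw.freqcalc;
                    break;
                case "pref_about_privacy":
                    builder.setTitle(R.string.pref_about_privacy);
                    rawId = R.raw.privacy;
                    break;
                default:
                    throw new FileNotFoundException(preferenceKey);
            }
            InputStream ins = context.getResources().openRawResource(rawId);
            byte[] bytes = new byte[ins.available()]; // mirrors the commit; acceptable for small bundled resources
            ins.read(bytes);
            ins.close();
            builder.setMessage(Html.fromHtml(new String(bytes)));
        } catch (Exception e) {
            builder.setMessage(e.getLocalizedMessage());
        }
        AlertDialog alertDialog = builder.create();
        alertDialog.show();
        // setMovementMethod must run after show(), once the message TextView exists.
        ((TextView) alertDialog.findViewById(android.R.id.message))
                .setMovementMethod(LinkMovementMethod.getInstance());
    }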
Java
mit
af83be32ef8a9064db63a79840f80ed0cab2235e
0
CS2103JAN2017-W14-B2/main,CS2103JAN2017-W14-B2/main
package guitests; import static org.junit.Assert.assertTrue; import java.util.List; import org.junit.Test; import com.google.common.collect.Lists; import guitests.guihandles.TaskCardHandle; import seedu.taskboss.commons.core.Messages; import seedu.taskboss.commons.exceptions.IllegalValueException; import seedu.taskboss.logic.commands.AddCommand; import seedu.taskboss.model.category.UniqueCategoryList.DuplicateCategoryException; import seedu.taskboss.testutil.TestCategory; import seedu.taskboss.testutil.TestTask; import seedu.taskboss.testutil.TestUtil; public class AddCommandTest extends TaskBossGuiTest { @Test public void add() throws DuplicateCategoryException, IllegalValueException { TestTask[] currentList = td.getTypicalTasks(); //add one task TestTask taskToAdd = td.taskH; assertAddSuccess(false, false, Lists.newArrayList(new TestCategory(AddCommand.BUILT_IN_ALL_TASKS, td.taskA)), taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another task taskToAdd = td.taskI; assertAddSuccess(false, false, Lists.newArrayList(new TestCategory(AddCommand.BUILT_IN_ALL_TASKS, td.taskA)), taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another task using short command taskToAdd = td.taskK; assertAddSuccess(false, true, Lists.newArrayList(new TestCategory(AddCommand.BUILT_IN_ALL_TASKS, td.taskA)), taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another task using short command '+' taskToAdd = td.taskL; assertAddSuccess(true, false, Lists.newArrayList(new TestCategory(AddCommand.BUILT_IN_ALL_TASKS, td.taskA)), taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add duplicate task commandBox.runCommand(td.taskH.getAddCommand()); assertResultMessage(AddCommand.MESSAGE_DUPLICATE_TASK); assertTrue(taskListPanel.isListMatching(currentList)); //add invalid dates task commandBox.runCommand(td.taskJ.getAddCommand()); assertResultMessage(AddCommand.ERROR_INVALID_DATES); assertTrue(taskListPanel.isListMatching(currentList)); //add to empty list commandBox.runCommand("clear"); assertAddSuccess(false, false, Lists.newArrayList(new TestCategory(AddCommand.BUILT_IN_ALL_TASKS, td.taskA), new TestCategory("Friends", td.taskA)), td.taskA); //invalid command commandBox.runCommand("adds new task"); assertResultMessage(Messages.MESSAGE_UNKNOWN_COMMAND); } private void assertAddSuccess(boolean isPlusSign, boolean isShortCommand, List<TestCategory> expectedCategoryList, TestTask taskToAdd, TestTask... currentList) throws DuplicateCategoryException, IllegalValueException { if (isShortCommand) { commandBox.runCommand(taskToAdd.getShortAddCommand()); } else if (isPlusSign) { commandBox.runCommand(taskToAdd.getAddCommandPlus()); } else { commandBox.runCommand(taskToAdd.getAddCommand()); } //confirm the new card contains the right data TaskCardHandle addedCard = taskListPanel.navigateToTask(taskToAdd.getName().fullName); assertMatching(taskToAdd, addedCard); //confirm the list now contains all previous tasks plus the new task TestTask[] expectedList = TestUtil.addTasksToList(currentList, taskToAdd); assertTrue(taskListPanel.isListMatching(expectedList)); assertTrue(categoryListPanel.isListMatching(expectedCategoryList)); } }
src/test/java/guitests/AddCommandTest.java
package guitests; import static org.junit.Assert.assertTrue; import java.util.List; import org.junit.Test; import com.google.common.collect.Lists; import guitests.guihandles.TaskCardHandle; import seedu.taskboss.commons.core.Messages; import seedu.taskboss.commons.exceptions.IllegalValueException; import seedu.taskboss.logic.commands.AddCommand; import seedu.taskboss.model.category.UniqueCategoryList.DuplicateCategoryException; import seedu.taskboss.testutil.TestCategory; import seedu.taskboss.testutil.TestTask; import seedu.taskboss.testutil.TestUtil; public class AddCommandTest extends TaskBossGuiTest { @Test public void add() throws DuplicateCategoryException, IllegalValueException { TestTask[] currentList = td.getTypicalTasks(); //add one task TestTask taskToAdd = td.taskH; assertAddSuccess(false, false, Lists.newArrayList(new TestCategory (AddCommand.BUILT_IN_ALL_TASKS, td.taskA)), taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another task taskToAdd = td.taskI; assertAddSuccess(false, false, Lists.newArrayList(new TestCategory (AddCommand.BUILT_IN_ALL_TASKS, td.taskA)), taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another task using short command taskToAdd = td.taskK; assertAddSuccess(false, true, Lists.newArrayList(new TestCategory (AddCommand.BUILT_IN_ALL_TASKS, td.taskA)), taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another task using short command '+' taskToAdd = td.taskL; assertAddSuccess(true, false, Lists.newArrayList(new TestCategory (AddCommand.BUILT_IN_ALL_TASKS, td.taskA)), taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add duplicate task commandBox.runCommand(td.taskH.getAddCommand()); assertResultMessage(AddCommand.MESSAGE_DUPLICATE_TASK); assertTrue(taskListPanel.isListMatching(currentList)); //add invalid dates task commandBox.runCommand(td.taskJ.getAddCommand()); assertResultMessage(AddCommand.ERROR_INVALID_DATES); assertTrue(taskListPanel.isListMatching(currentList)); //add to empty list commandBox.runCommand("clear"); assertAddSuccess(false, false, Lists.newArrayList(new TestCategory (AddCommand.BUILT_IN_ALL_TASKS, td.taskA), new TestCategory("Friends", td.taskA)), td.taskA); //invalid command commandBox.runCommand("adds new task"); assertResultMessage(Messages.MESSAGE_UNKNOWN_COMMAND); } private void assertAddSuccess(boolean isPlusSign, boolean isShortCommand, List<TestCategory> expectedCategoryList, TestTask taskToAdd, TestTask... currentList) throws DuplicateCategoryException, IllegalValueException { if (isShortCommand) { commandBox.runCommand(taskToAdd.getShortAddCommand()); } else if (isPlusSign) { commandBox.runCommand(taskToAdd.getAddCommandPlus()); } else { commandBox.runCommand(taskToAdd.getAddCommand()); } //confirm the new card contains the right data TaskCardHandle addedCard = taskListPanel.navigateToTask(taskToAdd.getName().fullName); assertMatching(taskToAdd, addedCard); //confirm the list now contains all previous tasks plus the new task TestTask[] expectedList = TestUtil.addTasksToList(currentList, taskToAdd); assertTrue(taskListPanel.isListMatching(expectedList)); assertTrue(categoryListPanel.isListMatching(expectedCategoryList)); } }
Fix very confusing travis errors
src/test/java/guitests/AddCommandTest.java
Fix very confusing travis errors
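The only difference between new_contents and old_contents in this record is whitespace: the old test separated the constructor name from its argument list, writing new TestCategory with a line break before the opening parenthesis. That is the kind of thing a Checkstyle whitespace rule (for example MethodParamPad) rejects on CI, which would explain the "very confusing travis errors"; the record carries no CI log, so this is an inference. The two wrapping styles, shown with the record's own call rather than standalone code:

    // Old wrapping, assumed to trip the Checkstyle whitespace check:
    assertAddSuccess(false, false, Lists.newArrayList(new TestCategory
            (AddCommand.BUILT_IN_ALL_TASKS, td.taskA)), taskToAdd, currentList);

    // New wrapping from the fixed test: keep "TestCategory(" together and break
    // after a comma instead when the line runs long.
    assertAddSuccess(false, false,
            Lists.newArrayList(new TestCategory(AddCommand.BUILT_IN_ALL_TASKS, td.taskA)),
            taskToAdd, currentList);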
Java
mit
4417a9d111ae03c8902041e67e8925319f2c030e
0
illuminoo/yasea,illuminoo/yasea,illuminoo/yasea,illuminoo/yasea,illuminoo/yasea,illuminoo/yasea
package net.ossrs.yasea; import android.media.MediaCodec; import android.media.MediaFormat; import android.util.Log; import com.github.faucamp.simplertmp.DefaultRtmpPublisher; import com.github.faucamp.simplertmp.RtmpHandler; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicInteger; /** * Created by winlin on 5/2/15. * Updated by leoma on 4/1/16. * to POST the h.264/avc annexb frame over RTMP. * * @see android.media.MediaMuxer https://developer.android.com/reference/android/media/MediaMuxer.html */ public class SrsFlvMuxer { private static final int VIDEO_ALLOC_SIZE = 128 * 1024; private static final int AUDIO_ALLOC_SIZE = 4 * 1024; private volatile boolean connected = false; private DefaultRtmpPublisher publisher; private RtmpHandler mHandler; private Thread worker; private final Object txFrameLock = new Object(); private SrsFlv flv = new SrsFlv(); public boolean needToFindKeyFrame = true; private SrsFlvFrame mVideoSequenceHeader; private SrsFlvFrame mAudioSequenceHeader; private SrsAllocator mVideoAllocator = new SrsAllocator(VIDEO_ALLOC_SIZE); private SrsAllocator mAudioAllocator = new SrsAllocator(AUDIO_ALLOC_SIZE); private ConcurrentLinkedQueue<SrsFlvFrame> mFlvTagCache = new ConcurrentLinkedQueue<>(); public static final int VIDEO_TRACK = 100; public static final int AUDIO_TRACK = 101; private static final String TAG = "SrsFlvMuxer"; /** * Start presentation timestamp */ private long startPTS; /** * constructor. * * @param handler the rtmp event handler. */ public SrsFlvMuxer(RtmpHandler handler) { mHandler = handler; publisher = new DefaultRtmpPublisher(handler); } /** * get cached video frame number in publisher */ public AtomicInteger getVideoFrameCacheNumber() { return publisher == null ? null : publisher.getVideoFrameCacheNumber(); } /** * set video resolution for publisher * * @param width width * @param height height */ public void setVideoResolution(int width, int height) { if (publisher != null) { publisher.setVideoResolution(width, height); } } /** * Adds a track with the specified format. * * @param format The media format for the track. * @return The track index for this newly added track. */ public int addTrack(MediaFormat format) { if (format.getString(MediaFormat.KEY_MIME).contentEquals(SrsEncoder.VCODEC)) { flv.setVideoTrack(format); return VIDEO_TRACK; } else { flv.setAudioTrack(format); return AUDIO_TRACK; } } private void disconnect() { try { publisher.close(); } catch (IllegalStateException e) { // Ignore illegal state. 
} connected = false; mVideoSequenceHeader = null; mAudioSequenceHeader = null; Log.i(TAG, "worker: disconnect ok."); } private boolean connect(String url) { if (!connected) { Log.i(TAG, String.format("worker: connecting to RTMP server by url=%s\n", url)); if (publisher.connect(url)) { connected = publisher.publish("live"); } mVideoSequenceHeader = null; mAudioSequenceHeader = null; } return connected; } private void sendFlvTag(SrsFlvFrame frame) { if (!connected || frame == null) { return; } if (frame.isVideo()) { if (frame.isKeyFrame()) { Log.i(TAG, String.format("worker: send frame type=%d, dts=%d, size=%dB", frame.type, frame.dts, frame.flvTag.array().length)); } publisher.publishVideoData(frame.flvTag.array(), frame.flvTag.size(), frame.dts); mVideoAllocator.release(frame.flvTag); } else if (frame.isAudio()) { publisher.publishAudioData(frame.flvTag.array(), frame.flvTag.size(), frame.dts); mAudioAllocator.release(frame.flvTag); } } /** * start to the remote server for remux. */ public void start(final String rtmpUrl) { startPTS = 0; needToFindKeyFrame = true; worker = new Thread(new Runnable() { @Override public void run() { if (!connect(rtmpUrl)) { return; } try { while (!worker.interrupted()) { while (!mFlvTagCache.isEmpty()) { SrsFlvFrame frame = mFlvTagCache.poll(); if (frame.isSequenceHeader()) { if (frame.isVideo()) { mVideoSequenceHeader = frame; sendFlvTag(mVideoSequenceHeader); } else if (frame.isAudio()) { mAudioSequenceHeader = frame; sendFlvTag(mAudioSequenceHeader); } } else { if (frame.isVideo() && mVideoSequenceHeader != null) { sendFlvTag(frame); } else if (frame.isAudio() && mAudioSequenceHeader != null) { sendFlvTag(frame); } } } // Waiting for next frame synchronized (txFrameLock) { txFrameLock.wait(500); } } } catch (InterruptedException ie) { Log.i(TAG, "Stopped"); } } }); // worker.setPriority(Thread.MAX_PRIORITY); worker.setDaemon(true); worker.start(); } /** * stop the muxer, disconnect RTMP connection. */ public void stop() { mFlvTagCache.clear(); if (worker != null) { worker.interrupt(); try { worker.join(5000); } catch (InterruptedException e) { Log.e(TAG, e.getMessage(), e); } } flv.reset(); needToFindKeyFrame = true; disconnect(); Log.i(TAG, "SrsFlvMuxer closed"); } /** * send the annexb frame over RTMP. * * @param trackIndex The track index for this sample. * @param byteBuf The encoded sample. * @param bufferInfo The buffer information related to this sample. */ public void writeSampleData(int trackIndex, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) { if (VIDEO_TRACK == trackIndex) { if (startPTS==0) startPTS = bufferInfo.presentationTimeUs - 100000; bufferInfo.presentationTimeUs -= startPTS; if (bufferInfo.presentationTimeUs<0) return; AtomicInteger videoFrameCacheNumber = getVideoFrameCacheNumber(); if (videoFrameCacheNumber != null && videoFrameCacheNumber.get() < 5 * SrsAvcEncoder.VGOP) { flv.writeVideoSample(byteBuf, bufferInfo); } else { Log.w(TAG, "Network throughput too low"); needToFindKeyFrame = true; } } else { if (startPTS==0) return; bufferInfo.presentationTimeUs -= startPTS; if (bufferInfo.presentationTimeUs<0) return; flv.writeAudioSample(byteBuf, bufferInfo); } } // E.4.3.1 VIDEODATA // Frame Type UB [4] // Type of video frame. 
The following values are defined: // 1 = key frame (for AVC, a seekable frame) // 2 = inter frame (for AVC, a non-seekable frame) // 3 = disposable inter frame (H.263 only) // 4 = generated key frame (reserved for server use only) // 5 = video info/command frame private class SrsCodecVideoAVCFrame { // set to the zero to reserved, for array map. public final static int Reserved = 0; public final static int Reserved1 = 6; public final static int KeyFrame = 1; public final static int InterFrame = 2; public final static int DisposableInterFrame = 3; public final static int GeneratedKeyFrame = 4; public final static int VideoInfoFrame = 5; } // AVCPacketType IF CodecID == 7 UI8 // The following values are defined: // 0 = AVC sequence header // 1 = AVC NALU // 2 = AVC end of sequence (lower level NALU sequence ender is // not required or supported) private class SrsCodecVideoAVCType { // set to the max value to reserved, for array map. public final static int Reserved = 3; public final static int SequenceHeader = 0; public final static int NALU = 1; public final static int SequenceHeaderEOF = 2; } /** * E.4.1 FLV Tag, page 75 */ private class SrsCodecFlvTag { // set to the zero to reserved, for array map. public final static int Reserved = 0; // 8 = audio public final static int Audio = 8; // 9 = video public final static int Video = 9; // 18 = script data public final static int Script = 18; } ; // E.4.3.1 VIDEODATA // CodecID UB [4] // Codec Identifier. The following values are defined: // 2 = Sorenson H.263 // 3 = Screen video // 4 = On2 VP6 // 5 = On2 VP6 with alpha channel // 6 = Screen video version 2 // 7 = AVC private class SrsCodecVideo { // set to the zero to reserved, for array map. public final static int Reserved = 0; public final static int Reserved1 = 1; public final static int Reserved2 = 9; // for user to disable video, for example, use pure audio hls. public final static int Disabled = 8; public final static int SorensonH263 = 2; public final static int ScreenVideo = 3; public final static int On2VP6 = 4; public final static int On2VP6WithAlphaChannel = 5; public final static int ScreenVideoVersion2 = 6; public final static int AVC = 7; } /** * the aac object type, for RTMP sequence header * for AudioSpecificConfig, @see aac-mp4a-format-ISO_IEC_14496-3+2001.pdf, page 33 * for audioObjectType, @see aac-mp4a-format-ISO_IEC_14496-3+2001.pdf, page 23 */ private class SrsAacObjectType { public final static int Reserved = 0; // Table 1.1 – Audio Object Type definition // @see @see aac-mp4a-format-ISO_IEC_14496-3+2001.pdf, page 23 public final static int AacMain = 1; public final static int AacLC = 2; public final static int AacSSR = 3; // AAC HE = LC+SBR public final static int AacHE = 5; // AAC HEv2 = LC+SBR+PS public final static int AacHEV2 = 29; } /** * the aac profile, for ADTS(HLS/TS) */ private class SrsAacProfile { public final static int Reserved = 3; // @see 7.1 Profiles, aac-iso-13818-7.pdf, page 40 public final static int Main = 0; public final static int LC = 1; public final static int SSR = 2; } /** * the FLV/RTMP supported audio sample rate. * Sampling rate. The following values are defined: * 0 = 5.5 kHz = 5512 Hz * 1 = 11 kHz = 11025 Hz * 2 = 22 kHz = 22050 Hz * 3 = 44 kHz = 44100 Hz */ private class SrsCodecAudioSampleRate { // set to the max value to reserved, for array map. 
public final static int Reserved = 4; public final static int R5512 = 0; public final static int R11025 = 1; public final static int R22050 = 2; public final static int R44100 = 3; } /** * Table 7-1 – NAL unit type codes, syntax element categories, and NAL unit type classes * H.264-AVC-ISO_IEC_14496-10-2012.pdf, page 83. */ private class SrsAvcNaluType { // Unspecified public final static int Reserved = 0; // Coded slice of a non-IDR picture slice_layer_without_partitioning_rbsp( ) public final static int NonIDR = 1; // Coded slice data partition A slice_data_partition_a_layer_rbsp( ) public final static int DataPartitionA = 2; // Coded slice data partition B slice_data_partition_b_layer_rbsp( ) public final static int DataPartitionB = 3; // Coded slice data partition C slice_data_partition_c_layer_rbsp( ) public final static int DataPartitionC = 4; // Coded slice of an IDR picture slice_layer_without_partitioning_rbsp( ) public final static int IDR = 5; // Supplemental enhancement information (SEI) sei_rbsp( ) public final static int SEI = 6; // Sequence parameter set seq_parameter_set_rbsp( ) public final static int SPS = 7; // Picture parameter set pic_parameter_set_rbsp( ) public final static int PPS = 8; // Access unit delimiter access_unit_delimiter_rbsp( ) public final static int AccessUnitDelimiter = 9; // End of sequence end_of_seq_rbsp( ) public final static int EOSequence = 10; // End of stream end_of_stream_rbsp( ) public final static int EOStream = 11; // Filler data filler_data_rbsp( ) public final static int FilterData = 12; // Sequence parameter set extension seq_parameter_set_extension_rbsp( ) public final static int SPSExt = 13; // Prefix NAL unit prefix_nal_unit_rbsp( ) public final static int PrefixNALU = 14; // Subset sequence parameter set subset_seq_parameter_set_rbsp( ) public final static int SubsetSPS = 15; // Coded slice of an auxiliary coded picture without partitioning slice_layer_without_partitioning_rbsp( ) public final static int LayerWithoutPartition = 19; // Coded slice extension slice_layer_extension_rbsp( ) public final static int CodedSliceExt = 20; } /** * the search result for annexb. */ private class SrsAnnexbSearch { public int nb_start_code = 0; public boolean match = false; } /** * the demuxed tag frame. */ private class SrsFlvFrameBytes { public ByteBuffer data; public int size; } /** * the muxed flv frame. */ private class SrsFlvFrame { // the tag bytes. public SrsAllocator.Allocation flvTag; // the codec type for audio/aac and video/avc for instance. public int avc_aac_type; // the frame type, keyframe or not. public int frame_type; // the tag type, audio, video or data. public int type; // the dts in ms, tbn is 1000. public int dts; public boolean isKeyFrame() { return isVideo() && frame_type == SrsCodecVideoAVCFrame.KeyFrame; } public boolean isSequenceHeader() { return avc_aac_type == 0; } public boolean isVideo() { return type == SrsCodecFlvTag.Video; } public boolean isAudio() { return type == SrsCodecFlvTag.Audio; } } /** * the raw h.264 stream, in annexb. 
*/ private class SrsRawH264Stream { private final static String TAG = "SrsFlvMuxer"; private SrsAnnexbSearch annexb = new SrsAnnexbSearch(); private SrsFlvFrameBytes seq_hdr = new SrsFlvFrameBytes(); private SrsFlvFrameBytes sps_hdr = new SrsFlvFrameBytes(); private SrsFlvFrameBytes sps_bb = new SrsFlvFrameBytes(); private SrsFlvFrameBytes pps_hdr = new SrsFlvFrameBytes(); private SrsFlvFrameBytes pps_bb = new SrsFlvFrameBytes(); public boolean isSps(SrsFlvFrameBytes frame) { return frame.size >= 1 && (frame.data.get(0) & 0x1f) == SrsAvcNaluType.SPS; } public boolean isPps(SrsFlvFrameBytes frame) { return frame.size >= 1 && (frame.data.get(0) & 0x1f) == SrsAvcNaluType.PPS; } public SrsFlvFrameBytes muxNaluHeader(SrsFlvFrameBytes frame) { SrsFlvFrameBytes nalu_hdr = new SrsFlvFrameBytes(); nalu_hdr.data = ByteBuffer.allocateDirect(4); nalu_hdr.size = 4; // 5.3.4.2.1 Syntax, H.264-AVC-ISO_IEC_14496-15.pdf, page 16 // lengthSizeMinusOne, or NAL_unit_length, always use 4bytes size int NAL_unit_length = frame.size; // mux the avc NALU in "ISO Base Media File Format" // from H.264-AVC-ISO_IEC_14496-15.pdf, page 20 // NALUnitLength nalu_hdr.data.putInt(NAL_unit_length); // reset the buffer. nalu_hdr.data.rewind(); return nalu_hdr; } public void muxSequenceHeader(ByteBuffer sps, ByteBuffer pps, int dts, int pts, ArrayList<SrsFlvFrameBytes> frames) { // 5bytes sps/pps header: // configurationVersion, AVCProfileIndication, profile_compatibility, // AVCLevelIndication, lengthSizeMinusOne // 3bytes size of sps: // numOfSequenceParameterSets, sequenceParameterSetLength(2B) // Nbytes of sps. // sequenceParameterSetNALUnit // 3bytes size of pps: // numOfPictureParameterSets, pictureParameterSetLength // Nbytes of pps: // pictureParameterSetNALUnit // decode the SPS: // @see: 7.3.2.1.1, H.264-AVC-ISO_IEC_14496-10-2012.pdf, page 62 if (seq_hdr.data == null) { seq_hdr.data = ByteBuffer.allocate(5); seq_hdr.size = 5; } seq_hdr.data.rewind(); // @see: Annex A Profiles and levels, H.264-AVC-ISO_IEC_14496-10.pdf, page 205 // Baseline profile profile_idc is 66(0x42). // Main profile profile_idc is 77(0x4d). // Extended profile profile_idc is 88(0x58). byte profile_idc = sps.get(1); //u_int8_t constraint_set = frame[2]; byte level_idc = sps.get(3); // generate the sps/pps header // 5.3.4.2.1 Syntax, H.264-AVC-ISO_IEC_14496-15.pdf, page 16 // configurationVersion seq_hdr.data.put((byte) 0x01); // AVCProfileIndication seq_hdr.data.put(profile_idc); // profile_compatibility seq_hdr.data.put((byte) 0x00); // AVCLevelIndication seq_hdr.data.put(level_idc); // lengthSizeMinusOne, or NAL_unit_length, always use 4bytes size, // so we always set it to 0x03. seq_hdr.data.put((byte) 0x03); // reset the buffer. 
seq_hdr.data.rewind(); frames.add(seq_hdr); // sps if (sps_hdr.data == null) { sps_hdr.data = ByteBuffer.allocate(3); sps_hdr.size = 3; } sps_hdr.data.rewind(); // 5.3.4.2.1 Syntax, H.264-AVC-ISO_IEC_14496-15.pdf, page 16 // numOfSequenceParameterSets, always 1 sps_hdr.data.put((byte) 0x01); // sequenceParameterSetLength sps_hdr.data.putShort((short) sps.array().length); sps_hdr.data.rewind(); frames.add(sps_hdr); // sequenceParameterSetNALUnit sps_bb.size = sps.array().length; sps_bb.data = sps.duplicate(); frames.add(sps_bb); // pps if (pps_hdr.data == null) { pps_hdr.data = ByteBuffer.allocate(3); pps_hdr.size = 3; } pps_hdr.data.rewind(); // 5.3.4.2.1 Syntax, H.264-AVC-ISO_IEC_14496-15.pdf, page 16 // numOfPictureParameterSets, always 1 pps_hdr.data.put((byte) 0x01); // pictureParameterSetLength pps_hdr.data.putShort((short) pps.array().length); pps_hdr.data.rewind(); frames.add(pps_hdr); // pictureParameterSetNALUnit pps_bb.size = pps.array().length; pps_bb.data = pps.duplicate(); frames.add(pps_bb); } public SrsAllocator.Allocation muxFlvTag(ArrayList<SrsFlvFrameBytes> frames, int frame_type, int avc_packet_type, int dts, int pts) { // for h264 in RTMP video payload, there is 5bytes header: // 1bytes, FrameType | CodecID // 1bytes, AVCPacketType // 3bytes, CompositionTime, the cts. // @see: E.4.3 Video Tags, video_file_format_spec_v10_1.pdf, page 78 int size = 5; for (int i = 0; i < frames.size(); i++) { size += frames.get(i).size; } SrsAllocator.Allocation allocation = mVideoAllocator.allocate(size); // @see: E.4.3 Video Tags, video_file_format_spec_v10_1.pdf, page 78 // Frame Type, Type of video frame. // CodecID, Codec Identifier. // set the rtmp header allocation.put((byte) ((frame_type << 4) | SrsCodecVideo.AVC)); // AVCPacketType allocation.put((byte) avc_packet_type); // CompositionTime // pts = dts + cts, or // cts = pts - dts. // where cts is the header in rtmp video packet payload header. int cts = pts - dts; allocation.put((byte) (cts >> 16)); allocation.put((byte) (cts >> 8)); allocation.put((byte) cts); // h.264 raw data. for (int i = 0; i < frames.size(); i++) { SrsFlvFrameBytes frame = frames.get(i); frame.data.get(allocation.array(), allocation.size(), frame.size); allocation.appendOffset(frame.size); } return allocation; } private SrsAnnexbSearch searchAnnexb(ByteBuffer bb, MediaCodec.BufferInfo bi) { annexb.match = false; annexb.nb_start_code = 0; for (int i = bb.position(); i < bi.size - 3; i++) { // not match. if (bb.get(i) != 0x00 || bb.get(i + 1) != 0x00) { break; } // match N[00] 00 00 01, where N>=0 if (bb.get(i + 2) == 0x01) { annexb.match = true; annexb.nb_start_code = i + 3 - bb.position(); break; } } return annexb; } public SrsFlvFrameBytes demuxAnnexb(ByteBuffer bb, MediaCodec.BufferInfo bi) { SrsFlvFrameBytes tbb = new SrsFlvFrameBytes(); while (bb.position() < bi.size) { // each frame must prefixed by annexb format. // about annexb, @see H.264-AVC-ISO_IEC_14496-10.pdf, page 211. SrsAnnexbSearch tbbsc = searchAnnexb(bb, bi); if (!tbbsc.match || tbbsc.nb_start_code < 3) { Log.e(TAG, "annexb not match."); mHandler.notifyRtmpIllegalArgumentException(new IllegalArgumentException( String.format("annexb not match for %dB, pos=%d", bi.size, bb.position()))); } // the start codes. for (int i = 0; i < tbbsc.nb_start_code; i++) { bb.get(); } // find out the frame size. 
tbb.data = bb.slice(); int pos = bb.position(); while (bb.position() < bi.size) { SrsAnnexbSearch bsc = searchAnnexb(bb, bi); if (bsc.match) { break; } bb.get(); } tbb.size = bb.position() - pos; break; } return tbb; } } private class SrsRawAacStreamCodec { public byte protection_absent; // SrsAacObjectType public int aac_object; public byte sampling_frequency_index; public byte channel_configuration; public short frame_length; public byte sound_format; public byte sound_rate; public byte sound_size; public byte sound_type; // 0 for sh; 1 for raw data. public byte aac_packet_type; public byte[] frame; } /** * remux the annexb to flv tags. */ private class SrsFlv { private MediaFormat videoTrack; private MediaFormat audioTrack; private int achannel; private int asample_rate; private SrsRawH264Stream avc = new SrsRawH264Stream(); private ArrayList<SrsFlvFrameBytes> ipbs = new ArrayList<>(); private SrsAllocator.Allocation audio_tag; private SrsAllocator.Allocation video_tag; private ByteBuffer h264_sps; private boolean h264_sps_changed; private ByteBuffer h264_pps; private boolean h264_pps_changed; private boolean h264_sps_pps_sent; private boolean aac_specific_config_got; public SrsFlv() { reset(); } public void reset() { h264_sps_changed = false; h264_pps_changed = false; h264_sps_pps_sent = false; aac_specific_config_got = false; } public void setVideoTrack(MediaFormat format) { videoTrack = format; } public void setAudioTrack(MediaFormat format) { audioTrack = format; achannel = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); asample_rate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); } public void writeAudioSample(final ByteBuffer bb, MediaCodec.BufferInfo bi) { int pts = (int) (bi.presentationTimeUs / 1000); int dts = pts; audio_tag = mAudioAllocator.allocate(bi.size + 2); byte aac_packet_type = 1; // 1 = AAC raw if (!aac_specific_config_got) { // @see aac-mp4a-format-ISO_IEC_14496-3+2001.pdf // AudioSpecificConfig (), page 33 // 1.6.2.1 AudioSpecificConfig // audioObjectType; 5 bslbf byte ch = (byte) (bb.get(0) & 0xf8); // 3bits left. // samplingFrequencyIndex; 4 bslbf byte samplingFrequencyIndex = 0x04; if (asample_rate == SrsCodecAudioSampleRate.R22050) { samplingFrequencyIndex = 0x07; } else if (asample_rate == SrsCodecAudioSampleRate.R11025) { samplingFrequencyIndex = 0x0a; } ch |= (samplingFrequencyIndex >> 1) & 0x07; audio_tag.put(ch, 2); ch = (byte) ((samplingFrequencyIndex << 7) & 0x80); // 7bits left. // channelConfiguration; 4 bslbf byte channelConfiguration = 1; if (achannel == 2) { channelConfiguration = 2; } ch |= (channelConfiguration << 3) & 0x78; // 3bits left. // GASpecificConfig(), page 451 // 4.4.1 Decoder configuration (GASpecificConfig) // frameLengthFlag; 1 bslbf // dependsOnCoreCoder; 1 bslbf // extensionFlag; 1 bslbf audio_tag.put(ch, 3); aac_specific_config_got = true; aac_packet_type = 0; // 0 = AAC sequence header writeAdtsHeader(audio_tag.array(), 4); audio_tag.appendOffset(7); } else { bb.get(audio_tag.array(), 2, bi.size); audio_tag.appendOffset(bi.size + 2); } byte sound_format = 10; // AAC byte sound_type = 0; // 0 = Mono sound if (achannel == 2) { sound_type = 1; // 1 = Stereo sound } byte sound_size = 1; // 1 = 16-bit samples byte sound_rate = 3; // 44100, 22050, 11025 if (asample_rate == 22050) { sound_rate = 2; } else if (asample_rate == 11025) { sound_rate = 1; } // for audio frame, there is 1 or 2 bytes header: // 1bytes, SoundFormat|SoundRate|SoundSize|SoundType // 1bytes, AACPacketType for SoundFormat == 10, 0 is sequence header. 
byte audio_header = (byte) (sound_type & 0x01); audio_header |= (sound_size << 1) & 0x02; audio_header |= (sound_rate << 2) & 0x0c; audio_header |= (sound_format << 4) & 0xf0; audio_tag.put(audio_header, 0); audio_tag.put(aac_packet_type, 1); writeRtmpPacket(SrsCodecFlvTag.Audio, dts, 0, aac_packet_type, audio_tag); } private void writeAdtsHeader(byte[] frame, int offset) { // adts sync word 0xfff (12-bit) frame[offset] = (byte) 0xff; frame[offset + 1] = (byte) 0xf0; // versioin 0 for MPEG-4, 1 for MPEG-2 (1-bit) frame[offset + 1] |= 0 << 3; // layer 0 (2-bit) frame[offset + 1] |= 0 << 1; // protection absent: 1 (1-bit) frame[offset + 1] |= 1; // profile: audio_object_type - 1 (2-bit) frame[offset + 2] = (SrsAacObjectType.AacLC - 1) << 6; // sampling frequency index: 4 (4-bit) frame[offset + 2] |= (4 & 0xf) << 2; // channel configuration (3-bit) frame[offset + 2] |= (2 & (byte) 0x4) >> 2; frame[offset + 3] = (byte) ((2 & (byte) 0x03) << 6); // original: 0 (1-bit) frame[offset + 3] |= 0 << 5; // home: 0 (1-bit) frame[offset + 3] |= 0 << 4; // copyright id bit: 0 (1-bit) frame[offset + 3] |= 0 << 3; // copyright id start: 0 (1-bit) frame[offset + 3] |= 0 << 2; // frame size (13-bit) frame[offset + 3] |= ((frame.length - 2) & 0x1800) >> 11; frame[offset + 4] = (byte) (((frame.length - 2) & 0x7f8) >> 3); frame[offset + 5] = (byte) (((frame.length - 2) & 0x7) << 5); // buffer fullness (0x7ff for variable bitrate) frame[offset + 5] |= (byte) 0x1f; frame[offset + 6] = (byte) 0xfc; // number of data block (nb - 1) frame[offset + 6] |= 0x0; } private void writeVideoSample(final ByteBuffer bb, MediaCodec.BufferInfo bi) { int pts = (int) (bi.presentationTimeUs / 1000); int dts = pts; int type = SrsCodecVideoAVCFrame.InterFrame; // send each frame. while (bb.position() < bi.size) { SrsFlvFrameBytes frame = avc.demuxAnnexb(bb, bi); // 5bits, 7.3.1 NAL unit syntax, // H.264-AVC-ISO_IEC_14496-10.pdf, page 44. // 7: SPS, 8: PPS, 5: I Frame, 1: P Frame int nal_unit_type = (int) (frame.data.get(0) & 0x1f); if (nal_unit_type == SrsAvcNaluType.SPS || nal_unit_type == SrsAvcNaluType.PPS) { Log.i(TAG, String.format("annexb demux %dB, pts=%d, frame=%dB, nalu=%d", bi.size, pts, frame.size, nal_unit_type)); } // for IDR frame, the frame is keyframe. if (nal_unit_type == SrsAvcNaluType.IDR) { type = SrsCodecVideoAVCFrame.KeyFrame; } // ignore the nalu type aud(9) if (nal_unit_type == SrsAvcNaluType.AccessUnitDelimiter) { continue; } // for sps if (avc.isSps(frame)) { if (!frame.data.equals(h264_sps)) { byte[] sps = new byte[frame.size]; frame.data.get(sps); h264_sps_changed = true; h264_sps = ByteBuffer.wrap(sps); } continue; } // for pps if (avc.isPps(frame)) { if (!frame.data.equals(h264_pps)) { byte[] pps = new byte[frame.size]; frame.data.get(pps); h264_pps_changed = true; h264_pps = ByteBuffer.wrap(pps); } continue; } // IPB frame. ipbs.add(avc.muxNaluHeader(frame)); ipbs.add(frame); } writeH264SpsPps(dts, pts); writeH264IpbFrame(ipbs, type, dts, pts); ipbs.clear(); } private void writeH264SpsPps(int dts, int pts) { // when sps or pps changed, update the sequence header, // for the pps maybe not changed while sps changed. // so, we must check when each video ts message frame parsed. if (h264_sps_pps_sent && !h264_sps_changed && !h264_pps_changed) { return; } // when not got sps/pps, wait. if (h264_pps == null || h264_sps == null) { return; } // h264 raw to h264 packet. 
ArrayList<SrsFlvFrameBytes> frames = new ArrayList<>(); avc.muxSequenceHeader(h264_sps, h264_pps, dts, pts, frames); // h264 packet to flv packet. int frame_type = SrsCodecVideoAVCFrame.KeyFrame; int avc_packet_type = SrsCodecVideoAVCType.SequenceHeader; video_tag = avc.muxFlvTag(frames, frame_type, avc_packet_type, dts, pts); // the timestamp in rtmp message header is dts. writeRtmpPacket(SrsCodecFlvTag.Video, dts, frame_type, avc_packet_type, video_tag); // reset sps and pps. h264_sps_changed = false; h264_pps_changed = false; h264_sps_pps_sent = true; Log.i(TAG, String.format("flv: h264 sps/pps sent, sps=%dB, pps=%dB", h264_sps.array().length, h264_pps.array().length)); } private void writeH264IpbFrame(ArrayList<SrsFlvFrameBytes> frames, int type, int dts, int pts) { // when sps or pps not sent, ignore the packet. // @see https://github.com/simple-rtmp-server/srs/issues/203 if (!h264_sps_pps_sent) { return; } video_tag = avc.muxFlvTag(frames, type, SrsCodecVideoAVCType.NALU, dts, pts); // the timestamp in rtmp message header is dts. writeRtmpPacket(SrsCodecFlvTag.Video, dts, type, SrsCodecVideoAVCType.NALU, video_tag); } private void writeRtmpPacket(int type, int dts, int frame_type, int avc_aac_type, SrsAllocator.Allocation tag) { SrsFlvFrame frame = new SrsFlvFrame(); frame.flvTag = tag; frame.type = type; frame.dts = dts; frame.frame_type = frame_type; frame.avc_aac_type = avc_aac_type; if (frame.isVideo()) { if (needToFindKeyFrame) { if (frame.isKeyFrame()) { needToFindKeyFrame = false; flvTagCacheAdd(frame); } } else { flvTagCacheAdd(frame); } } else if (frame.isAudio()) { flvTagCacheAdd(frame); } } private void flvTagCacheAdd(SrsFlvFrame frame) { mFlvTagCache.add(frame); if (frame.isVideo()) { getVideoFrameCacheNumber().incrementAndGet(); } synchronized (txFrameLock) { txFrameLock.notifyAll(); } } } }
library/src/main/java/net/ossrs/yasea/SrsFlvMuxer.java
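One visible difference between the SrsFlvMuxer listing above and the old_contents that follows is that the sender thread no longer forces Thread.MAX_PRIORITY (the setPriority call is commented out in the new version). Separately, writeAudioSample in that file builds the one-byte FLV AudioTagHeader by hand, and the bit layout is easy to lose in the flattened listing, so here is a small standalone sketch of just that packing; class and method names are illustrative, not part of the library:

    // Standalone sketch of the 1-byte FLV AudioTagHeader packing done in writeAudioSample:
    // SoundFormat (4 bits) | SoundRate (2 bits) | SoundSize (1 bit) | SoundType (1 bit).
    public class FlvAudioHeaderSketch {
        static byte audioTagHeader(int soundFormat, int soundRate, int soundSize, int soundType) {
            int header = (soundType & 0x01)            // 0 = mono, 1 = stereo
                       | ((soundSize << 1) & 0x02)     // 1 = 16-bit samples
                       | ((soundRate << 2) & 0x0c)     // 3 = 44100, 2 = 22050, 1 = 11025
                       | ((soundFormat << 4) & 0xf0);  // 10 = AAC
            return (byte) header;
        }

        public static void main(String[] args) {
            // AAC, 44.1 kHz, 16-bit, stereo packs to 0xAF, the usual AAC-in-FLV header byte.
            System.out.printf("0x%02X%n", audioTagHeader(10, 3, 1, 1) & 0xff);
        }
    }

With those four fields fixed per stream, the muxer only writes this byte once at offset 0 of each audio tag, followed by the AACPacketType byte at offset 1 (0 for the sequence header, 1 for raw AAC frames), which is exactly what the record's writeAudioSample does.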
package net.ossrs.yasea; import android.media.MediaCodec; import android.media.MediaFormat; import android.util.Log; import com.github.faucamp.simplertmp.DefaultRtmpPublisher; import com.github.faucamp.simplertmp.RtmpHandler; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicInteger; /** * Created by winlin on 5/2/15. * Updated by leoma on 4/1/16. * to POST the h.264/avc annexb frame over RTMP. * * @see android.media.MediaMuxer https://developer.android.com/reference/android/media/MediaMuxer.html */ public class SrsFlvMuxer { private static final int VIDEO_ALLOC_SIZE = 128 * 1024; private static final int AUDIO_ALLOC_SIZE = 4 * 1024; private volatile boolean connected = false; private DefaultRtmpPublisher publisher; private RtmpHandler mHandler; private Thread worker; private final Object txFrameLock = new Object(); private SrsFlv flv = new SrsFlv(); public boolean needToFindKeyFrame = true; private SrsFlvFrame mVideoSequenceHeader; private SrsFlvFrame mAudioSequenceHeader; private SrsAllocator mVideoAllocator = new SrsAllocator(VIDEO_ALLOC_SIZE); private SrsAllocator mAudioAllocator = new SrsAllocator(AUDIO_ALLOC_SIZE); private ConcurrentLinkedQueue<SrsFlvFrame> mFlvTagCache = new ConcurrentLinkedQueue<>(); public static final int VIDEO_TRACK = 100; public static final int AUDIO_TRACK = 101; private static final String TAG = "SrsFlvMuxer"; /** * Start presentation timestamp */ private long startPTS; /** * constructor. * * @param handler the rtmp event handler. */ public SrsFlvMuxer(RtmpHandler handler) { mHandler = handler; publisher = new DefaultRtmpPublisher(handler); } /** * get cached video frame number in publisher */ public AtomicInteger getVideoFrameCacheNumber() { return publisher == null ? null : publisher.getVideoFrameCacheNumber(); } /** * set video resolution for publisher * * @param width width * @param height height */ public void setVideoResolution(int width, int height) { if (publisher != null) { publisher.setVideoResolution(width, height); } } /** * Adds a track with the specified format. * * @param format The media format for the track. * @return The track index for this newly added track. */ public int addTrack(MediaFormat format) { if (format.getString(MediaFormat.KEY_MIME).contentEquals(SrsEncoder.VCODEC)) { flv.setVideoTrack(format); return VIDEO_TRACK; } else { flv.setAudioTrack(format); return AUDIO_TRACK; } } private void disconnect() { try { publisher.close(); } catch (IllegalStateException e) { // Ignore illegal state. 
} connected = false; mVideoSequenceHeader = null; mAudioSequenceHeader = null; Log.i(TAG, "worker: disconnect ok."); } private boolean connect(String url) { if (!connected) { Log.i(TAG, String.format("worker: connecting to RTMP server by url=%s\n", url)); if (publisher.connect(url)) { connected = publisher.publish("live"); } mVideoSequenceHeader = null; mAudioSequenceHeader = null; } return connected; } private void sendFlvTag(SrsFlvFrame frame) { if (!connected || frame == null) { return; } if (frame.isVideo()) { if (frame.isKeyFrame()) { Log.i(TAG, String.format("worker: send frame type=%d, dts=%d, size=%dB", frame.type, frame.dts, frame.flvTag.array().length)); } publisher.publishVideoData(frame.flvTag.array(), frame.flvTag.size(), frame.dts); mVideoAllocator.release(frame.flvTag); } else if (frame.isAudio()) { publisher.publishAudioData(frame.flvTag.array(), frame.flvTag.size(), frame.dts); mAudioAllocator.release(frame.flvTag); } } /** * start to the remote server for remux. */ public void start(final String rtmpUrl) { startPTS = 0; needToFindKeyFrame = true; worker = new Thread(new Runnable() { @Override public void run() { if (!connect(rtmpUrl)) { return; } try { while (!worker.interrupted()) { while (!mFlvTagCache.isEmpty()) { SrsFlvFrame frame = mFlvTagCache.poll(); if (frame.isSequenceHeader()) { if (frame.isVideo()) { mVideoSequenceHeader = frame; sendFlvTag(mVideoSequenceHeader); } else if (frame.isAudio()) { mAudioSequenceHeader = frame; sendFlvTag(mAudioSequenceHeader); } } else { if (frame.isVideo() && mVideoSequenceHeader != null) { sendFlvTag(frame); } else if (frame.isAudio() && mAudioSequenceHeader != null) { sendFlvTag(frame); } } } // Waiting for next frame synchronized (txFrameLock) { txFrameLock.wait(500); } } } catch (InterruptedException ie) { Log.i(TAG, "Stopped"); } } }); worker.setPriority(Thread.MAX_PRIORITY); worker.setDaemon(true); worker.start(); } /** * stop the muxer, disconnect RTMP connection. */ public void stop() { mFlvTagCache.clear(); if (worker != null) { worker.interrupt(); try { worker.join(5000); } catch (InterruptedException e) { Log.e(TAG, e.getMessage(), e); } } flv.reset(); needToFindKeyFrame = true; disconnect(); Log.i(TAG, "SrsFlvMuxer closed"); } /** * send the annexb frame over RTMP. * * @param trackIndex The track index for this sample. * @param byteBuf The encoded sample. * @param bufferInfo The buffer information related to this sample. */ public void writeSampleData(int trackIndex, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) { if (VIDEO_TRACK == trackIndex) { if (startPTS==0) startPTS = bufferInfo.presentationTimeUs - 100000; bufferInfo.presentationTimeUs -= startPTS; if (bufferInfo.presentationTimeUs<0) return; AtomicInteger videoFrameCacheNumber = getVideoFrameCacheNumber(); if (videoFrameCacheNumber != null && videoFrameCacheNumber.get() < 5 * SrsAvcEncoder.VGOP) { flv.writeVideoSample(byteBuf, bufferInfo); } else { Log.w(TAG, "Network throughput too low"); needToFindKeyFrame = true; } } else { if (startPTS==0) return; bufferInfo.presentationTimeUs -= startPTS; if (bufferInfo.presentationTimeUs<0) return; flv.writeAudioSample(byteBuf, bufferInfo); } } // E.4.3.1 VIDEODATA // Frame Type UB [4] // Type of video frame. 
The following values are defined: // 1 = key frame (for AVC, a seekable frame) // 2 = inter frame (for AVC, a non-seekable frame) // 3 = disposable inter frame (H.263 only) // 4 = generated key frame (reserved for server use only) // 5 = video info/command frame private class SrsCodecVideoAVCFrame { // set to the zero to reserved, for array map. public final static int Reserved = 0; public final static int Reserved1 = 6; public final static int KeyFrame = 1; public final static int InterFrame = 2; public final static int DisposableInterFrame = 3; public final static int GeneratedKeyFrame = 4; public final static int VideoInfoFrame = 5; } // AVCPacketType IF CodecID == 7 UI8 // The following values are defined: // 0 = AVC sequence header // 1 = AVC NALU // 2 = AVC end of sequence (lower level NALU sequence ender is // not required or supported) private class SrsCodecVideoAVCType { // set to the max value to reserved, for array map. public final static int Reserved = 3; public final static int SequenceHeader = 0; public final static int NALU = 1; public final static int SequenceHeaderEOF = 2; } /** * E.4.1 FLV Tag, page 75 */ private class SrsCodecFlvTag { // set to the zero to reserved, for array map. public final static int Reserved = 0; // 8 = audio public final static int Audio = 8; // 9 = video public final static int Video = 9; // 18 = script data public final static int Script = 18; } ; // E.4.3.1 VIDEODATA // CodecID UB [4] // Codec Identifier. The following values are defined: // 2 = Sorenson H.263 // 3 = Screen video // 4 = On2 VP6 // 5 = On2 VP6 with alpha channel // 6 = Screen video version 2 // 7 = AVC private class SrsCodecVideo { // set to the zero to reserved, for array map. public final static int Reserved = 0; public final static int Reserved1 = 1; public final static int Reserved2 = 9; // for user to disable video, for example, use pure audio hls. public final static int Disabled = 8; public final static int SorensonH263 = 2; public final static int ScreenVideo = 3; public final static int On2VP6 = 4; public final static int On2VP6WithAlphaChannel = 5; public final static int ScreenVideoVersion2 = 6; public final static int AVC = 7; } /** * the aac object type, for RTMP sequence header * for AudioSpecificConfig, @see aac-mp4a-format-ISO_IEC_14496-3+2001.pdf, page 33 * for audioObjectType, @see aac-mp4a-format-ISO_IEC_14496-3+2001.pdf, page 23 */ private class SrsAacObjectType { public final static int Reserved = 0; // Table 1.1 – Audio Object Type definition // @see @see aac-mp4a-format-ISO_IEC_14496-3+2001.pdf, page 23 public final static int AacMain = 1; public final static int AacLC = 2; public final static int AacSSR = 3; // AAC HE = LC+SBR public final static int AacHE = 5; // AAC HEv2 = LC+SBR+PS public final static int AacHEV2 = 29; } /** * the aac profile, for ADTS(HLS/TS) */ private class SrsAacProfile { public final static int Reserved = 3; // @see 7.1 Profiles, aac-iso-13818-7.pdf, page 40 public final static int Main = 0; public final static int LC = 1; public final static int SSR = 2; } /** * the FLV/RTMP supported audio sample rate. * Sampling rate. The following values are defined: * 0 = 5.5 kHz = 5512 Hz * 1 = 11 kHz = 11025 Hz * 2 = 22 kHz = 22050 Hz * 3 = 44 kHz = 44100 Hz */ private class SrsCodecAudioSampleRate { // set to the max value to reserved, for array map. 
public final static int Reserved = 4; public final static int R5512 = 0; public final static int R11025 = 1; public final static int R22050 = 2; public final static int R44100 = 3; } /** * Table 7-1 – NAL unit type codes, syntax element categories, and NAL unit type classes * H.264-AVC-ISO_IEC_14496-10-2012.pdf, page 83. */ private class SrsAvcNaluType { // Unspecified public final static int Reserved = 0; // Coded slice of a non-IDR picture slice_layer_without_partitioning_rbsp( ) public final static int NonIDR = 1; // Coded slice data partition A slice_data_partition_a_layer_rbsp( ) public final static int DataPartitionA = 2; // Coded slice data partition B slice_data_partition_b_layer_rbsp( ) public final static int DataPartitionB = 3; // Coded slice data partition C slice_data_partition_c_layer_rbsp( ) public final static int DataPartitionC = 4; // Coded slice of an IDR picture slice_layer_without_partitioning_rbsp( ) public final static int IDR = 5; // Supplemental enhancement information (SEI) sei_rbsp( ) public final static int SEI = 6; // Sequence parameter set seq_parameter_set_rbsp( ) public final static int SPS = 7; // Picture parameter set pic_parameter_set_rbsp( ) public final static int PPS = 8; // Access unit delimiter access_unit_delimiter_rbsp( ) public final static int AccessUnitDelimiter = 9; // End of sequence end_of_seq_rbsp( ) public final static int EOSequence = 10; // End of stream end_of_stream_rbsp( ) public final static int EOStream = 11; // Filler data filler_data_rbsp( ) public final static int FilterData = 12; // Sequence parameter set extension seq_parameter_set_extension_rbsp( ) public final static int SPSExt = 13; // Prefix NAL unit prefix_nal_unit_rbsp( ) public final static int PrefixNALU = 14; // Subset sequence parameter set subset_seq_parameter_set_rbsp( ) public final static int SubsetSPS = 15; // Coded slice of an auxiliary coded picture without partitioning slice_layer_without_partitioning_rbsp( ) public final static int LayerWithoutPartition = 19; // Coded slice extension slice_layer_extension_rbsp( ) public final static int CodedSliceExt = 20; } /** * the search result for annexb. */ private class SrsAnnexbSearch { public int nb_start_code = 0; public boolean match = false; } /** * the demuxed tag frame. */ private class SrsFlvFrameBytes { public ByteBuffer data; public int size; } /** * the muxed flv frame. */ private class SrsFlvFrame { // the tag bytes. public SrsAllocator.Allocation flvTag; // the codec type for audio/aac and video/avc for instance. public int avc_aac_type; // the frame type, keyframe or not. public int frame_type; // the tag type, audio, video or data. public int type; // the dts in ms, tbn is 1000. public int dts; public boolean isKeyFrame() { return isVideo() && frame_type == SrsCodecVideoAVCFrame.KeyFrame; } public boolean isSequenceHeader() { return avc_aac_type == 0; } public boolean isVideo() { return type == SrsCodecFlvTag.Video; } public boolean isAudio() { return type == SrsCodecFlvTag.Audio; } } /** * the raw h.264 stream, in annexb. 
*/ private class SrsRawH264Stream { private final static String TAG = "SrsFlvMuxer"; private SrsAnnexbSearch annexb = new SrsAnnexbSearch(); private SrsFlvFrameBytes seq_hdr = new SrsFlvFrameBytes(); private SrsFlvFrameBytes sps_hdr = new SrsFlvFrameBytes(); private SrsFlvFrameBytes sps_bb = new SrsFlvFrameBytes(); private SrsFlvFrameBytes pps_hdr = new SrsFlvFrameBytes(); private SrsFlvFrameBytes pps_bb = new SrsFlvFrameBytes(); public boolean isSps(SrsFlvFrameBytes frame) { return frame.size >= 1 && (frame.data.get(0) & 0x1f) == SrsAvcNaluType.SPS; } public boolean isPps(SrsFlvFrameBytes frame) { return frame.size >= 1 && (frame.data.get(0) & 0x1f) == SrsAvcNaluType.PPS; } public SrsFlvFrameBytes muxNaluHeader(SrsFlvFrameBytes frame) { SrsFlvFrameBytes nalu_hdr = new SrsFlvFrameBytes(); nalu_hdr.data = ByteBuffer.allocateDirect(4); nalu_hdr.size = 4; // 5.3.4.2.1 Syntax, H.264-AVC-ISO_IEC_14496-15.pdf, page 16 // lengthSizeMinusOne, or NAL_unit_length, always use 4bytes size int NAL_unit_length = frame.size; // mux the avc NALU in "ISO Base Media File Format" // from H.264-AVC-ISO_IEC_14496-15.pdf, page 20 // NALUnitLength nalu_hdr.data.putInt(NAL_unit_length); // reset the buffer. nalu_hdr.data.rewind(); return nalu_hdr; } public void muxSequenceHeader(ByteBuffer sps, ByteBuffer pps, int dts, int pts, ArrayList<SrsFlvFrameBytes> frames) { // 5bytes sps/pps header: // configurationVersion, AVCProfileIndication, profile_compatibility, // AVCLevelIndication, lengthSizeMinusOne // 3bytes size of sps: // numOfSequenceParameterSets, sequenceParameterSetLength(2B) // Nbytes of sps. // sequenceParameterSetNALUnit // 3bytes size of pps: // numOfPictureParameterSets, pictureParameterSetLength // Nbytes of pps: // pictureParameterSetNALUnit // decode the SPS: // @see: 7.3.2.1.1, H.264-AVC-ISO_IEC_14496-10-2012.pdf, page 62 if (seq_hdr.data == null) { seq_hdr.data = ByteBuffer.allocate(5); seq_hdr.size = 5; } seq_hdr.data.rewind(); // @see: Annex A Profiles and levels, H.264-AVC-ISO_IEC_14496-10.pdf, page 205 // Baseline profile profile_idc is 66(0x42). // Main profile profile_idc is 77(0x4d). // Extended profile profile_idc is 88(0x58). byte profile_idc = sps.get(1); //u_int8_t constraint_set = frame[2]; byte level_idc = sps.get(3); // generate the sps/pps header // 5.3.4.2.1 Syntax, H.264-AVC-ISO_IEC_14496-15.pdf, page 16 // configurationVersion seq_hdr.data.put((byte) 0x01); // AVCProfileIndication seq_hdr.data.put(profile_idc); // profile_compatibility seq_hdr.data.put((byte) 0x00); // AVCLevelIndication seq_hdr.data.put(level_idc); // lengthSizeMinusOne, or NAL_unit_length, always use 4bytes size, // so we always set it to 0x03. seq_hdr.data.put((byte) 0x03); // reset the buffer. 
seq_hdr.data.rewind(); frames.add(seq_hdr); // sps if (sps_hdr.data == null) { sps_hdr.data = ByteBuffer.allocate(3); sps_hdr.size = 3; } sps_hdr.data.rewind(); // 5.3.4.2.1 Syntax, H.264-AVC-ISO_IEC_14496-15.pdf, page 16 // numOfSequenceParameterSets, always 1 sps_hdr.data.put((byte) 0x01); // sequenceParameterSetLength sps_hdr.data.putShort((short) sps.array().length); sps_hdr.data.rewind(); frames.add(sps_hdr); // sequenceParameterSetNALUnit sps_bb.size = sps.array().length; sps_bb.data = sps.duplicate(); frames.add(sps_bb); // pps if (pps_hdr.data == null) { pps_hdr.data = ByteBuffer.allocate(3); pps_hdr.size = 3; } pps_hdr.data.rewind(); // 5.3.4.2.1 Syntax, H.264-AVC-ISO_IEC_14496-15.pdf, page 16 // numOfPictureParameterSets, always 1 pps_hdr.data.put((byte) 0x01); // pictureParameterSetLength pps_hdr.data.putShort((short) pps.array().length); pps_hdr.data.rewind(); frames.add(pps_hdr); // pictureParameterSetNALUnit pps_bb.size = pps.array().length; pps_bb.data = pps.duplicate(); frames.add(pps_bb); } public SrsAllocator.Allocation muxFlvTag(ArrayList<SrsFlvFrameBytes> frames, int frame_type, int avc_packet_type, int dts, int pts) { // for h264 in RTMP video payload, there is 5bytes header: // 1bytes, FrameType | CodecID // 1bytes, AVCPacketType // 3bytes, CompositionTime, the cts. // @see: E.4.3 Video Tags, video_file_format_spec_v10_1.pdf, page 78 int size = 5; for (int i = 0; i < frames.size(); i++) { size += frames.get(i).size; } SrsAllocator.Allocation allocation = mVideoAllocator.allocate(size); // @see: E.4.3 Video Tags, video_file_format_spec_v10_1.pdf, page 78 // Frame Type, Type of video frame. // CodecID, Codec Identifier. // set the rtmp header allocation.put((byte) ((frame_type << 4) | SrsCodecVideo.AVC)); // AVCPacketType allocation.put((byte) avc_packet_type); // CompositionTime // pts = dts + cts, or // cts = pts - dts. // where cts is the header in rtmp video packet payload header. int cts = pts - dts; allocation.put((byte) (cts >> 16)); allocation.put((byte) (cts >> 8)); allocation.put((byte) cts); // h.264 raw data. for (int i = 0; i < frames.size(); i++) { SrsFlvFrameBytes frame = frames.get(i); frame.data.get(allocation.array(), allocation.size(), frame.size); allocation.appendOffset(frame.size); } return allocation; } private SrsAnnexbSearch searchAnnexb(ByteBuffer bb, MediaCodec.BufferInfo bi) { annexb.match = false; annexb.nb_start_code = 0; for (int i = bb.position(); i < bi.size - 3; i++) { // not match. if (bb.get(i) != 0x00 || bb.get(i + 1) != 0x00) { break; } // match N[00] 00 00 01, where N>=0 if (bb.get(i + 2) == 0x01) { annexb.match = true; annexb.nb_start_code = i + 3 - bb.position(); break; } } return annexb; } public SrsFlvFrameBytes demuxAnnexb(ByteBuffer bb, MediaCodec.BufferInfo bi) { SrsFlvFrameBytes tbb = new SrsFlvFrameBytes(); while (bb.position() < bi.size) { // each frame must prefixed by annexb format. // about annexb, @see H.264-AVC-ISO_IEC_14496-10.pdf, page 211. SrsAnnexbSearch tbbsc = searchAnnexb(bb, bi); if (!tbbsc.match || tbbsc.nb_start_code < 3) { Log.e(TAG, "annexb not match."); mHandler.notifyRtmpIllegalArgumentException(new IllegalArgumentException( String.format("annexb not match for %dB, pos=%d", bi.size, bb.position()))); } // the start codes. for (int i = 0; i < tbbsc.nb_start_code; i++) { bb.get(); } // find out the frame size. 
tbb.data = bb.slice(); int pos = bb.position(); while (bb.position() < bi.size) { SrsAnnexbSearch bsc = searchAnnexb(bb, bi); if (bsc.match) { break; } bb.get(); } tbb.size = bb.position() - pos; break; } return tbb; } } private class SrsRawAacStreamCodec { public byte protection_absent; // SrsAacObjectType public int aac_object; public byte sampling_frequency_index; public byte channel_configuration; public short frame_length; public byte sound_format; public byte sound_rate; public byte sound_size; public byte sound_type; // 0 for sh; 1 for raw data. public byte aac_packet_type; public byte[] frame; } /** * remux the annexb to flv tags. */ private class SrsFlv { private MediaFormat videoTrack; private MediaFormat audioTrack; private int achannel; private int asample_rate; private SrsRawH264Stream avc = new SrsRawH264Stream(); private ArrayList<SrsFlvFrameBytes> ipbs = new ArrayList<>(); private SrsAllocator.Allocation audio_tag; private SrsAllocator.Allocation video_tag; private ByteBuffer h264_sps; private boolean h264_sps_changed; private ByteBuffer h264_pps; private boolean h264_pps_changed; private boolean h264_sps_pps_sent; private boolean aac_specific_config_got; public SrsFlv() { reset(); } public void reset() { h264_sps_changed = false; h264_pps_changed = false; h264_sps_pps_sent = false; aac_specific_config_got = false; } public void setVideoTrack(MediaFormat format) { videoTrack = format; } public void setAudioTrack(MediaFormat format) { audioTrack = format; achannel = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); asample_rate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); } public void writeAudioSample(final ByteBuffer bb, MediaCodec.BufferInfo bi) { int pts = (int) (bi.presentationTimeUs / 1000); int dts = pts; audio_tag = mAudioAllocator.allocate(bi.size + 2); byte aac_packet_type = 1; // 1 = AAC raw if (!aac_specific_config_got) { // @see aac-mp4a-format-ISO_IEC_14496-3+2001.pdf // AudioSpecificConfig (), page 33 // 1.6.2.1 AudioSpecificConfig // audioObjectType; 5 bslbf byte ch = (byte) (bb.get(0) & 0xf8); // 3bits left. // samplingFrequencyIndex; 4 bslbf byte samplingFrequencyIndex = 0x04; if (asample_rate == SrsCodecAudioSampleRate.R22050) { samplingFrequencyIndex = 0x07; } else if (asample_rate == SrsCodecAudioSampleRate.R11025) { samplingFrequencyIndex = 0x0a; } ch |= (samplingFrequencyIndex >> 1) & 0x07; audio_tag.put(ch, 2); ch = (byte) ((samplingFrequencyIndex << 7) & 0x80); // 7bits left. // channelConfiguration; 4 bslbf byte channelConfiguration = 1; if (achannel == 2) { channelConfiguration = 2; } ch |= (channelConfiguration << 3) & 0x78; // 3bits left. // GASpecificConfig(), page 451 // 4.4.1 Decoder configuration (GASpecificConfig) // frameLengthFlag; 1 bslbf // dependsOnCoreCoder; 1 bslbf // extensionFlag; 1 bslbf audio_tag.put(ch, 3); aac_specific_config_got = true; aac_packet_type = 0; // 0 = AAC sequence header writeAdtsHeader(audio_tag.array(), 4); audio_tag.appendOffset(7); } else { bb.get(audio_tag.array(), 2, bi.size); audio_tag.appendOffset(bi.size + 2); } byte sound_format = 10; // AAC byte sound_type = 0; // 0 = Mono sound if (achannel == 2) { sound_type = 1; // 1 = Stereo sound } byte sound_size = 1; // 1 = 16-bit samples byte sound_rate = 3; // 44100, 22050, 11025 if (asample_rate == 22050) { sound_rate = 2; } else if (asample_rate == 11025) { sound_rate = 1; } // for audio frame, there is 1 or 2 bytes header: // 1bytes, SoundFormat|SoundRate|SoundSize|SoundType // 1bytes, AACPacketType for SoundFormat == 10, 0 is sequence header. 
byte audio_header = (byte) (sound_type & 0x01); audio_header |= (sound_size << 1) & 0x02; audio_header |= (sound_rate << 2) & 0x0c; audio_header |= (sound_format << 4) & 0xf0; audio_tag.put(audio_header, 0); audio_tag.put(aac_packet_type, 1); writeRtmpPacket(SrsCodecFlvTag.Audio, dts, 0, aac_packet_type, audio_tag); } private void writeAdtsHeader(byte[] frame, int offset) { // adts sync word 0xfff (12-bit) frame[offset] = (byte) 0xff; frame[offset + 1] = (byte) 0xf0; // versioin 0 for MPEG-4, 1 for MPEG-2 (1-bit) frame[offset + 1] |= 0 << 3; // layer 0 (2-bit) frame[offset + 1] |= 0 << 1; // protection absent: 1 (1-bit) frame[offset + 1] |= 1; // profile: audio_object_type - 1 (2-bit) frame[offset + 2] = (SrsAacObjectType.AacLC - 1) << 6; // sampling frequency index: 4 (4-bit) frame[offset + 2] |= (4 & 0xf) << 2; // channel configuration (3-bit) frame[offset + 2] |= (2 & (byte) 0x4) >> 2; frame[offset + 3] = (byte) ((2 & (byte) 0x03) << 6); // original: 0 (1-bit) frame[offset + 3] |= 0 << 5; // home: 0 (1-bit) frame[offset + 3] |= 0 << 4; // copyright id bit: 0 (1-bit) frame[offset + 3] |= 0 << 3; // copyright id start: 0 (1-bit) frame[offset + 3] |= 0 << 2; // frame size (13-bit) frame[offset + 3] |= ((frame.length - 2) & 0x1800) >> 11; frame[offset + 4] = (byte) (((frame.length - 2) & 0x7f8) >> 3); frame[offset + 5] = (byte) (((frame.length - 2) & 0x7) << 5); // buffer fullness (0x7ff for variable bitrate) frame[offset + 5] |= (byte) 0x1f; frame[offset + 6] = (byte) 0xfc; // number of data block (nb - 1) frame[offset + 6] |= 0x0; } private void writeVideoSample(final ByteBuffer bb, MediaCodec.BufferInfo bi) { int pts = (int) (bi.presentationTimeUs / 1000); int dts = pts; int type = SrsCodecVideoAVCFrame.InterFrame; // send each frame. while (bb.position() < bi.size) { SrsFlvFrameBytes frame = avc.demuxAnnexb(bb, bi); // 5bits, 7.3.1 NAL unit syntax, // H.264-AVC-ISO_IEC_14496-10.pdf, page 44. // 7: SPS, 8: PPS, 5: I Frame, 1: P Frame int nal_unit_type = (int) (frame.data.get(0) & 0x1f); if (nal_unit_type == SrsAvcNaluType.SPS || nal_unit_type == SrsAvcNaluType.PPS) { Log.i(TAG, String.format("annexb demux %dB, pts=%d, frame=%dB, nalu=%d", bi.size, pts, frame.size, nal_unit_type)); } // for IDR frame, the frame is keyframe. if (nal_unit_type == SrsAvcNaluType.IDR) { type = SrsCodecVideoAVCFrame.KeyFrame; } // ignore the nalu type aud(9) if (nal_unit_type == SrsAvcNaluType.AccessUnitDelimiter) { continue; } // for sps if (avc.isSps(frame)) { if (!frame.data.equals(h264_sps)) { byte[] sps = new byte[frame.size]; frame.data.get(sps); h264_sps_changed = true; h264_sps = ByteBuffer.wrap(sps); } continue; } // for pps if (avc.isPps(frame)) { if (!frame.data.equals(h264_pps)) { byte[] pps = new byte[frame.size]; frame.data.get(pps); h264_pps_changed = true; h264_pps = ByteBuffer.wrap(pps); } continue; } // IPB frame. ipbs.add(avc.muxNaluHeader(frame)); ipbs.add(frame); } writeH264SpsPps(dts, pts); writeH264IpbFrame(ipbs, type, dts, pts); ipbs.clear(); } private void writeH264SpsPps(int dts, int pts) { // when sps or pps changed, update the sequence header, // for the pps maybe not changed while sps changed. // so, we must check when each video ts message frame parsed. if (h264_sps_pps_sent && !h264_sps_changed && !h264_pps_changed) { return; } // when not got sps/pps, wait. if (h264_pps == null || h264_sps == null) { return; } // h264 raw to h264 packet. 
ArrayList<SrsFlvFrameBytes> frames = new ArrayList<>(); avc.muxSequenceHeader(h264_sps, h264_pps, dts, pts, frames); // h264 packet to flv packet. int frame_type = SrsCodecVideoAVCFrame.KeyFrame; int avc_packet_type = SrsCodecVideoAVCType.SequenceHeader; video_tag = avc.muxFlvTag(frames, frame_type, avc_packet_type, dts, pts); // the timestamp in rtmp message header is dts. writeRtmpPacket(SrsCodecFlvTag.Video, dts, frame_type, avc_packet_type, video_tag); // reset sps and pps. h264_sps_changed = false; h264_pps_changed = false; h264_sps_pps_sent = true; Log.i(TAG, String.format("flv: h264 sps/pps sent, sps=%dB, pps=%dB", h264_sps.array().length, h264_pps.array().length)); } private void writeH264IpbFrame(ArrayList<SrsFlvFrameBytes> frames, int type, int dts, int pts) { // when sps or pps not sent, ignore the packet. // @see https://github.com/simple-rtmp-server/srs/issues/203 if (!h264_sps_pps_sent) { return; } video_tag = avc.muxFlvTag(frames, type, SrsCodecVideoAVCType.NALU, dts, pts); // the timestamp in rtmp message header is dts. writeRtmpPacket(SrsCodecFlvTag.Video, dts, type, SrsCodecVideoAVCType.NALU, video_tag); } private void writeRtmpPacket(int type, int dts, int frame_type, int avc_aac_type, SrsAllocator.Allocation tag) { SrsFlvFrame frame = new SrsFlvFrame(); frame.flvTag = tag; frame.type = type; frame.dts = dts; frame.frame_type = frame_type; frame.avc_aac_type = avc_aac_type; if (frame.isVideo()) { if (needToFindKeyFrame) { if (frame.isKeyFrame()) { needToFindKeyFrame = false; flvTagCacheAdd(frame); } } else { flvTagCacheAdd(frame); } } else if (frame.isAudio()) { flvTagCacheAdd(frame); } } private void flvTagCacheAdd(SrsFlvFrame frame) { mFlvTagCache.add(frame); if (frame.isVideo()) { getVideoFrameCacheNumber().incrementAndGet(); } synchronized (txFrameLock) { txFrameLock.notifyAll(); } } } }
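For reference, the single-byte FLV audio tag header assembled in writeAudioSample above packs SoundFormat, SoundRate, SoundSize and SoundType into one byte, with SoundFormat in the high nibble. The following is a minimal, self-contained sketch of that packing for the 44.1 kHz, stereo, 16-bit AAC case the muxer handles; the class and method names here are illustrative only and are not part of the yasea library.

// Illustrative sketch only: mirrors the bit layout used by writeAudioSample above.
// Layout: SoundFormat (4 bits) | SoundRate (2 bits) | SoundSize (1 bit) | SoundType (1 bit)
public class FlvAudioHeaderSketch {
    static byte packAudioHeader(int soundFormat, int soundRate, int soundSize, int soundType) {
        byte header = (byte) (soundType & 0x01);   // 1 = stereo
        header |= (soundSize << 1) & 0x02;         // 1 = 16-bit samples
        header |= (soundRate << 2) & 0x0c;         // 3 = 44.1 kHz
        header |= (soundFormat << 4) & 0xf0;       // 10 = AAC
        return header;
    }

    public static void main(String[] args) {
        // 44.1 kHz stereo 16-bit AAC -> 0xaf, the first byte of every AAC FLV audio tag.
        byte header = packAudioHeader(10, 3, 1, 1);
        System.out.printf("audio tag header = 0x%02x%n", header & 0xff);
    }
}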
LISA not stopping after end time #89 - Removed delay in agent stop - Set interval in start/stopAgent with interrupt for current cycle - Lowered priority of FLVmuxer
library/src/main/java/net/ossrs/yasea/SrsFlvMuxer.java
LISA not stopping after end time #89
Java
mit
2caa8d82daee5b6d6e4acd668095dcc8f5911b18
0
varatep/2014-SS12-Dead-Reckoning
package com.example.deadreckoning; import java.net.Inet4Address; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import Network.Client; import Network.Server; import android.app.Activity; import android.content.IntentFilter; import android.hardware.Sensor; import android.hardware.SensorEvent; import android.hardware.SensorEventListener; import android.hardware.SensorManager; import android.location.Location; import android.location.LocationManager; import android.net.wifi.p2p.WifiP2pManager; import android.net.wifi.p2p.WifiP2pManager.Channel; import android.widget.TextView; import android.location.Criteria; import android.location.LocationListener; import android.net.wifi.p2p.WifiP2pConfig; import android.net.wifi.p2p.WifiP2pDevice; import android.net.wifi.p2p.WifiP2pDeviceList; import android.net.wifi.p2p.WifiP2pManager.ActionListener; import android.net.wifi.p2p.WifiP2pManager.PeerListListener; import android.os.Bundle; import android.content.Context; import android.util.Log; import android.view.Menu; import android.view.View; import android.widget.EditText; import android.widget.Toast; public class LocateActivity extends Activity implements SensorEventListener { private SensorManager sensorManager; double ax,ay,az; TextView axText; TextView ayText; TextView azText; //Location location; //TextView latitude; //TextView longitude; //LocationManager locationManager; //private String provider; //boolean threadStarted = true; ///////////////////////////////////////////// //private final IntentFilter intentFilter = new IntentFilter(); //Channel mChannel; //WifiP2pManager mManager; //WiFiDirectBroadcastReceiver receiver; //private PeerListListener myPeerListListener; //private WifiP2pDeviceList deviceList; //private List<WifiP2pDevice> peers = new ArrayList<WifiP2pDevice>(); /*PeerListListener peerListListener = new PeerListListener() { @Override public void onPeersAvailable(WifiP2pDeviceList peerList) { Log.i("ss12", "onPeersAvailable - main"); // Out with the old, in with the new. peers.clear(); peers.addAll(peerList.getDeviceList()); // If an AdapterView is backed by this data, notify it // of the change. For instance, if you have a ListView of available // peers, trigger an update. //((WiFiPeerListAdapter) getListAdapter()).notifyDataSetChanged(); Log.i("ss12", peers.toString()); if (peers.size() == 0) { Log.i("ss12", "No devices found"); return; } else { WifiP2pDevice device = peers.get(0); WifiP2pConfig config = new WifiP2pConfig(); config.deviceAddress = device.deviceAddress; mManager.connect(mChannel, config, new ActionListener() { @Override public void onSuccess() { //success logic String ip = getDottedDecimalIP(getLocalIPAddress()); @SuppressWarnings("unused") Client client = new Client(1247, ip); Log.i("ss12", "Holy Shit we connected to the device via direct wifi"); } @Override public void onFailure(int reason) { Toast.makeText(LocateActivity.this, "Connect failed. 
Retry.", Toast.LENGTH_SHORT).show(); } }); } } };*/ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_location); axText = (TextView) findViewById(R.id.ax); ayText = (TextView) findViewById(R.id.ay); azText = (TextView) findViewById(R.id.az); sensorManager=(SensorManager) getSystemService(SENSOR_SERVICE); sensorManager.registerListener(this, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_NORMAL); // Indicates a change in the Wi-Fi P2P status. /*intentFilter.addAction(WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION); // Indicates a change in the list of available peers. intentFilter.addAction(WifiP2pManager.WIFI_P2P_PEERS_CHANGED_ACTION); // Indicates the state of Wi-Fi P2P connectivity has changed. intentFilter.addAction(WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION); // Indicates this device's details have changed. intentFilter.addAction(WifiP2pManager.WIFI_P2P_THIS_DEVICE_CHANGED_ACTION); mManager = (WifiP2pManager) getSystemService(Context.WIFI_P2P_SERVICE); mChannel = mManager.initialize(this, getMainLooper(), null); receiver = new WiFiDirectBroadcastReceiver(mManager, mChannel, this, peerListListener); //////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////// latitude = (TextView) findViewById(R.id.latitude); longitude = (TextView) findViewById(R.id.longitude); locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE); Criteria criteria = new Criteria(); //criteria.setAccuracy(Criteria.ACCURACY_FINE); provider = locationManager.getBestProvider(criteria, false); location = locationManager.getLastKnownLocation(provider); Log.i("ss12", provider); if (location != null) { System.out.println("Provider " + provider + " has been selected."); onLocationChanged(location); } else { latitude.setText("Location not available"); longitude.setText("Location not available"); }*/ } //// not using this method anymore /*@Override public void onLocationChanged(Location location) { double lat = location.getLatitude(); double lng = location.getLongitude(); latitude.setText(String.valueOf(lat)); longitude.setText(String.valueOf(lng)); } @Override public void onStatusChanged(String provider, int status, Bundle extras) { // TODO Auto-generated method stub } @Override public void onProviderEnabled(String provider) { Toast.makeText(this, "Enabled new provider " + provider, Toast.LENGTH_SHORT).show(); } @Override public void onProviderDisabled(String provider) { Toast.makeText(this, "Disabled provider " + provider, Toast.LENGTH_SHORT).show(); }*/ @Override protected void onResume() { super.onResume(); sensorManager.registerListener(this, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_NORMAL); //receiver = new WiFiDirectBroadcastReceiver(mManager, mChannel, this, peerListListener); //registerReceiver(receiver, intentFilter); //locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 10, 0, this); } @Override protected void onPause() { super.onPause(); //unregisterReceiver(receiver); //locationManager.removeUpdates(this); } @Override public void onAccuracyChanged(Sensor sensor, int accuracy) { // can be safely ignored for this demo } @Override public void onSensorChanged(SensorEvent event) { if (event.sensor.getType()==Sensor.TYPE_ACCELEROMETER){ ax=event.values[0]; ay=event.values[1]; az=event.values[2]; } 
axText.setText(Double.toString(ax)); ayText.setText(Double.toString(ay)); azText.setText(Double.toString(az)); //Log.i("ss12", "ax: " + ax); //Log.i("ss12", "ay: " + ay); //Log.i("ss12", "az: " + az); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return true; } //public void connect(View view) { /*WifiP2pManager manager = (WifiP2pManager) this.getSystemService(Context.WIFI_P2P_SERVICE); Channel channel = manager.initialize(this, this.getMainLooper(), null); try { Method method1 = manager.getClass().getMethod("enableP2p", Channel.class); method1.invoke(manager, channel); //Toast.makeText(getActivity(), "method found", // Toast.LENGTH_SHORT).show(); } catch (Exception e) { //Toast.makeText(getActivity(), "method did not found", // Toast.LENGTH_SHORT).show(); } mManager.discoverPeers(mChannel, new WifiP2pManager.ActionListener() { @Override public void onSuccess() { Log.i("ss12", "discover peers started"); } @Override public void onFailure(int reasonCode) { Log.i("ss12", "discover peers - failed " + reasonCode); } });*/ //} private byte[] getLocalIPAddress() { try { for (Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces(); en.hasMoreElements();) { NetworkInterface intf = en.nextElement(); for (Enumeration<InetAddress> enumIpAddr = intf.getInetAddresses(); enumIpAddr.hasMoreElements();) { InetAddress inetAddress = enumIpAddr.nextElement(); if (!inetAddress.isLoopbackAddress()) { if (inetAddress instanceof Inet4Address) { // fix for Galaxy Nexus. IPv4 is easy to use :-) return inetAddress.getAddress(); } //return inetAddress.getHostAddress().toString(); // Galaxy Nexus returns IPv6 } } } } catch (SocketException ex) { //Log.e("AndroidNetworkAddressFactory", "getLocalIPAddress()", ex); } catch (NullPointerException ex) { //Log.e("AndroidNetworkAddressFactory", "getLocalIPAddress()", ex); } return null; } private String getDottedDecimalIP(byte[] ipAddr) { //convert to dotted decimal notation: String ipAddrStr = ""; for (int i=0; i<ipAddr.length; i++) { if (i > 0) { ipAddrStr += "."; } ipAddrStr += ipAddr[i]&0xFF; } return ipAddrStr; } }
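The getDottedDecimalIP helper above masks each signed Java byte with 0xFF before appending it; without the mask, octets above 127 would print as negative numbers. Below is a minimal standalone sketch of that conversion; the class name and the sample address bytes are assumptions for illustration only.

// Illustrative sketch only: shows why getDottedDecimalIP masks each byte with 0xFF.
public class DottedDecimalSketch {
    static String toDottedDecimal(byte[] ipAddr) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < ipAddr.length; i++) {
            if (i > 0) {
                sb.append('.');
            }
            // (byte) 192 is stored as -64; & 0xFF widens it back to the unsigned octet 192.
            sb.append(ipAddr[i] & 0xFF);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        byte[] sample = {(byte) 192, (byte) 168, 1, 7}; // assumed example address
        System.out.println(toDottedDecimal(sample));    // prints 192.168.1.7
        System.out.println(sample[0]);                  // prints -64: the raw signed byte
    }
}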
src/com/example/deadreckoning/LocateActivity.java
package com.example.deadreckoning; import java.net.Inet4Address; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import Network.Client; import Network.Server; import Network.WiFiDirectBroadcastReceiver; import android.app.Activity; import android.content.IntentFilter; import android.hardware.Sensor; import android.hardware.SensorEvent; import android.hardware.SensorEventListener; import android.hardware.SensorListener; import android.hardware.SensorManager; import android.location.Location; import android.location.LocationManager; import android.net.wifi.p2p.WifiP2pManager; import android.net.wifi.p2p.WifiP2pManager.Channel; import android.widget.TextView; import android.location.Criteria; import android.location.LocationListener; import android.net.wifi.p2p.WifiP2pConfig; import android.net.wifi.p2p.WifiP2pDevice; import android.net.wifi.p2p.WifiP2pDeviceList; import android.net.wifi.p2p.WifiP2pManager.ActionListener; import android.net.wifi.p2p.WifiP2pManager.PeerListListener; import android.os.Bundle; import android.content.Context; import android.util.Log; import android.view.Menu; import android.view.View; import android.widget.EditText; import android.widget.Toast; public class LocateActivity extends Activity implements SensorEventListener { private SensorManager sensorManager; double ax,ay,az; TextView axText; TextView ayText; TextView azText; //Location location; //TextView latitude; //TextView longitude; //LocationManager locationManager; //private String provider; //boolean threadStarted = true; ///////////////////////////////////////////// //private final IntentFilter intentFilter = new IntentFilter(); //Channel mChannel; //WifiP2pManager mManager; //WiFiDirectBroadcastReceiver receiver; //private PeerListListener myPeerListListener; //private WifiP2pDeviceList deviceList; //private List<WifiP2pDevice> peers = new ArrayList<WifiP2pDevice>(); /*PeerListListener peerListListener = new PeerListListener() { @Override public void onPeersAvailable(WifiP2pDeviceList peerList) { Log.i("ss12", "onPeersAvailable - main"); // Out with the old, in with the new. peers.clear(); peers.addAll(peerList.getDeviceList()); // If an AdapterView is backed by this data, notify it // of the change. For instance, if you have a ListView of available // peers, trigger an update. //((WiFiPeerListAdapter) getListAdapter()).notifyDataSetChanged(); Log.i("ss12", peers.toString()); if (peers.size() == 0) { Log.i("ss12", "No devices found"); return; } else { WifiP2pDevice device = peers.get(0); WifiP2pConfig config = new WifiP2pConfig(); config.deviceAddress = device.deviceAddress; mManager.connect(mChannel, config, new ActionListener() { @Override public void onSuccess() { //success logic String ip = getDottedDecimalIP(getLocalIPAddress()); @SuppressWarnings("unused") Client client = new Client(1247, ip); Log.i("ss12", "Holy Shit we connected to the device via direct wifi"); } @Override public void onFailure(int reason) { Toast.makeText(LocateActivity.this, "Connect failed. 
Retry.", Toast.LENGTH_SHORT).show(); } }); } } };*/ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_location); axText = (TextView) findViewById(R.id.ax); ayText = (TextView) findViewById(R.id.ay); azText = (TextView) findViewById(R.id.az); sensorManager=(SensorManager) getSystemService(SENSOR_SERVICE); sensorManager.registerListener(this, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_NORMAL); // Indicates a change in the Wi-Fi P2P status. /*intentFilter.addAction(WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION); // Indicates a change in the list of available peers. intentFilter.addAction(WifiP2pManager.WIFI_P2P_PEERS_CHANGED_ACTION); // Indicates the state of Wi-Fi P2P connectivity has changed. intentFilter.addAction(WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION); // Indicates this device's details have changed. intentFilter.addAction(WifiP2pManager.WIFI_P2P_THIS_DEVICE_CHANGED_ACTION); mManager = (WifiP2pManager) getSystemService(Context.WIFI_P2P_SERVICE); mChannel = mManager.initialize(this, getMainLooper(), null); receiver = new WiFiDirectBroadcastReceiver(mManager, mChannel, this, peerListListener); //////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////// latitude = (TextView) findViewById(R.id.latitude); longitude = (TextView) findViewById(R.id.longitude); locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE); Criteria criteria = new Criteria(); //criteria.setAccuracy(Criteria.ACCURACY_FINE); provider = locationManager.getBestProvider(criteria, false); location = locationManager.getLastKnownLocation(provider); Log.i("ss12", provider); if (location != null) { System.out.println("Provider " + provider + " has been selected."); onLocationChanged(location); } else { latitude.setText("Location not available"); longitude.setText("Location not available"); }*/ } //// not using this method anymore /*@Override public void onLocationChanged(Location location) { double lat = location.getLatitude(); double lng = location.getLongitude(); latitude.setText(String.valueOf(lat)); longitude.setText(String.valueOf(lng)); } @Override public void onStatusChanged(String provider, int status, Bundle extras) { // TODO Auto-generated method stub } @Override public void onProviderEnabled(String provider) { Toast.makeText(this, "Enabled new provider " + provider, Toast.LENGTH_SHORT).show(); } @Override public void onProviderDisabled(String provider) { Toast.makeText(this, "Disabled provider " + provider, Toast.LENGTH_SHORT).show(); }*/ @Override protected void onResume() { super.onResume(); sensorManager.registerListener(this, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_NORMAL); //receiver = new WiFiDirectBroadcastReceiver(mManager, mChannel, this, peerListListener); //registerReceiver(receiver, intentFilter); //locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 10, 0, this); } @Override protected void onPause() { super.onPause(); //unregisterReceiver(receiver); //locationManager.removeUpdates(this); } @Override public void onAccuracyChanged(Sensor sensor, int accuracy) { // can be safely ignored for this demo } @Override public void onSensorChanged(SensorEvent event) { if (event.sensor.getType()==Sensor.TYPE_ACCELEROMETER){ ax=event.values[0]; ay=event.values[1]; az=event.values[2]; } 
axText.setText(Double.toString(ax)); ayText.setText(Double.toString(ay)); azText.setText(Double.toString(az)); //Log.i("ss12", "ax: " + ax); //Log.i("ss12", "ay: " + ay); //Log.i("ss12", "az: " + az); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return true; } //public void connect(View view) { /*WifiP2pManager manager = (WifiP2pManager) this.getSystemService(Context.WIFI_P2P_SERVICE); Channel channel = manager.initialize(this, this.getMainLooper(), null); try { Method method1 = manager.getClass().getMethod("enableP2p", Channel.class); method1.invoke(manager, channel); //Toast.makeText(getActivity(), "method found", // Toast.LENGTH_SHORT).show(); } catch (Exception e) { //Toast.makeText(getActivity(), "method did not found", // Toast.LENGTH_SHORT).show(); } mManager.discoverPeers(mChannel, new WifiP2pManager.ActionListener() { @Override public void onSuccess() { Log.i("ss12", "discover peers started"); } @Override public void onFailure(int reasonCode) { Log.i("ss12", "discover peers - failed " + reasonCode); } });*/ //} private byte[] getLocalIPAddress() { try { for (Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces(); en.hasMoreElements();) { NetworkInterface intf = en.nextElement(); for (Enumeration<InetAddress> enumIpAddr = intf.getInetAddresses(); enumIpAddr.hasMoreElements();) { InetAddress inetAddress = enumIpAddr.nextElement(); if (!inetAddress.isLoopbackAddress()) { if (inetAddress instanceof Inet4Address) { // fix for Galaxy Nexus. IPv4 is easy to use :-) return inetAddress.getAddress(); } //return inetAddress.getHostAddress().toString(); // Galaxy Nexus returns IPv6 } } } } catch (SocketException ex) { //Log.e("AndroidNetworkAddressFactory", "getLocalIPAddress()", ex); } catch (NullPointerException ex) { //Log.e("AndroidNetworkAddressFactory", "getLocalIPAddress()", ex); } return null; } private String getDottedDecimalIP(byte[] ipAddr) { //convert to dotted decimal notation: String ipAddrStr = ""; for (int i=0; i<ipAddr.length; i++) { if (i > 0) { ipAddrStr += "."; } ipAddrStr += ipAddr[i]&0xFF; } return ipAddrStr; } }
Update
src/com/example/deadreckoning/LocateActivity.java
Update
Java
mit
0d45970d2517f814aa3a3630953a6ce5017529bb
0
gsdlab/chocosolver,gsdlab/chocosolver
package org.clafer.ast.compiler; import gnu.trove.list.array.TIntArrayList; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.clafer.ast.AstAbstractClafer; import org.clafer.ast.AstArithm; import org.clafer.ast.AstBoolArithm; import org.clafer.ast.AstBoolExpr; import org.clafer.ast.AstCard; import org.clafer.ast.AstClafer; import org.clafer.ast.AstCompare; import org.clafer.ast.AstConcat; import org.clafer.ast.AstConcreteClafer; import org.clafer.ast.AstConstant; import org.clafer.ast.AstConstraint; import org.clafer.ast.AstDecl; import org.clafer.ast.AstDifference; import org.clafer.ast.AstDowncast; import org.clafer.ast.AstException; import org.clafer.ast.AstExpr; import org.clafer.ast.AstExprVisitor; import org.clafer.ast.AstGlobal; import org.clafer.ast.AstIfThenElse; import org.clafer.ast.AstIntClafer; import org.clafer.ast.AstIntersection; import org.clafer.ast.AstJoin; import org.clafer.ast.AstJoinParent; import org.clafer.ast.AstJoinRef; import org.clafer.ast.AstLength; import org.clafer.ast.AstLocal; import org.clafer.ast.AstMembership; import org.clafer.ast.AstMinus; import org.clafer.ast.AstModel; import org.clafer.ast.AstNot; import org.clafer.ast.AstPrefix; import org.clafer.ast.AstQuantify; import org.clafer.ast.AstQuantify.Quantifier; import org.clafer.ast.AstRef; import org.clafer.ast.AstSetExpr; import org.clafer.ast.AstSetTest; import org.clafer.ast.AstStringClafer; import org.clafer.ast.AstStringConstant; import org.clafer.ast.AstSuffix; import org.clafer.ast.AstSum; import org.clafer.ast.AstTernary; import org.clafer.ast.AstThis; import org.clafer.ast.AstUnion; import org.clafer.ast.AstUpcast; import org.clafer.ast.AstUtil; import org.clafer.ast.Card; import org.clafer.ast.JoinSetWithStringException; import org.clafer.ast.analysis.AbstractOffsetAnalyzer; import org.clafer.ast.analysis.Analysis; import org.clafer.ast.analysis.Analyzer; import org.clafer.ast.analysis.CardAnalyzer; import org.clafer.ast.analysis.Format; import org.clafer.ast.analysis.FormatAnalyzer; import org.clafer.ast.analysis.GlobalCardAnalyzer; import org.clafer.ast.analysis.OptimizerAnalyzer; import org.clafer.ast.analysis.PartialIntAnalyzer; import org.clafer.ast.analysis.PartialSolution; import org.clafer.ast.analysis.PartialSolutionAnalyzer; import org.clafer.ast.analysis.ScopeAnalyzer; import org.clafer.ast.analysis.SymmetryAnalyzer; import org.clafer.ast.analysis.Type; import org.clafer.ast.analysis.TypeAnalyzer; import org.clafer.collection.DisjointSets; import org.clafer.collection.Either; import org.clafer.collection.Pair; import org.clafer.collection.Triple; import org.clafer.common.Check; import org.clafer.common.Util; import org.clafer.graph.GraphUtil; import org.clafer.graph.KeyGraph; import org.clafer.graph.Vertex; import org.clafer.ir.IrBoolExpr; import org.clafer.ir.IrBoolVar; import org.clafer.ir.IrDomain; import org.clafer.ir.IrExpr; import org.clafer.ir.IrIntExpr; import org.clafer.ir.IrIntVar; import org.clafer.ir.IrModule; import org.clafer.ir.IrSetExpr; import org.clafer.ir.IrSetVar; import org.clafer.ir.IrStringExpr; import org.clafer.ir.IrStringVar; import org.clafer.ir.IrUtil; import org.clafer.ir.IrVar; import static org.clafer.ir.Irs.*; import org.clafer.objective.Objective; import org.clafer.scope.Scope; /** * Compile from AST to IR. 
* * @author jimmy */ public class AstCompiler { public static final Analyzer[] DefaultAnalyzers = new Analyzer[]{ new TypeAnalyzer(), new GlobalCardAnalyzer(), new ScopeAnalyzer(), new CardAnalyzer(), new FormatAnalyzer(), new AbstractOffsetAnalyzer(), new OptimizerAnalyzer(), new PartialSolutionAnalyzer(), new PartialIntAnalyzer(), new SymmetryAnalyzer(), // Reanalyze types new TypeAnalyzer() }; private final Analysis analysis; private final IrModule module; private final List<Symmetry> symmetries = new ArrayList<>(); private final boolean fullSymmetryBreaking; private AstCompiler(AstModel model, Scope scope, IrModule module, Analyzer[] analyzers, boolean fullSymmetryBreaking) { this(model, scope, new Objective[0], module, analyzers, fullSymmetryBreaking); } private AstCompiler(AstModel model, Scope scope, Objective[] objectives, IrModule module, Analyzer[] analyzers, boolean fullSymmetryBreaking) { this.analysis = Analysis.analyze(model, scope, objectives, analyzers); this.module = Check.notNull(module); this.fullSymmetryBreaking = fullSymmetryBreaking; } public static AstSolutionMap compile(AstModel in, Scope scope, IrModule out, boolean fullSymmetryBreaking) { return compile(in, scope, out, DefaultAnalyzers, fullSymmetryBreaking); } public static AstSolutionMap compile(AstModel in, Scope scope, IrModule out, Analyzer[] analyzers, boolean fullSymmetryBreaking) { AstCompiler compiler = new AstCompiler(in, scope, out, analyzers, fullSymmetryBreaking); return compiler.compile(); } public static AstSolutionMap compile(AstModel in, Scope scope, Objective[] objectives, IrModule out, boolean fullSymmetryBreaking) { return compile(in, scope, objectives, out, DefaultAnalyzers, fullSymmetryBreaking); } public static AstSolutionMap compile(AstModel in, Scope scope, Objective[] objectives, IrModule out, Analyzer[] analyzers, boolean fullSymmetryBreaking) { AstCompiler compiler = new AstCompiler(in, scope, objectives, out, analyzers, fullSymmetryBreaking); return compiler.compile(); } /** * @return the order to initialize regular variables */ private List<AstClafer> initOrder() { List<AstAbstractClafer> abstractClafers = analysis.getAbstractClafers(); List<AstConcreteClafer> concreteClafers = analysis.getConcreteClafers(); KeyGraph<AstClafer> dependency = new KeyGraph<>(); for (AstAbstractClafer abstractClafer : abstractClafers) { Vertex<AstClafer> node = dependency.getVertex(abstractClafer); for (AstClafer sub : abstractClafer.getSubs()) { node.addNeighbour(dependency.getVertex(sub)); } } for (AstConcreteClafer concreteClafer : concreteClafers) { Vertex<AstClafer> node = dependency.getVertex(concreteClafer); if (Format.ParentGroup.equals(getFormat(concreteClafer))) { /* * Low group does not create the dependency because it does not * require the parent to initialize first. This allows for * models like the one below. * * abstract Path * p : Path ? * * If the "?" were a fixed cardinality instead, then an * exception will occur, but the model would not be satisfiable * anyways for any fixed cardinality greater than zero. */ node.addNeighbour(dependency.getVertex(concreteClafer.getParent())); } } List<Set<AstClafer>> components = GraphUtil.computeStronglyConnectedComponents(dependency); List<AstClafer> clafers = new ArrayList<>(); for (Set<AstClafer> component : components) { if (component.size() != 1) { // See the above comment about low groups. 
throw new AstException("Cannot satisfy the cycle " + component); } clafers.addAll(component); } return clafers; } private AstSolutionMap compile() { IrSetVar rootSet = constant(new int[]{0}); sets.put(analysis.getModel(), rootSet); siblingSets.put(analysis.getModel(), new IrSetVar[]{rootSet}); memberships.put(analysis.getModel(), new IrBoolExpr[]{True}); List<AstClafer> clafers = initOrder(); for (AstClafer clafer : clafers) { if (clafer instanceof AstConcreteClafer && !AstUtil.isRoot((AstConcreteClafer) clafer)) { initConcrete((AstConcreteClafer) clafer); } else if (clafer instanceof AstAbstractClafer) { initAbstract((AstAbstractClafer) clafer); } } for (AstClafer clafer : clafers) { if (clafer instanceof AstConcreteClafer && !AstUtil.isRoot((AstConcreteClafer) clafer)) { constrainConcrete((AstConcreteClafer) clafer); } else if (clafer instanceof AstAbstractClafer) { constrainAbstract((AstAbstractClafer) clafer); } constrainGroupCardinality(clafer); } Map<AstConstraint, IrBoolVar> softVars = new HashMap<>(); for (AstConstraint constraint : analysis.getConstraints()) { AstClafer clafer = constraint.getContext(); int scope = getScope(clafer); if (analysis.isHard(constraint)) { for (int j = 0; j < scope; j++) { ExpressionCompiler expressionCompiler = new ExpressionCompiler(j); IrBoolExpr thisConstraint = expressionCompiler.compile(analysis.getExpr(constraint)); module.addConstraint(implies(memberships.get(clafer)[j], thisConstraint)); } } else { IrBoolVar softVar = bool(constraint.toString()); softVars.put(constraint, softVar); for (int j = 0; j < scope; j++) { ExpressionCompiler expressionCompiler = new ExpressionCompiler(j); IrBoolExpr thisConstraint = expressionCompiler.compile(analysis.getExpr(constraint)); module.addConstraint(ifOnlyIf(softVar, implies(memberships.get(clafer)[j], thisConstraint))); } module.addVariable(softVar); } } IrIntExpr softSum = add(softVars.values()); IrIntVar sumSoftVars = domainInt("SumSoftVar", softSum.getDomain()); module.addConstraint(equal(sumSoftVars, softSum)); for (IrSetVar[] childSet : siblingSets.values()) { module.addVariables(childSet); } for (IrIntVar[] refs : refPointers.values()) { module.addVariables(refs); } for (IrStringVar[] refs : refStrings.values()) { module.addVariables(refs); } for (Set<AstClafer> component : analysis.getClafersInParentAndSubOrder()) { if (component.size() > 1) { /* * Add additional constraints for to handle cases where a * descendent inherits an ancestor. * * Let A be an abstract Clafer and B be a descendant of A that * inherits A. Let F be a mapping every B to its ancestor A. * Enforce that F is an acyclic function. 
*/ List<AstClafer> types = new ArrayList<>(); for (AstClafer clafer : component) { types.add(clafer); } AstAbstractClafer unionType = (AstAbstractClafer) AstUtil.getLowestCommonSupertype(types); IrIntExpr[] edges = new IrIntExpr[getScope(unionType)]; IrIntExpr uninitialized = constant(edges.length); Arrays.fill(edges, uninitialized); for (AstClafer clafer : component) { if (clafer instanceof AstConcreteClafer) { AstConcreteClafer concreteChild = (AstConcreteClafer) clafer; IrBoolExpr[] members = memberships.get(concreteChild); IrIntExpr[] parents = parentPointers.get(concreteChild); int offset = getOffset(unionType, concreteChild); int parentOffset = getOffset(unionType, concreteChild.getParent()); for (int i = 0; i < members.length; i++) { assert edges[i + offset] == uninitialized; IrIntExpr value = ternary(members[i], // Add the offset to upcast the parent pointer add(parents[i], parentOffset), uninitialized); IrIntVar edge = domainInt( "Edge@" + concreteChild + "->" + concreteChild.getParent() + "#" + i, value.getDomain()); module.addConstraint(equal(edge, value)); edges[i + offset] = edge; } } } for (AstClafer clafer : component) { if (clafer instanceof AstConcreteClafer) { IrBoolExpr[] members = memberships.get(clafer); int offset = getOffset(unionType, clafer); for (int i = 0; i < members.length; i++) { for (int j = i + 1; j < members.length; j++) { /* * Symmetry breaking. The lower indexed element * appears on top of the higher indexed element. */ module.addConstraint(unreachable(edges, i + offset, j + offset)); } } } } module.addConstraint(acyclic(edges)); } } ExpressionCompiler expressionCompiler = new ExpressionCompiler(0); Map<Objective, IrIntVar> objectiveVars = new HashMap<>(); Map<Objective, AstSetExpr> objectives = analysis.getObjectiveExprs(); for (Entry<Objective, AstSetExpr> objective : objectives.entrySet()) { IrIntExpr objectiveExpr = expressionCompiler.asInt( expressionCompiler.compile(objective.getValue())); IrIntVar objectiveVar = domainInt("Objective" + objective.getKey(), objectiveExpr.getDomain()); module.addConstraint(equal(objectiveVar, objectiveExpr)); objectiveVars.put(objective.getKey(), objectiveVar); } KeyGraph<Either<IrExpr, IrBoolExpr>> dependencies = new KeyGraph<>(); for (Symmetry symmetry : symmetries) { IrBoolExpr constraint = symmetry.getConstraint(); Vertex<Either<IrExpr, IrBoolExpr>> constraintNode = dependencies.getVertex(Either.<IrExpr, IrBoolExpr>right(constraint)); for (IrExpr output : symmetry.getOutput()) { dependencies.getVertex(Either.<IrExpr, IrBoolExpr>left(output)) .addNeighbour(constraintNode); } for (IrExpr input : symmetry.getInput()) { constraintNode.addNeighbour( dependencies.getVertex(Either.<IrExpr, IrBoolExpr>left(input))); } } Set<IrVar> variables = module.getVariables(); Set<Vertex<Either<IrExpr, IrBoolExpr>>> start = new HashSet<>(); for (IrVar variable : variables) { Vertex<Either<IrExpr, IrBoolExpr>> vertex = dependencies.getVertexIfPresent(Either.<IrExpr, IrBoolExpr>left(variable)); if (vertex != null) { start.add(vertex); } } Set<Either<IrExpr, IrBoolExpr>> reachables = GraphUtil.reachable(start, dependencies); for (Either<IrExpr, IrBoolExpr> reachable : reachables) { if (reachable.isRight()) { module.addConstraint(reachable.getRight()); } } return new AstSolutionMap(analysis.getModel(), siblingSets, refPointers, refStrings, softVars, sumSoftVars, objectiveVars, analysis); } private void initConcrete(AstConcreteClafer clafer) { parentPointers.put(clafer, buildParentPointers(clafer)); buildRef(clafer); switch 
(getFormat(clafer)) { case LowGroup: initLowGroupConcrete(clafer); break; case ParentGroup: initParentGroupConcrete(clafer); break; default: throw new AstException(); } IrSetVar[] siblingSet = siblingSets.get(clafer); switch (siblingSet.length) { case 0: sets.put(clafer, EmptySet); break; case 1: sets.put(clafer, siblingSet[0]); break; default: IrSetExpr union = union(siblingSet, true); IrSetVar set = set(clafer.getName(), union.getEnv(), union.getKer(), union.getCard()); module.addConstraint(equal(set, union)); sets.put(clafer, set); break; } if (fullSymmetryBreaking) { int scope = getScope(clafer); int parentScope = getScope(clafer.getParent()); IrIntExpr[][] index; AstRef ref = AstUtil.getInheritedRef(clafer); // If the Clafer either needs children or reference to be introduce symmetry. if (analysis.hasInteritedBreakableChildren(clafer) || (ref != null && analysis.isBreakableRef(ref)) || analysis.isInheritedBreakableTarget(clafer)) { index = new IrIntExpr[parentScope][getCard(clafer).getHigh()]; for (int i = 0; i < index.length; i++) { for (int j = 0; j < index[i].length; j++) { index[i][j] = boundInt(clafer.getName() + "@Index#" + i + "#" + j, -1, scope); } } } else { // Optimize for nonsymmetric nodes. Don't compute the smallest indices, // just use the cardinalities. IrSetVar[] childSet = siblingSets.get(clafer); index = new IrIntExpr[childSet.length][]; for (int i = 0; i < index.length; i++) { index[i] = new IrIntExpr[]{card(childSet[i])}; } } indices.put(clafer, index); } } private void constrainConcrete(AstConcreteClafer clafer) { IrSetExpr[] siblingSet = siblingSets.get(clafer); IrIntExpr[] parents = parentPointers.get(clafer); if (!getPartialSolution(clafer).parentSolutionKnown()) { if (getGlobalCard(clafer).isExact()) { // No unused module.addConstraint(intChannel(parents, siblingSet)); } else { IrSetVar unused = set(clafer.getName() + "@Unused", getPartialSolution(clafer).getUnknownClafers()); module.addConstraint(intChannel(parents, Util.snoc(siblingSet, unused))); } } Pair<AstRef, Integer> refPair = analysis.getInheritedRefId(clafer); AstRef ref = refPair == null ? null : refPair.getFst(); int refOffset = refPair == null ? 0 : refPair.getSnd().intValue(); int scope = getScope(clafer); IrBoolExpr[] members = memberships.get(clafer); // Two ids a and b are in the same partition if symmetry breaking guarantees // that ref[a] and ref[b] ard different. DisjointSets<Integer> refPartitions = null; // If the Clafer either needs children or reference to be introduce symmetry. 
if (fullSymmetryBreaking && scope > 1 && (analysis.hasInteritedBreakableChildren(clafer) || (ref != null && analysis.isBreakableRef(ref)) || analysis.isInheritedBreakableTarget(clafer))) { IrIntExpr[] weight = new IrIntExpr[scope]; IrIntExpr[][] index = indices.get(clafer); analysis.getHierarcyIds(clafer, refOffset); IrIntExpr[][] childIndices = new IrIntExpr[weight.length][]; List<Pair<AstClafer, Integer>> offsets = analysis.getHierarcyOffsets(clafer); Collections.reverse(offsets); boolean[] breakableRefIds = new boolean[childIndices.length]; for (int i = 0; i < childIndices.length; i++) { List<IrIntExpr> childIndex = new ArrayList<>(); for (Pair<AstClafer, Integer> offset : offsets) { for (AstConcreteClafer child : analysis.getBreakableChildren(offset.getFst())) { childIndex.addAll(Arrays.asList(indices.get(child)[i + offset.getSnd()])); } } if (ref != null && analysis.isBreakableRef(ref)) { breakableRefIds[i] = analysis.isBreakableRefId(ref, i + refOffset); if (ref.getTargetType() instanceof AstStringClafer) { childIndex.addAll(Arrays.asList(IrUtil.pad( refStrings.get(ref)[i + refOffset].getCharVars(), analysis.getScope().getStringLength()))); if (ref.isUnique()) { childIndex.add(members[i]); } } else { // References need a positive weight, so to use their value as // a weight, need to offset it so that it always positive. childIndex.add( breakableRefIds[i] // The id of the target is the weight. ? minus(refPointers.get(ref)[i + refOffset]) // If analysis says that this id does not need breaking // then give it a constant weight. Any constant is fine. : Zero); } } if (analysis.isInheritedBreakableTarget(clafer)) { for (Pair<AstClafer, Integer> hierarchy : analysis.getHierarcyIds(clafer, i)) { for (AstRef sourceRef : analysis.getBreakableTarget(hierarchy.getFst())) { IrIntVar[] sourceRefs = refPointers.get(sourceRef); IrIntExpr[] array = new IrIntExpr[sourceRefs.length]; System.arraycopy(sourceRefs, 0, array, 0, array.length); IrIntExpr count = count(hierarchy.getSnd().intValue(), array); IrIntVar countVar = domainInt("CountVar" + countCount++, count.getDomain()); module.addConstraint(equal(countVar, count)); childIndex.add(countVar); } } } childIndices[i] = childIndex.toArray(new IrIntExpr[childIndex.size()]); } for (int i = 0; i < weight.length; i++) { weight[i] = childIndices[i].length == 0 ? Zero : boundInt(clafer.getName() + "#" + i + "@Weight", 0, scope - 1); } if (getScope(clafer.getParent()) > 1) { symmetries.add(new LexChainChannel(childIndices, weight)); for (int i = 0; i < siblingSet.length; i++) { symmetries.add(new FilterString(siblingSet[i], weight, index[i])); } } if (getCard(clafer).getHigh() > 1) { for (int i = 0; i < parents.length - 1; i++) { if (ref != null && analysis.isBreakableRef(ref) && ref.isUnique()) { assert childIndices[i + 1].length == childIndices[i].length; if (breakableRefIds[i]) { if (refPartitions == null) { refPartitions = new DisjointSets<>(); } refPartitions.union(i, i + 1); } // Refs are unique and part of the weight. It is impossible for // two weights to be the same. Enforce a strict order. 
module.addConstraint(implies(and(members[i], equal(parents[i], parents[i + 1])), sortStrict(childIndices[i + 1], childIndices[i]))); } else { module.addConstraint(implies(equal(parents[i], parents[i + 1]), sort(childIndices[i + 1], childIndices[i]))); } } } } if (ref != null) { AstClafer tar = ref.getTargetType(); if (tar instanceof AstStringClafer) { IrStringVar[] strings = Arrays.copyOfRange(refStrings.get(ref), refOffset, refOffset + getScope(clafer)); if (ref.isUnique()) { if (getCard(clafer).getHigh() > 1) { for (int i = 0; i < strings.length - 1; i++) { for (int j = i + 1; j < strings.length; j++) { if (refPartitions == null || !refPartitions.connected(i, j)) { module.addConstraint( implies(and(members[i], members[j], equal(parents[i], parents[j])), notEqual(strings[i], strings[j]))); } } } } } assert strings.length == members.length; for (int i = 0; i < members.length; i++) { module.addConstraint(implies(not(members[i]), equal(strings[i], EmptyString))); } } else { IrIntVar[] refs = Arrays.copyOfRange(refPointers.get(ref), refOffset, refOffset + getScope(clafer)); if (ref.isUnique()) { if (getCard(clafer).getHigh() > 1) { for (int i = 0; i < refs.length - 1; i++) { for (int j = i + 1; j < refs.length; j++) { if (refPartitions == null || !refPartitions.connected(i, j)) { module.addConstraint( implies(and(members[i], equal(parents[i], parents[j])), notEqual(refs[i], refs[j]))); } } } } IrIntExpr size = ref.getTargetType() instanceof AstIntClafer ? constant(analysis.getScope().getIntHigh() - analysis.getScope().getIntLow() + 1) : card(sets.get(ref.getTargetType())); for (IrSetExpr sibling : siblingSet) { module.addConstraint(lessThanEqual(card(sibling), size)); } } assert refs.length == members.length; for (int i = 0; i < members.length; i++) { // The ref pointers must point to the special uninitialized value // if the Clafer owning the ref pointers does not exists. module.addConstraint(ifOnlyIf(not(members[i]), equal(refs[i], getUninitalizedRef(tar)))); } if (!ref.getTargetType().isPrimitive()) { IrSetVar targetSet = sets.get(ref.getTargetType()); for (int i = 0; i < refs.length; i++) { // The ref pointers must point to a target that exists. 
module.addConstraint(ifOnlyIf(members[i], member(refs[i], targetSet))); } } } } switch (getFormat(clafer)) { case LowGroup: constrainLowGroupConcrete(clafer); break; case ParentGroup: constrainParentGroupConcrete(clafer); break; default: throw new AstException(); } } private void constrainGroupCardinality(AstClafer clafer) { Card groupCard = clafer.getGroupCard(); List<AstConcreteClafer> children = clafer.getChildren(); if (groupCard.isBounded()) { IrBoolExpr[] members = memberships.get(clafer); IrSetVar[][] childrenSets = new IrSetVar[children.size()][]; for (int i = 0; i < childrenSets.length; i++) { AstConcreteClafer child = children.get(i); childrenSets[i] = siblingSets.get(child); } int scope = getScope(clafer); for (int i = 0; i < scope; i++) { IrIntExpr[] cards = new IrIntExpr[childrenSets.length]; for (int j = 0; j < cards.length; j++) { cards[j] = card(childrenSets[j][i]); } module.addConstraint(implies(members[i], constrainCard(add(cards), groupCard))); } } } private void initLowGroupConcrete(AstConcreteClafer clafer) { PartialSolution partialSolution = getPartialSolution(clafer); IrSetVar[] childSet = buildChildSet(clafer); siblingSets.put(clafer, childSet); IrBoolExpr[] members = new IrBoolExpr[getScope(clafer)]; for (int i = 0; i < members.length; i++) { if (partialSolution.hasClafer(i)) { members[i] = True; } else { members[i] = bool(clafer.getName() + "@Membership#" + i); if (childSet.length == 1 && members.length == 1) { module.addConstraint(equal(members[i], card(childSet[0]))); } } } Check.noNulls(members); memberships.put(clafer, members); } private void constrainLowGroupConcrete(AstConcreteClafer clafer) { IrBoolExpr[] members = memberships.get(clafer); IrSetVar set = sets.get(clafer); IrBoolExpr[] parentMembership = memberships.get(clafer.getParent()); Card card = getCard(clafer); IrSetVar[] childSet = siblingSets.get(clafer); if (fullSymmetryBreaking) { module.addConstraint(selectN(members, card(set))); module.addConstraint(sort(childSet)); } for (int i = 0; i < parentMembership.length; i++) { IrBoolExpr parentMember = parentMembership[i]; if (card.isBounded()) { // Enforce cardinality. module.addConstraint(implies(parentMember, constrainCard(card(childSet[i]), card))); } module.addConstraint(implies(not(parentMember), equal(childSet[i], EmptySet))); } if (!(childSet.length == 1 && members.length == 1)) { module.addConstraint(boolChannel(members, set)); } /** * What is this optimization? * * Force the lower number atoms to choose lower number parents. For * example consider the following Clafer model: * * <pre> * Person 2 * Hand 2 * </pre> * * The constraint forbids the case where Hand0 belongs to Person1 and * Hand1 belongs to Person0. Otherwise, the children can swap around * creating many isomorphic solutions. 
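         * Roughly, the sort over the parent pointers below implies
         * parent(Hand0) <= parent(Hand1) in the example above.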
*/ if (fullSymmetryBreaking) { module.addConstraint(sort(parentPointers.get(clafer))); } } private void initParentGroupConcrete(AstConcreteClafer clafer) { PartialSolution partialParentSolution = getPartialParentSolution(clafer); IrSetVar[] children = new IrSetVar[partialParentSolution.size()]; assert getCard(clafer).getLow() == getCard(clafer).getHigh(); int lowCard = getCard(clafer).getLow(); for (int i = 0; i < children.length; i++) { if (partialParentSolution.hasClafer(i)) { children[i] = constant(Util.fromTo(i * lowCard, i * lowCard + lowCard)); } else { children[i] = set(clafer.getName() + "#" + i, Util.fromTo(i * lowCard, i * lowCard + lowCard)); } } siblingSets.put(clafer, children); IrBoolExpr[] members = new IrBoolExpr[getScope(clafer)]; IrBoolExpr[] parentMembership = memberships.get(clafer.getParent()); if (lowCard == 1) { if (members.length == parentMembership.length) { members = parentMembership; } else { System.arraycopy(parentMembership, 0, members, 0, parentMembership.length); Arrays.fill(members, parentMembership.length, members.length, False); } } else { for (int i = 0; i < parentMembership.length; i++) { for (int j = 0; j < lowCard; j++) { members[i * lowCard + j] = parentMembership[i]; } } Arrays.fill(members, parentMembership.length * lowCard, members.length, False); } Check.noNulls(members); memberships.put(clafer, members); } private void constrainParentGroupConcrete(AstConcreteClafer clafer) { PartialSolution partialParentSolution = getPartialParentSolution(clafer); IrSetVar[] children = siblingSets.get(clafer); assert getCard(clafer).getLow() == getCard(clafer).getHigh(); int lowCard = getCard(clafer).getLow(); for (int i = 0; i < children.length; i++) { if (!partialParentSolution.hasClafer(i)) { if (lowCard == 1) { module.addConstraint(equal(memberships.get(clafer.getParent())[i], card(children[i]))); } module.addConstraint(implies(memberships.get(clafer.getParent())[i], equal(children[i], constant(Util.fromTo(i * lowCard, i * lowCard + lowCard))))); module.addConstraint(implies(not(memberships.get(clafer.getParent())[i]), equal(children[i], EmptySet))); } } } private void initAbstract(AstAbstractClafer clafer) { IrSetVar[] subSets = new IrSetVar[clafer.getSubs().size()]; IrBoolExpr[] members = new IrBoolExpr[getScope(clafer)]; for (int i = 0; i < subSets.length; i++) { AstClafer sub = clafer.getSubs().get(i); subSets[i] = sets.get(sub); IrBoolExpr[] subMembers = memberships.get(sub); int offset = getOffset(clafer, sub); for (int j = 0; j < subMembers.length; j++) { assert members[offset + j] == null; members[offset + j] = Check.notNull(subMembers[j]); } } if (subSets.length == 1) { sets.put(clafer, sets.get(clafer.getSubs().get(0))); } else { TIntArrayList env = new TIntArrayList(); TIntArrayList ker = new TIntArrayList(); for (int i = 0; i < members.length; i++) { if (IrUtil.isTrue(members[i])) { ker.add(i); } if (!IrUtil.isFalse(members[i])) { env.add(i); } } IrSetVar unionSet = set(clafer.getName(), env.toArray(), ker.toArray()); if (!AstUtil.isTypeRoot(clafer)) { module.addConstraint(boolChannel(members, unionSet)); } sets.put(clafer, unionSet); } Check.noNulls(members); memberships.put(clafer, members); buildRef(clafer); } private void constrainAbstract(AstAbstractClafer clafer) { // Do nothing. 
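        // The abstract Clafer's memberships and set were already built from (and
        // channelled to) its subclafers in initAbstract, so nothing more is needed here.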
} private final Map<AstClafer, IrSetVar> sets = new HashMap<>(); private final Map<AstClafer, IrSetVar[]> siblingSets = new HashMap<>(); private final Map<AstClafer, IrBoolExpr[]> memberships = new HashMap<>(); private final Map<AstConcreteClafer, IrIntVar[]> parentPointers = new HashMap<>(); private final Map<AstRef, IrIntVar[]> refPointers = new HashMap<>(); private final Map<AstRef, IrStringVar[]> refStrings = new HashMap<>(); private final Map<AstClafer, IrIntExpr[][]> indices = new HashMap<>(); private int countCount = 0; private int sumCount = 0; private int localCount = 0; private class ExpressionCompiler implements AstExprVisitor<Void, IrExpr> { private final int thisId; private final Map<AstLocal, IrIntExpr> locals = new HashMap<>(); private ExpressionCompiler(int thisId) { this.thisId = thisId; } private IrExpr compile(AstExpr expr) { return expr.accept(this, null); } private IrExpr[] compile(AstExpr[] exprs) { IrExpr[] compiled = new IrExpr[exprs.length]; for (int i = 0; i < compiled.length; i++) { compiled[i] = compile(exprs[i]); } return compiled; } private IrBoolExpr compile(AstBoolExpr expr) { return (IrBoolExpr) compile((AstExpr) expr); } private IrBoolExpr[] compile(AstBoolExpr[] exprs) { IrBoolExpr[] compiled = new IrBoolExpr[exprs.length]; for (int i = 0; i < compiled.length; i++) { compiled[i] = compile(exprs[i]); } return compiled; } private IrIntExpr asInt(IrExpr expr) { if (expr instanceof IrIntExpr) { return (IrIntExpr) expr; } if (expr instanceof IrSetExpr) { return sum((IrSetExpr) expr); } // Bug. throw new AstException("Should not have passed type checking."); } private IrIntExpr[] asInts(IrExpr[] exprs) { IrIntExpr[] ints = new IrIntExpr[exprs.length]; for (int i = 0; i < ints.length; i++) { ints[i] = asInt(exprs[i]); } return ints; } private IrSetExpr asSet(IrExpr expr) { if (expr instanceof IrIntExpr) { return singleton((IrIntExpr) expr); } if (expr instanceof IrSetExpr) { return (IrSetExpr) expr; } // Bug. throw new AstException("Should not have passed type checking."); } private IrSetExpr[] asSets(IrExpr[] exprs) { IrSetExpr[] sets = new IrSetExpr[exprs.length]; for (int i = 0; i < sets.length; i++) { sets[i] = asSet(exprs[i]); } return sets; } private IrStringExpr asString(IrExpr expr) { if (expr instanceof IrStringExpr) { return ((IrStringExpr) expr); } // Bug. 
throw new AstException("Should not have passed type checking."); } private IrStringExpr[] asString(IrExpr[] exprs) { IrStringExpr[] strings = new IrStringExpr[exprs.length]; for (int i = 0; i < strings.length; i++) { strings[i] = asString(exprs[i]); } return strings; } @Override public IrExpr visit(AstThis ast, Void a) { return constant(thisId); } @Override public IrExpr visit(AstGlobal ast, Void a) { IrSetVar global = sets.get(ast.getType()); if (global.getEnv().size() == 1) { int[] constant = IrUtil.getConstant(global); if (constant != null) { return constant(constant[0]); } } return global; } @Override public IrExpr visit(AstConstant ast, Void a) { int[] value = ast.getValue(); if (value.length == 1) { return constant(value[0]); } return constant(value); } @Override public IrExpr visit(AstStringConstant ast, Void a) { return constant(ast.getValue()); } @Override public IrExpr visit(AstJoin ast, Void a) { return doJoin(compile(ast.getLeft()), ast.getRight()); } private IrExpr doJoin(IrExpr left, AstConcreteClafer right) { if (left instanceof IrIntExpr) { IrIntExpr $intLeft = (IrIntExpr) left; if (Format.ParentGroup.equals(getFormat(right)) && getCard(right).getLow() == 1) { assert getCard(right).isExact(); return $intLeft; } // Why empty set? The "take" var can contain unused. return joinRelation(singleton($intLeft), Util.snoc(siblingSets.get(right), EmptySet), true); } else if (left instanceof IrSetExpr) { IrSetExpr $setLeft = (IrSetExpr) left; // Why empty set? The "take" var can contain unused. return joinRelation($setLeft, Util.snoc(siblingSets.get(right), EmptySet), true); } throw new AstException(); } @Override public IrExpr visit(AstJoinParent ast, Void a) { AstConcreteClafer childrenType = (AstConcreteClafer) getCommonSupertype(ast.getChildren()); IrExpr children = compile(ast.getChildren()); if (children instanceof IrIntExpr) { IrIntExpr intChildren = (IrIntExpr) children; switch (getFormat(childrenType)) { case ParentGroup: assert getCard(childrenType).isExact(); int lowCard = getCard(childrenType).getLow(); return div(intChildren, constant(lowCard)); case LowGroup: return element(parentPointers.get(childrenType), intChildren); } } else if (children instanceof IrSetExpr) { IrSetExpr setChildren = (IrSetExpr) children; return joinFunction(setChildren, parentPointers.get(childrenType), null); } throw new AstException(); } @Override public IrExpr visit(AstJoinRef ast, Void a) { AstSetExpr deref = ast.getDeref(); AstClafer derefType = getCommonSupertype(deref); Integer globalCardinality = null; IrExpr $deref; if (derefType.getRef().isUnique()) { if (deref instanceof AstJoin) { AstJoin join = (AstJoin) deref; IrExpr left = compile(join.getLeft()); $deref = doJoin(left, join.getRight()); globalCardinality = left instanceof IrSetExpr ? ((IrSetExpr) left).getCard().getHighBound() : 1; } else { $deref = compile(deref); if (derefType instanceof AstConcreteClafer) { globalCardinality = getScope(((AstConcreteClafer) derefType).getParent()); } } } else { $deref = compile(deref); } AstRef ref = derefType.getRef(); if ($deref instanceof IrIntExpr) { IrIntExpr $intDeref = (IrIntExpr) $deref; if (ref.getTargetType() instanceof AstStringClafer) { // Why empty string? The "take" var can contain unused. return element(Util.snoc(refStrings.get(ref), EmptyString), $intDeref); } else { // Why zero? The "take" var can contain unused. 
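                    // Roughly: snoc pads index scope(source) with Zero, which is the slot an
                    // "unused" id selects, so the element lookup never reads past the ref array.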
return element(Util.snoc(refPointers.get(ref), Zero), $intDeref); } } else if ($deref instanceof IrSetExpr) { IrSetExpr $setDeref = (IrSetExpr) $deref; if (ref.getTargetType() instanceof AstStringClafer) { throw new JoinSetWithStringException(ast, $setDeref.getCard()); } // Why zero? The "take" var can contain unused. return joinFunction($setDeref, Util.snoc(refPointers.get(ref), Zero), globalCardinality); } throw new AstException(); } @Override public IrExpr visit(AstCard ast, Void a) { IrExpr set = compile(ast.getSet()); if (set instanceof IrIntExpr) { return One; } return card((IrSetExpr) set); } @Override public IrExpr visit(AstNot ast, Void a) { return not(compile(ast.getExpr())); } @Override public IrExpr visit(AstMinus ast, Void a) { return minus(asInt(compile(ast.getExpr()))); } @Override public IrExpr visit(AstSetTest ast, Void a) { IrExpr left = compile(ast.getLeft()); IrExpr right = compile(ast.getRight()); if (left instanceof IrIntExpr && right instanceof IrIntExpr) { IrIntExpr intLeft = (IrIntExpr) left; IrIntExpr intRight = (IrIntExpr) right; switch (ast.getOp()) { case Equal: return equal(intLeft, intRight); case NotEqual: return notEqual(intLeft, intRight); } } if (left instanceof IrStringExpr && right instanceof IrStringExpr) { IrStringExpr stringLeft = (IrStringExpr) left; IrStringExpr stringRight = (IrStringExpr) right; switch (ast.getOp()) { case Equal: return equal(stringLeft, stringRight); case NotEqual: return notEqual(stringLeft, stringRight); } } switch (ast.getOp()) { case Equal: return equal(asSet(left), asSet(right)); case NotEqual: return notEqual(asSet(left), asSet(right)); default: throw new AstException(); } } @Override public IrExpr visit(AstCompare ast, Void a) { IrIntExpr left = asInt(compile(ast.getLeft())); IrIntExpr right = asInt(compile(ast.getRight())); switch (ast.getOp()) { case LessThan: return lessThan(left, right); case LessThanEqual: return lessThanEqual(left, right); case GreaterThan: return greaterThan(left, right); case GreaterThanEqual: return greaterThanEqual(left, right); default: throw new AstException(); } } @Override public IrExpr visit(AstArithm ast, Void a) { IrIntExpr[] operands = asInts(compile(ast.getOperands())); switch (ast.getOp()) { case Add: return add(operands); case Sub: return sub(operands); case Mul: IrIntExpr product = operands[0]; for (int i = 1; i < operands.length; i++) { product = mul(product, operands[i]); } return product; case Div: IrIntExpr quotient = operands[0]; for (int i = 1; i < operands.length; i++) { quotient = div(quotient, operands[i]); } return quotient; default: throw new AstException(); } } @Override public IrExpr visit(AstSum ast, Void a) { AstSetExpr set = ast.getSet(); AstClafer setType = getCommonSupertype(set); assert setType.hasRef(); IrIntVar[] refs = refPointers.get(setType.getRef()); int count = sumCount++; IrBoolExpr[] members; if (set instanceof AstGlobal) { members = memberships.get(setType); } else { IrExpr $set = compile(set); if ($set instanceof IrIntExpr) { IrIntExpr intSet = (IrIntExpr) $set; return element(refs, intSet); } IrSetExpr setSet = (IrSetExpr) $set; if (setSet.getEnv().isEmpty()) { return Zero; } assert setSet.getEnv().getLowBound() >= 0; members = new IrBoolExpr[setSet.getEnv().getHighBound() + 1]; for (int i = 0; i < members.length; i++) { members[i] = bool("SumMember" + count + "@" + i); } module.addConstraint(boolChannel(members, setSet)); } assert members.length <= refs.length; IrIntVar[] score = new IrIntVar[members.length]; for (int i = 0; i < members.length; i++) { 
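                // Each score[i] mirrors refs[i] when element i is in the summed set and is
                // fixed to 0 otherwise, so the returned add(score) is the sum of the refs
                // over exactly the members of the set.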
IrDomain domain = refs[i].getDomain(); int uninitializedRef = getUninitalizedRef(setType.getRef().getTargetType()); // Score's use 0 as the uninitialized value. domain = IrUtil.add(IrUtil.remove(domain, uninitializedRef), 0); score[i] = domainInt("Score" + count + "@" + i, domain); module.addConstraint(ifThenElse(members[i], equal(score[i], refs[i]), equal(score[i], 0))); } return add(score); } @Override public IrExpr visit(AstBoolArithm ast, Void a) { IrBoolExpr[] operands = compile(ast.getOperands()); switch (ast.getOp()) { case And: return and(operands); case IfOnlyIf: IrBoolExpr ifOnlyIf = operands[0]; for (int i = 1; i < operands.length; i++) { ifOnlyIf = ifOnlyIf(ifOnlyIf, operands[i]); } return ifOnlyIf; case Implies: IrBoolExpr implies = operands[0]; for (int i = 1; i < operands.length; i++) { implies = implies(implies, operands[i]); } return implies; case Or: return or(operands); case Xor: IrBoolExpr xor = operands[0]; for (int i = 1; i < operands.length; i++) { xor = xor(xor, operands[i]); } return xor; default: throw new AstException(); } } @Override public IrExpr visit(AstDifference ast, Void a) { return difference( asSet(compile(ast.getLeft())), asSet(compile(ast.getRight()))); } @Override public IrExpr visit(AstIntersection ast, Void a) { return intersection( asSet(compile(ast.getLeft())), asSet(compile(ast.getRight()))); } @Override public IrExpr visit(AstUnion ast, Void a) { return union( asSet(compile(ast.getLeft())), asSet(compile(ast.getRight()))); } @Override public IrExpr visit(AstMembership ast, Void a) { IrExpr member = compile(ast.getMember()); IrExpr set = compile(ast.getSet()); if (member instanceof IrIntExpr && set instanceof IrIntExpr) { return AstMembership.Op.In.equals(ast.getOp()) ? equal((IrIntExpr) member, (IrIntExpr) set) : notEqual((IrIntExpr) member, (IrIntExpr) set); } if (member instanceof IrIntExpr && set instanceof IrSetExpr) { return AstMembership.Op.In.equals(ast.getOp()) ? member((IrIntExpr) member, (IrSetExpr) set) : notMember((IrIntExpr) member, (IrSetExpr) set); } if (member instanceof IrSetExpr && set instanceof IrIntExpr) { return AstMembership.Op.In.equals(ast.getOp()) ? equal((IrSetExpr) member, singleton((IrIntExpr) set)) : notEqual((IrSetExpr) member, singleton((IrIntExpr) set)); } return AstMembership.Op.In.equals(ast.getOp()) ? 
subsetEq(asSet(member), asSet(set)) : not(subsetEq(asSet(member), asSet(set))); } @Override public IrExpr visit(AstTernary ast, Void a) { IrBoolExpr antecedent = compile(ast.getAntecedent()); IrExpr consequent = compile(ast.getConsequent()); IrExpr alternative = compile(ast.getAlternative()); if (consequent instanceof IrIntExpr && alternative instanceof IrIntExpr) { return ternary(antecedent, (IrIntExpr) consequent, (IrIntExpr) alternative); } return ternary(antecedent, asSet(consequent), asSet(alternative)); } @Override public IrExpr visit(AstIfThenElse ast, Void a) { return ifThenElse(compile(ast.getAntecedent()), compile(ast.getConsequent()), compile(ast.getAlternative())); } @Override public IrExpr visit(AstDowncast ast, Void a) { AstSetExpr base = ast.getBase(); int offset = getOffset((AstAbstractClafer) getCommonSupertype(base), ast.getTarget()); IrExpr $base = compile(ast.getBase()); if ($base instanceof IrIntExpr) { IrIntExpr intBase = (IrIntExpr) $base; return sub(intBase, constant(offset)); } return mask((IrSetExpr) $base, offset, offset + getScope(ast.getTarget())); } @Override public IrExpr visit(AstUpcast ast, Void a) { AstSetExpr base = ast.getBase(); int offset = getOffset(ast.getTarget(), getCommonSupertype(base)); IrExpr $base = compile(ast.getBase()); if ($base instanceof IrIntExpr) { IrIntExpr intBase = (IrIntExpr) $base; return add(intBase, constant(offset)); } return offset((IrSetExpr) $base, offset); } @Override public IrExpr visit(AstLocal ast, Void a) { return locals.get(ast); } private Triple<AstLocal, IrIntExpr, IrBoolExpr>[][] compileDecl(AstDecl decl) { IrExpr body = compile(decl.getBody()); if (body instanceof IrIntExpr) { IrIntExpr intBody = (IrIntExpr) body; @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[] labeledPermutation = new Triple[decl.getLocals().length]; for (int i = 0; i < labeledPermutation.length; i++) { labeledPermutation[i] = new Triple<AstLocal, IrIntExpr, IrBoolExpr>( decl.getLocals()[i], intBody, True); } @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[][] labeledSequence = new Triple[][]{labeledPermutation}; return labeledSequence; } if (body instanceof IrSetExpr) { IrSetExpr setBody = (IrSetExpr) body; IrDomain env = setBody.getEnv(); IrDomain ker = setBody.getKer(); // TODO: need a different strategy otherwise assert env.getLowBound() >= 0; @SuppressWarnings("unchecked") Pair<IrIntExpr, IrBoolExpr>[] members = new Pair[env.getHighBound() + 1]; for (int i = 0; i < env.getLowBound(); i++) { members[i] = new Pair<IrIntExpr, IrBoolExpr>(constant(i), False); } for (int i = env.getLowBound(); i <= env.getHighBound(); i++) { members[i] = new Pair<IrIntExpr, IrBoolExpr>(constant(i), ker.contains(i) ? True : bool(Util.intercalate("/", AstUtil.getNames(decl.getLocals())) + "#" + i + "#" + localCount++)); } module.addConstraint(boolChannel(Pair.mapSnd(members), setBody)); Pair<IrIntExpr, IrBoolExpr>[][] sequence = decl.isDisjoint() ? 
Util.permutations(members, decl.getLocals().length) : Util.sequence(members, decl.getLocals().length); @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[][] labeledSequence = new Triple[sequence.length][]; for (int i = 0; i < labeledSequence.length; i++) { Pair<IrIntExpr, IrBoolExpr>[] permutation = sequence[i]; @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[] labeledPermutation = new Triple[permutation.length]; for (int j = 0; j < labeledPermutation.length; j++) { labeledPermutation[j] = new Triple<>( decl.getLocals()[j], permutation[j]); } labeledSequence[i] = labeledPermutation; } return labeledSequence; } throw new AstException(); } // TODO optimize SOME @Override public IrExpr visit(AstQuantify ast, Void a) { AstDecl decls[] = ast.getDecls(); @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[][][] compiledDecls = new Triple[decls.length][][]; for (int i = 0; i < compiledDecls.length; i++) { compiledDecls[i] = compileDecl(decls[i]); } compiledDecls = Util.sequence(compiledDecls); List<IrBoolExpr> compiled = new ArrayList<>(); for (Triple<AstLocal, IrIntExpr, IrBoolExpr>[][] quants : compiledDecls) { List<IrBoolExpr> constraints = new ArrayList<>(); for (Triple<AstLocal, IrIntExpr, IrBoolExpr>[] quantDecls : quants) { for (Triple<AstLocal, IrIntExpr, IrBoolExpr> quantLocals : quantDecls) { constraints.add(quantLocals.getThd()); locals.put(quantLocals.getFst(), quantLocals.getSnd()); } } IrBoolExpr compiledBody = compile(ast.getBody()); if (Quantifier.All.equals(ast.getQuantifier())) { compiled.add(implies(and(constraints), compiledBody)); } else { constraints.add(compiledBody); compiled.add(and(constraints)); } } switch (ast.getQuantifier()) { case All: return and(compiled); case Lone: return lone(compiled); case None: return not(or(compiled)); case One: return one(compiled); case Some: return or(compiled); default: throw new AstException(); } } @Override public IrExpr visit(AstLength ast, Void a) { return length(asString(compile(ast.getString()))); } @Override public IrExpr visit(AstConcat ast, Void a) { return concat(asString(compile(ast.getLeft())), asString(compile(ast.getRight()))); } @Override public IrExpr visit(AstPrefix ast, Void a) { return prefix(asString(compile(ast.getPrefix())), asString(compile(ast.getWord()))); } @Override public IrExpr visit(AstSuffix ast, Void a) { return suffix(asString(compile(ast.getSuffix())), asString(compile(ast.getWord()))); } }; /* ****************** * Build functions. ****************** */ /** * Build the child set for the Clafer. 
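     * Informally, parent i may only own child ids inside a window determined by
     * the cumulative cardinality bounds of the parents preceding it.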
* * @param clafer the Clafer * @return the variables to represent the child relation */ private IrSetVar[] buildChildSet(AstConcreteClafer clafer) { assert Format.LowGroup.equals(getFormat(clafer)); int parentScope = getScope(clafer.getParent()); PartialSolution partialParentSolution = getPartialSolution(clafer.getParent()); int claferScope = getScope(clafer); Card card = getCard(clafer); assert card.hasHigh(); int low = 0; int high = card.getHigh(); int max = claferScope - 1; IrSetVar[] skip = new IrSetVar[parentScope]; for (int i = 0; i < skip.length; i++) { if (low <= max) { IrDomain env = boundDomain(low, Math.min(high - 1, max)); IrDomain ker = EmptyDomain; int cardLow = 0; int cardHigh = card.getHigh(); if (partialParentSolution.hasClafer(i)) { int prevHigh = high - card.getHigh(); int nextLow = low + card.getLow(); if (nextLow > prevHigh) { ker = boundDomain(prevHigh, Math.min(nextLow - 1, max)); } cardLow = card.getLow(); } cardLow = Math.max(cardLow, ker.size()); cardHigh = Math.min(cardHigh, env.size()); skip[i] = set(clafer.getName() + "#" + i, env, ker, boundDomain(cardLow, cardHigh)); } else { skip[i] = EmptySet; } if (partialParentSolution.hasClafer(i)) { low += card.getLow(); } high += card.getHigh(); } return skip; } /** * Create the parent pointers for the Clafer. * * @param clafer the Clafer * @return the variables to represent the parent relation */ private IrIntVar[] buildParentPointers(AstConcreteClafer clafer) { PartialSolution solution = getPartialSolution(clafer); boolean known = solution.parentSolutionKnown(); IrIntVar[] pointers = new IrIntVar[solution.size()]; for (int i = 0; i < pointers.length; i++) { int[] possibleParents = solution.getPossibleParents(i); pointers[i] = enumInt(clafer.getName() + "@Parent#" + i, solution.hasClafer(i) || known ? possibleParents : Util.snoc(possibleParents, getScope(clafer.getParent()))); } return pointers; } private void buildRef(AstClafer clafer) { if (clafer.hasRef()) { AstRef ref = clafer.getRef(); if (ref.getTargetType() instanceof AstStringClafer) { refStrings.put(ref, buildStrings(ref)); } else { refPointers.put(ref, buildRefPointers(ref)); } } } /** * Create the references pointers for the Clafer. 
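     * A ref whose owner is not known to exist also gets the special
     * "uninitialized" value (see getUninitalizedRef) added to its domain.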
* * @param ref the reference Clafer * @return the variables to represent the reference relation */ private IrIntVar[] buildRefPointers(AstRef ref) { AstClafer src = ref.getSourceType(); AstClafer tar = ref.getTargetType(); assert !(tar instanceof AstStringClafer); PartialSolution partialSolution = getPartialSolution(src); IrDomain[] partialInts = getPartialInts(ref); IrIntVar[] ivs = new IrIntVar[getScope(src)]; for (int i = 0; i < ivs.length; i++) { if (partialSolution.hasClafer(i)) { ivs[i] = domainInt(src.getName() + "@Ref" + i, partialInts[i]); } else { ivs[i] = domainInt(src.getName() + "@Ref" + i, IrUtil.add(partialInts[i], getUninitalizedRef(tar))); } } return ivs; } private IrStringVar[] buildStrings(AstRef ref) { AstClafer src = ref.getSourceType(); AstClafer tar = ref.getTargetType(); assert tar instanceof AstStringClafer; int stringLength = analysis.getScope().getStringLength(); char charLow = analysis.getScope().getCharLow(); char charHigh = analysis.getScope().getCharHigh(); IrDomain charDomain = IrUtil.add(boundDomain(charLow, charHigh), 0); IrStringVar[] svs = new IrStringVar[getScope(src)]; for (int i = 0; i < svs.length; i++) { IrIntVar[] chars = new IrIntVar[stringLength]; for (int j = 0; j < chars.length; j++) { chars[j] = domainInt(src.getName() + "@String" + i + "[" + j + "]", charDomain); } svs[i] = string(src.getName(), chars, boundInt(src.getName() + "@Length" + i, 0, stringLength)); } return svs; } /** * Enforce the size of a set to be within the cardinality. * * @param setCard the set to constrain * @param card the cardinality * @return card.low &le; |setCard| &le; card.high */ private IrBoolExpr constrainCard(IrIntExpr setCard, Card card) { if (card.isExact()) { return equal(setCard, card.getLow()); } if (card.hasLow() && card.hasHigh()) { return within(setCard, boundDomain(card.getLow(), card.getHigh())); } if (card.hasLow()) { return greaterThanEqual(setCard, card.getLow()); } if (card.hasHigh()) { return lessThanEqual(setCard, card.getHigh()); } return True; } /* ************************ * Convenience functions. ************************ */ private int getUninitalizedRef(AstClafer clafer) { return clafer instanceof AstIntClafer ? 
analysis.getScope().getIntHigh() + 1 : getScope(clafer); } private int getScope(AstClafer clafer) { return analysis.getScope().getScope(clafer); } private Format getFormat(AstClafer clafer) { return analysis.getFormat(clafer); } private PartialSolution getPartialSolution(AstClafer clafer) { return analysis.getPartialSolution(clafer); } private PartialSolution getPartialParentSolution(AstConcreteClafer clafer) { return getPartialSolution(clafer.getParent()); } private IrDomain[] getPartialInts(AstRef ref) { return analysis.getPartialInts(ref); } private int getOffset(AstAbstractClafer sup, AstClafer sub) { int offset = 0; for (AstClafer cur = sub; !sup.equals(cur); cur = cur.getSuperClafer()) { if (!cur.hasSuperClafer()) { throw new AstException(sub + " is not a sub clafer of " + sup); } offset += analysis.getOffsets(cur.getSuperClafer()).getOffset(cur); } return offset; } private Card getCard(AstConcreteClafer clafer) { return analysis.getCard(clafer); } private Card getGlobalCard(AstClafer clafer) { return analysis.getGlobalCard(clafer); } private Type getType(AstExpr expr) { return analysis.getType(expr); } private AstClafer getCommonSupertype(AstExpr expr) { return analysis.getCommonSupertype(expr); } private static interface Symmetry { IrExpr[] getInput(); IrExpr[] getOutput(); IrBoolExpr getConstraint(); } private static class FilterString implements Symmetry { private final IrSetExpr set; private final IrIntExpr[] string; private final IrIntExpr[] result; FilterString(IrSetExpr set, IrIntExpr[] string, IrIntExpr[] result) { this.set = set; this.string = string; this.result = result; } @Override public IrExpr[] getInput() { IrExpr[] input = new IrExpr[string.length + 1]; input[0] = set; System.arraycopy(string, 0, input, 1, string.length); return input; } @Override public IrExpr[] getOutput() { return result; } @Override public IrBoolExpr getConstraint() { return filterString(set, string, result); } } private static class LexChainChannel implements Symmetry { private final IrIntExpr[][] strings; private final IrIntExpr[] ints; LexChainChannel(IrIntExpr[][] strings, IrIntExpr[] ints) { this.strings = strings; this.ints = ints; } @Override public IrExpr[] getInput() { return Util.concat(strings); } @Override public IrExpr[] getOutput() { return ints; } @Override public IrBoolExpr getConstraint() { return sortChannel(strings, ints); } } }
src/main/java/org/clafer/ast/compiler/AstCompiler.java
package org.clafer.ast.compiler; import gnu.trove.list.array.TIntArrayList; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.clafer.ast.AstAbstractClafer; import org.clafer.ast.AstArithm; import org.clafer.ast.AstBoolArithm; import org.clafer.ast.AstBoolExpr; import org.clafer.ast.AstCard; import org.clafer.ast.AstClafer; import org.clafer.ast.AstCompare; import org.clafer.ast.AstConcat; import org.clafer.ast.AstConcreteClafer; import org.clafer.ast.AstConstant; import org.clafer.ast.AstConstraint; import org.clafer.ast.AstDecl; import org.clafer.ast.AstDifference; import org.clafer.ast.AstDowncast; import org.clafer.ast.AstException; import org.clafer.ast.AstExpr; import org.clafer.ast.AstExprVisitor; import org.clafer.ast.AstGlobal; import org.clafer.ast.AstIfThenElse; import org.clafer.ast.AstIntClafer; import org.clafer.ast.AstIntersection; import org.clafer.ast.AstJoin; import org.clafer.ast.AstJoinParent; import org.clafer.ast.AstJoinRef; import org.clafer.ast.AstLength; import org.clafer.ast.AstLocal; import org.clafer.ast.AstMembership; import org.clafer.ast.AstMinus; import org.clafer.ast.AstModel; import org.clafer.ast.AstNot; import org.clafer.ast.AstPrefix; import org.clafer.ast.AstQuantify; import org.clafer.ast.AstQuantify.Quantifier; import org.clafer.ast.AstRef; import org.clafer.ast.AstSetExpr; import org.clafer.ast.AstSetTest; import org.clafer.ast.AstStringClafer; import org.clafer.ast.AstStringConstant; import org.clafer.ast.AstSuffix; import org.clafer.ast.AstSum; import org.clafer.ast.AstTernary; import org.clafer.ast.AstThis; import org.clafer.ast.AstUnion; import org.clafer.ast.AstUpcast; import org.clafer.ast.AstUtil; import org.clafer.ast.Card; import org.clafer.ast.JoinSetWithStringException; import org.clafer.ast.analysis.AbstractOffsetAnalyzer; import org.clafer.ast.analysis.Analysis; import org.clafer.ast.analysis.Analyzer; import org.clafer.ast.analysis.CardAnalyzer; import org.clafer.ast.analysis.Format; import org.clafer.ast.analysis.FormatAnalyzer; import org.clafer.ast.analysis.GlobalCardAnalyzer; import org.clafer.ast.analysis.OptimizerAnalyzer; import org.clafer.ast.analysis.PartialIntAnalyzer; import org.clafer.ast.analysis.PartialSolution; import org.clafer.ast.analysis.PartialSolutionAnalyzer; import org.clafer.ast.analysis.ScopeAnalyzer; import org.clafer.ast.analysis.SymmetryAnalyzer; import org.clafer.ast.analysis.Type; import org.clafer.ast.analysis.TypeAnalyzer; import org.clafer.collection.DisjointSets; import org.clafer.collection.Either; import org.clafer.collection.Pair; import org.clafer.collection.Triple; import org.clafer.common.Check; import org.clafer.common.Util; import org.clafer.graph.GraphUtil; import org.clafer.graph.KeyGraph; import org.clafer.graph.Vertex; import org.clafer.ir.IrBoolExpr; import org.clafer.ir.IrBoolVar; import org.clafer.ir.IrDomain; import org.clafer.ir.IrExpr; import org.clafer.ir.IrIntExpr; import org.clafer.ir.IrIntVar; import org.clafer.ir.IrModule; import org.clafer.ir.IrSetExpr; import org.clafer.ir.IrSetVar; import org.clafer.ir.IrStringExpr; import org.clafer.ir.IrStringVar; import org.clafer.ir.IrUtil; import org.clafer.ir.IrVar; import static org.clafer.ir.Irs.*; import org.clafer.objective.Objective; import org.clafer.scope.Scope; /** * Compile from AST to IR. 
* * @author jimmy */ public class AstCompiler { public static final Analyzer[] DefaultAnalyzers = new Analyzer[]{ new TypeAnalyzer(), new GlobalCardAnalyzer(), new ScopeAnalyzer(), new CardAnalyzer(), new FormatAnalyzer(), new AbstractOffsetAnalyzer(), new OptimizerAnalyzer(), new PartialSolutionAnalyzer(), new PartialIntAnalyzer(), new SymmetryAnalyzer(), // Reanalyze types new TypeAnalyzer() }; private final Analysis analysis; private final IrModule module; private final List<Symmetry> symmetries = new ArrayList<>(); private final boolean fullSymmetryBreaking; private AstCompiler(AstModel model, Scope scope, IrModule module, Analyzer[] analyzers, boolean fullSymmetryBreaking) { this(model, scope, new Objective[0], module, analyzers, fullSymmetryBreaking); } private AstCompiler(AstModel model, Scope scope, Objective[] objectives, IrModule module, Analyzer[] analyzers, boolean fullSymmetryBreaking) { this.analysis = Analysis.analyze(model, scope, objectives, analyzers); this.module = Check.notNull(module); this.fullSymmetryBreaking = fullSymmetryBreaking; } public static AstSolutionMap compile(AstModel in, Scope scope, IrModule out, boolean fullSymmetryBreaking) { return compile(in, scope, out, DefaultAnalyzers, fullSymmetryBreaking); } public static AstSolutionMap compile(AstModel in, Scope scope, IrModule out, Analyzer[] analyzers, boolean fullSymmetryBreaking) { AstCompiler compiler = new AstCompiler(in, scope, out, analyzers, fullSymmetryBreaking); return compiler.compile(); } public static AstSolutionMap compile(AstModel in, Scope scope, Objective[] objectives, IrModule out, boolean fullSymmetryBreaking) { return compile(in, scope, objectives, out, DefaultAnalyzers, fullSymmetryBreaking); } public static AstSolutionMap compile(AstModel in, Scope scope, Objective[] objectives, IrModule out, Analyzer[] analyzers, boolean fullSymmetryBreaking) { AstCompiler compiler = new AstCompiler(in, scope, objectives, out, analyzers, fullSymmetryBreaking); return compiler.compile(); } /** * @return the order to initialize regular variables */ private List<AstClafer> initOrder() { List<AstAbstractClafer> abstractClafers = analysis.getAbstractClafers(); List<AstConcreteClafer> concreteClafers = analysis.getConcreteClafers(); KeyGraph<AstClafer> dependency = new KeyGraph<>(); for (AstAbstractClafer abstractClafer : abstractClafers) { Vertex<AstClafer> node = dependency.getVertex(abstractClafer); for (AstClafer sub : abstractClafer.getSubs()) { node.addNeighbour(dependency.getVertex(sub)); } } for (AstConcreteClafer concreteClafer : concreteClafers) { Vertex<AstClafer> node = dependency.getVertex(concreteClafer); if (Format.ParentGroup.equals(getFormat(concreteClafer))) { /* * Low group does not create the dependency because it does not * require the parent to initialize first. This allows for * models like the one below. * * abstract Path * p : Path ? * * If the "?" were a fixed cardinality instead, then an * exception will occur, but the model would not be satisfiable * anyways for any fixed cardinality greater than zero. */ node.addNeighbour(dependency.getVertex(concreteClafer.getParent())); } } List<Set<AstClafer>> components = GraphUtil.computeStronglyConnectedComponents(dependency); List<AstClafer> clafers = new ArrayList<>(); for (Set<AstClafer> component : components) { if (component.size() != 1) { // See the above comment about low groups. 
throw new AstException("Cannot satisfy the cycle " + component); } clafers.addAll(component); } return clafers; } private AstSolutionMap compile() { IrSetVar rootSet = constant(new int[]{0}); sets.put(analysis.getModel(), rootSet); siblingSets.put(analysis.getModel(), new IrSetVar[]{rootSet}); memberships.put(analysis.getModel(), new IrBoolExpr[]{True}); List<AstClafer> clafers = initOrder(); for (AstClafer clafer : clafers) { if (clafer instanceof AstConcreteClafer && !AstUtil.isRoot((AstConcreteClafer) clafer)) { initConcrete((AstConcreteClafer) clafer); } else if (clafer instanceof AstAbstractClafer) { initAbstract((AstAbstractClafer) clafer); } } for (AstClafer clafer : clafers) { if (clafer instanceof AstConcreteClafer && !AstUtil.isRoot((AstConcreteClafer) clafer)) { constrainConcrete((AstConcreteClafer) clafer); } else if (clafer instanceof AstAbstractClafer) { constrainAbstract((AstAbstractClafer) clafer); } constrainGroupCardinality(clafer); } Map<AstConstraint, IrBoolVar> softVars = new HashMap<>(); for (AstConstraint constraint : analysis.getConstraints()) { AstClafer clafer = constraint.getContext(); int scope = getScope(clafer); if (analysis.isHard(constraint)) { for (int j = 0; j < scope; j++) { ExpressionCompiler expressionCompiler = new ExpressionCompiler(j); IrBoolExpr thisConstraint = expressionCompiler.compile(analysis.getExpr(constraint)); module.addConstraint(implies(memberships.get(clafer)[j], thisConstraint)); } } else { IrBoolVar softVar = bool(constraint.toString()); softVars.put(constraint, softVar); for (int j = 0; j < scope; j++) { ExpressionCompiler expressionCompiler = new ExpressionCompiler(j); IrBoolExpr thisConstraint = expressionCompiler.compile(analysis.getExpr(constraint)); module.addConstraint(ifOnlyIf(softVar, implies(memberships.get(clafer)[j], thisConstraint))); } module.addVariable(softVar); } } IrIntExpr softSum = add(softVars.values()); IrIntVar sumSoftVars = domainInt("SumSoftVar", softSum.getDomain()); module.addConstraint(equal(sumSoftVars, softSum)); for (IrSetVar[] childSet : siblingSets.values()) { module.addVariables(childSet); } for (IrIntVar[] refs : refPointers.values()) { module.addVariables(refs); } for (IrStringVar[] refs : refStrings.values()) { module.addVariables(refs); } for (Set<AstClafer> component : analysis.getClafersInParentAndSubOrder()) { if (component.size() > 1) { /* * Add additional constraints for to handle cases where a * descendent inherits an ancestor. * * Let A be an abstract Clafer and B be a descendant of A that * inherits A. Let F be a mapping every B to its ancestor A. * Enforce that F is an acyclic function. 
*/ List<AstClafer> types = new ArrayList<>(); for (AstClafer clafer : component) { types.add(clafer); } AstAbstractClafer unionType = (AstAbstractClafer) AstUtil.getLowestCommonSupertype(types); IrIntExpr[] edges = new IrIntExpr[getScope(unionType)]; IrIntExpr uninitialized = constant(edges.length); Arrays.fill(edges, uninitialized); for (AstClafer clafer : component) { if (clafer instanceof AstConcreteClafer) { AstConcreteClafer concreteChild = (AstConcreteClafer) clafer; IrBoolExpr[] members = memberships.get(concreteChild); IrIntExpr[] parents = parentPointers.get(concreteChild); int offset = getOffset(unionType, concreteChild); int parentOffset = getOffset(unionType, concreteChild.getParent()); for (int i = 0; i < members.length; i++) { assert edges[i + offset] == uninitialized; IrIntExpr value = ternary(members[i], // Add the offset to upcast the parent pointer add(parents[i], parentOffset), uninitialized); IrIntVar edge = domainInt( "Edge@" + concreteChild + "->" + concreteChild.getParent() + "#" + i, value.getDomain()); module.addConstraint(equal(edge, value)); edges[i + offset] = edge; } } } for (AstClafer clafer : component) { if (clafer instanceof AstConcreteClafer) { IrBoolExpr[] members = memberships.get(clafer); int offset = getOffset(unionType, clafer); for (int i = 0; i < members.length; i++) { for (int j = i + 1; j < members.length; j++) { /* * Symmetry breaking. The lower indexed element * appears on top of the higher indexed element. */ module.addConstraint(unreachable(edges, i + offset, j + offset)); } } } } module.addConstraint(acyclic(edges)); } } ExpressionCompiler expressionCompiler = new ExpressionCompiler(0); Map<Objective, IrIntVar> objectiveVars = new HashMap<>(); Map<Objective, AstSetExpr> objectives = analysis.getObjectiveExprs(); for (Entry<Objective, AstSetExpr> objective : objectives.entrySet()) { IrIntExpr objectiveExpr = expressionCompiler.asInt( expressionCompiler.compile(objective.getValue())); IrIntVar objectiveVar = domainInt("Objective" + objective.getKey(), objectiveExpr.getDomain()); module.addConstraint(equal(objectiveVar, objectiveExpr)); objectiveVars.put(objective.getKey(), objectiveVar); } KeyGraph<Either<IrExpr, IrBoolExpr>> dependencies = new KeyGraph<>(); for (Symmetry symmetry : symmetries) { IrBoolExpr constraint = symmetry.getConstraint(); Vertex<Either<IrExpr, IrBoolExpr>> constraintNode = dependencies.getVertex(Either.<IrExpr, IrBoolExpr>right(constraint)); for (IrExpr output : symmetry.getOutput()) { dependencies.getVertex(Either.<IrExpr, IrBoolExpr>left(output)) .addNeighbour(constraintNode); } for (IrExpr input : symmetry.getInput()) { constraintNode.addNeighbour( dependencies.getVertex(Either.<IrExpr, IrBoolExpr>left(input))); } } Set<IrVar> variables = module.getVariables(); Set<Vertex<Either<IrExpr, IrBoolExpr>>> start = new HashSet<>(); for (IrVar variable : variables) { Vertex<Either<IrExpr, IrBoolExpr>> vertex = dependencies.getVertexIfPresent(Either.<IrExpr, IrBoolExpr>left(variable)); if (vertex != null) { start.add(vertex); } } Set<Either<IrExpr, IrBoolExpr>> reachables = GraphUtil.reachable(start, dependencies); for (Either<IrExpr, IrBoolExpr> reachable : reachables) { if (reachable.isRight()) { module.addConstraint(reachable.getRight()); } } return new AstSolutionMap(analysis.getModel(), siblingSets, refPointers, refStrings, softVars, sumSoftVars, objectiveVars, analysis); } private void initConcrete(AstConcreteClafer clafer) { parentPointers.put(clafer, buildParentPointers(clafer)); buildRef(clafer); switch 
(getFormat(clafer)) { case LowGroup: initLowGroupConcrete(clafer); break; case ParentGroup: initParentGroupConcrete(clafer); break; default: throw new AstException(); } IrSetVar[] siblingSet = siblingSets.get(clafer); switch (siblingSet.length) { case 0: sets.put(clafer, EmptySet); break; case 1: sets.put(clafer, siblingSet[0]); break; default: IrSetExpr union = union(siblingSet, true); IrSetVar set = set(clafer.getName(), union.getEnv(), union.getKer(), union.getCard()); module.addConstraint(equal(set, union)); sets.put(clafer, set); break; } if (fullSymmetryBreaking) { int scope = getScope(clafer); int parentScope = getScope(clafer.getParent()); IrIntExpr[][] index; AstRef ref = AstUtil.getInheritedRef(clafer); // If the Clafer either needs children or reference to be introduce symmetry. if (analysis.hasInteritedBreakableChildren(clafer) || (ref != null && analysis.isBreakableRef(ref)) || analysis.isInheritedBreakableTarget(clafer)) { index = new IrIntExpr[parentScope][getCard(clafer).getHigh()]; for (int i = 0; i < index.length; i++) { for (int j = 0; j < index[i].length; j++) { index[i][j] = boundInt(clafer.getName() + "@Index#" + i + "#" + j, -1, scope); } } } else { // Optimize for nonsymmetric nodes. Don't compute the smallest indices, // just use the cardinalities. IrSetVar[] childSet = siblingSets.get(clafer); index = new IrIntExpr[childSet.length][]; for (int i = 0; i < index.length; i++) { index[i] = new IrIntExpr[]{card(childSet[i])}; } } indices.put(clafer, index); } } private void constrainConcrete(AstConcreteClafer clafer) { IrSetExpr[] siblingSet = siblingSets.get(clafer); IrIntExpr[] parents = parentPointers.get(clafer); if (!getPartialSolution(clafer).parentSolutionKnown()) { if (getGlobalCard(clafer).isExact()) { // No unused module.addConstraint(intChannel(parents, siblingSet)); } else { IrSetVar unused = set(clafer.getName() + "@Unused", getPartialSolution(clafer).getUnknownClafers()); module.addConstraint(intChannel(parents, Util.snoc(siblingSet, unused))); } } Pair<AstRef, Integer> refPair = analysis.getInheritedRefId(clafer); AstRef ref = refPair == null ? null : refPair.getFst(); int refOffset = refPair == null ? 0 : refPair.getSnd().intValue(); int scope = getScope(clafer); IrBoolExpr[] members = memberships.get(clafer); // Two ids a and b are in the same partition if symmetry breaking guarantees // that ref[a] and ref[b] ard different. DisjointSets<Integer> refPartitions = null; // If the Clafer either needs children or reference to be introduce symmetry. 
if (fullSymmetryBreaking && scope > 1 && (analysis.hasInteritedBreakableChildren(clafer) || (ref != null && analysis.isBreakableRef(ref)) || analysis.isInheritedBreakableTarget(clafer))) { IrIntExpr[] weight = new IrIntExpr[scope]; IrIntExpr[][] index = indices.get(clafer); analysis.getHierarcyIds(clafer, refOffset); IrIntExpr[][] childIndices = new IrIntExpr[weight.length][]; List<Pair<AstClafer, Integer>> offsets = analysis.getHierarcyOffsets(clafer); Collections.reverse(offsets); boolean[] breakableRefIds = new boolean[childIndices.length]; for (int i = 0; i < childIndices.length; i++) { List<IrIntExpr> childIndex = new ArrayList<>(); for (Pair<AstClafer, Integer> offset : offsets) { for (AstConcreteClafer child : analysis.getBreakableChildren(offset.getFst())) { childIndex.addAll(Arrays.asList(indices.get(child)[i + offset.getSnd()])); } } if (ref != null && analysis.isBreakableRef(ref)) { breakableRefIds[i] = analysis.isBreakableRefId(ref, i + refOffset); if (ref.getTargetType() instanceof AstStringClafer) { childIndex.addAll(Arrays.asList(IrUtil.pad( refStrings.get(ref)[i + refOffset].getCharVars(), analysis.getScope().getStringLength()))); if (ref.isUnique()) { childIndex.add(members[i]); } } else { // References need a positive weight, so to use their value as // a weight, need to offset it so that it always positive. childIndex.add( breakableRefIds[i] // The id of the target is the weight. ? minus(refPointers.get(ref)[i + refOffset]) // If analysis says that this id does not need breaking // then give it a constant weight. Any constant is fine. : Zero); } } if (analysis.isInheritedBreakableTarget(clafer)) { for (Pair<AstClafer, Integer> hierarchy : analysis.getHierarcyIds(clafer, i)) { for (AstRef sourceRef : analysis.getBreakableTarget(hierarchy.getFst())) { IrIntVar[] sourceRefs = refPointers.get(sourceRef); IrIntExpr[] array = new IrIntExpr[sourceRefs.length]; System.arraycopy(sourceRefs, 0, array, 0, array.length); IrIntExpr count = count(hierarchy.getSnd().intValue(), array); IrIntVar countVar = domainInt("CountVar" + countCount++, count.getDomain()); module.addConstraint(equal(countVar, count)); childIndex.add(countVar); } } } childIndices[i] = childIndex.toArray(new IrIntExpr[childIndex.size()]); } for (int i = 0; i < weight.length; i++) { weight[i] = childIndices[i].length == 0 ? Zero : boundInt(clafer.getName() + "#" + i + "@Weight", 0, scope - 1); } if (getScope(clafer.getParent()) > 1) { symmetries.add(new LexChainChannel(childIndices, weight)); for (int i = 0; i < siblingSet.length; i++) { symmetries.add(new FilterString(siblingSet[i], weight, index[i])); } } if (getCard(clafer).getHigh() > 1) { for (int i = 0; i < parents.length - 1; i++) { if (ref != null && analysis.isBreakableRef(ref) && ref.isUnique()) { assert childIndices[i + 1].length == childIndices[i].length; if (breakableRefIds[i]) { if (refPartitions == null) { refPartitions = new DisjointSets<>(); } refPartitions.union(i, i + 1); } // Refs are unique and part of the weight. It is impossible for // two weights to be the same. Enforce a strict order. 
module.addConstraint(implies(and(members[i], equal(parents[i], parents[i + 1])), sortStrict(childIndices[i + 1], childIndices[i]))); } else { module.addConstraint(implies(equal(parents[i], parents[i + 1]), sort(childIndices[i + 1], childIndices[i]))); } } } } if (ref != null) { AstClafer tar = ref.getTargetType(); if (tar instanceof AstStringClafer) { IrStringVar[] strings = Arrays.copyOfRange(refStrings.get(ref), refOffset, refOffset + getScope(clafer)); if (ref.isUnique()) { if (getCard(clafer).getHigh() > 1) { for (int i = 0; i < strings.length - 1; i++) { for (int j = i + 1; j < strings.length; j++) { if (refPartitions == null || !refPartitions.connected(i, j)) { module.addConstraint( implies(and(members[i], members[j], equal(parents[i], parents[j])), notEqual(strings[i], strings[j]))); } } } } } assert strings.length == members.length; for (int i = 0; i < members.length; i++) { module.addConstraint(implies(not(members[i]), equal(strings[i], EmptyString))); } } else { IrIntVar[] refs = Arrays.copyOfRange(refPointers.get(ref), refOffset, refOffset + getScope(clafer)); if (ref.isUnique()) { if (getCard(clafer).getHigh() > 1) { for (int i = 0; i < refs.length - 1; i++) { for (int j = i + 1; j < refs.length; j++) { if (refPartitions == null || !refPartitions.connected(i, j)) { module.addConstraint( implies(and(members[i], equal(parents[i], parents[j])), notEqual(refs[i], refs[j]))); } } } } IrIntExpr size = ref.getTargetType() instanceof AstIntClafer ? constant(analysis.getScope().getIntHigh() - analysis.getScope().getIntLow() + 1) : card(sets.get(ref.getTargetType())); for (IrSetExpr sibling : siblingSet) { module.addConstraint(lessThanEqual(card(sibling), size)); } } assert refs.length == members.length; for (int i = 0; i < members.length; i++) { // The ref pointers must point to the special uninitialized value // if the Clafer owning the ref pointers does not exists. module.addConstraint(ifOnlyIf(not(members[i]), equal(refs[i], getUninitalizedRef(tar)))); } if (!ref.getTargetType().isPrimitive()) { IrSetVar targetSet = sets.get(ref.getTargetType()); for (int i = 0; i < refs.length; i++) { // The ref pointers must point to a target that exists. 
module.addConstraint(ifOnlyIf(members[i], member(refs[i], targetSet))); } } } } switch (getFormat(clafer)) { case LowGroup: constrainLowGroupConcrete(clafer); break; case ParentGroup: constrainParentGroupConcrete(clafer); break; default: throw new AstException(); } } private void constrainGroupCardinality(AstClafer clafer) { Card groupCard = clafer.getGroupCard(); List<AstConcreteClafer> children = clafer.getChildren(); if (groupCard.isBounded()) { IrBoolExpr[] members = memberships.get(clafer); IrSetVar[][] childrenSets = new IrSetVar[children.size()][]; for (int i = 0; i < childrenSets.length; i++) { AstConcreteClafer child = children.get(i); childrenSets[i] = siblingSets.get(child); } int scope = getScope(clafer); for (int i = 0; i < scope; i++) { IrIntExpr[] cards = new IrIntExpr[childrenSets.length]; for (int j = 0; j < cards.length; j++) { cards[j] = card(childrenSets[j][i]); } module.addConstraint(implies(members[i], constrainCard(add(cards), groupCard))); } } } private void initLowGroupConcrete(AstConcreteClafer clafer) { PartialSolution partialSolution = getPartialSolution(clafer); IrSetVar[] childSet = buildChildSet(clafer); siblingSets.put(clafer, childSet); IrBoolExpr[] members = new IrBoolExpr[getScope(clafer)]; for (int i = 0; i < members.length; i++) { if (partialSolution.hasClafer(i)) { members[i] = True; } else { members[i] = bool(clafer.getName() + "@Membership#" + i); if (childSet.length == 1 && members.length == 1) { module.addConstraint(equal(members[i], card(childSet[0]))); } } } Check.noNulls(members); memberships.put(clafer, members); } private void constrainLowGroupConcrete(AstConcreteClafer clafer) { IrBoolExpr[] members = memberships.get(clafer); IrSetVar set = sets.get(clafer); IrBoolExpr[] parentMembership = memberships.get(clafer.getParent()); Card card = getCard(clafer); IrSetVar[] childSet = siblingSets.get(clafer); if (fullSymmetryBreaking) { module.addConstraint(selectN(members, card(set))); module.addConstraint(sort(childSet)); } for (int i = 0; i < parentMembership.length; i++) { IrBoolExpr parentMember = parentMembership[i]; if (card.isBounded()) { // Enforce cardinality. module.addConstraint(implies(parentMember, constrainCard(card(childSet[i]), card))); } module.addConstraint(implies(not(parentMember), equal(childSet[i], EmptySet))); } if (!(childSet.length == 1 && members.length == 1)) { module.addConstraint(boolChannel(members, set)); } /** * What is this optimization? * * Force the lower number atoms to choose lower number parents. For * example consider the following Clafer model: * * <pre> * Person 2 * Hand 2 * </pre> * * The constraint forbids the case where Hand0 belongs to Person1 and * Hand1 belongs to Person0. Otherwise, the children can swap around * creating many isomorphic solutions. 
*/ if (fullSymmetryBreaking) { module.addConstraint(sort(parentPointers.get(clafer))); } } private void initParentGroupConcrete(AstConcreteClafer clafer) { PartialSolution partialParentSolution = getPartialParentSolution(clafer); IrSetVar[] children = new IrSetVar[partialParentSolution.size()]; assert getCard(clafer).getLow() == getCard(clafer).getHigh(); int lowCard = getCard(clafer).getLow(); for (int i = 0; i < children.length; i++) { if (partialParentSolution.hasClafer(i)) { children[i] = constant(Util.fromTo(i * lowCard, i * lowCard + lowCard)); } else { children[i] = set(clafer.getName() + "#" + i, Util.fromTo(i * lowCard, i * lowCard + lowCard)); } } siblingSets.put(clafer, children); IrBoolExpr[] members = new IrBoolExpr[getScope(clafer)]; IrBoolExpr[] parentMembership = memberships.get(clafer.getParent()); if (lowCard == 1) { if (members.length == parentMembership.length) { members = parentMembership; } else { System.arraycopy(parentMembership, 0, members, 0, parentMembership.length); Arrays.fill(members, parentMembership.length, members.length, False); } } else { for (int i = 0; i < parentMembership.length; i++) { for (int j = 0; j < lowCard; j++) { members[i * lowCard + j] = parentMembership[i]; } } Arrays.fill(members, parentMembership.length * lowCard, members.length, False); } Check.noNulls(members); memberships.put(clafer, members); } private void constrainParentGroupConcrete(AstConcreteClafer clafer) { PartialSolution partialParentSolution = getPartialParentSolution(clafer); IrSetVar[] children = siblingSets.get(clafer); assert getCard(clafer).getLow() == getCard(clafer).getHigh(); int lowCard = getCard(clafer).getLow(); for (int i = 0; i < children.length; i++) { if (!partialParentSolution.hasClafer(i)) { if (lowCard == 1) { module.addConstraint(equal(memberships.get(clafer.getParent())[i], card(children[i]))); } module.addConstraint(implies(memberships.get(clafer.getParent())[i], equal(children[i], constant(Util.fromTo(i * lowCard, i * lowCard + lowCard))))); module.addConstraint(implies(not(memberships.get(clafer.getParent())[i]), equal(children[i], EmptySet))); } } } private void initAbstract(AstAbstractClafer clafer) { IrSetVar[] subSets = new IrSetVar[clafer.getSubs().size()]; IrBoolExpr[] members = new IrBoolExpr[getScope(clafer)]; for (int i = 0; i < subSets.length; i++) { AstClafer sub = clafer.getSubs().get(i); subSets[i] = sets.get(sub); IrBoolExpr[] subMembers = memberships.get(sub); int offset = getOffset(clafer, sub); for (int j = 0; j < subMembers.length; j++) { assert members[offset + j] == null; members[offset + j] = Check.notNull(subMembers[j]); } } if (subSets.length == 1) { sets.put(clafer, sets.get(clafer.getSubs().get(0))); } else { TIntArrayList env = new TIntArrayList(); TIntArrayList ker = new TIntArrayList(); for (int i = 0; i < members.length; i++) { if (IrUtil.isTrue(members[i])) { ker.add(i); } if (!IrUtil.isFalse(members[i])) { env.add(i); } } IrSetVar unionSet = set(clafer.getName(), env.toArray(), ker.toArray()); if (!AstUtil.isTypeRoot(clafer)) { module.addConstraint(boolChannel(members, unionSet)); } sets.put(clafer, unionSet); } Check.noNulls(members); memberships.put(clafer, members); buildRef(clafer); } private void constrainAbstract(AstAbstractClafer clafer) { // Do nothing. 
} private final Map<AstClafer, IrSetVar> sets = new HashMap<>(); private final Map<AstClafer, IrSetVar[]> siblingSets = new HashMap<>(); private final Map<AstClafer, IrBoolExpr[]> memberships = new HashMap<>(); private final Map<AstConcreteClafer, IrIntVar[]> parentPointers = new HashMap<>(); private final Map<AstRef, IrIntVar[]> refPointers = new HashMap<>(); private final Map<AstRef, IrStringVar[]> refStrings = new HashMap<>(); private final Map<AstClafer, IrIntExpr[][]> indices = new HashMap<>(); private int countCount = 0; private int localCount = 0; private class ExpressionCompiler implements AstExprVisitor<Void, IrExpr> { private final int thisId; private final Map<AstLocal, IrIntExpr> locals = new HashMap<>(); private ExpressionCompiler(int thisId) { this.thisId = thisId; } private IrExpr compile(AstExpr expr) { return expr.accept(this, null); } private IrExpr[] compile(AstExpr[] exprs) { IrExpr[] compiled = new IrExpr[exprs.length]; for (int i = 0; i < compiled.length; i++) { compiled[i] = compile(exprs[i]); } return compiled; } private IrBoolExpr compile(AstBoolExpr expr) { return (IrBoolExpr) compile((AstExpr) expr); } private IrBoolExpr[] compile(AstBoolExpr[] exprs) { IrBoolExpr[] compiled = new IrBoolExpr[exprs.length]; for (int i = 0; i < compiled.length; i++) { compiled[i] = compile(exprs[i]); } return compiled; } private IrIntExpr asInt(IrExpr expr) { if (expr instanceof IrIntExpr) { return (IrIntExpr) expr; } if (expr instanceof IrSetExpr) { return sum((IrSetExpr) expr); } // Bug. throw new AstException("Should not have passed type checking."); } private IrIntExpr[] asInts(IrExpr[] exprs) { IrIntExpr[] ints = new IrIntExpr[exprs.length]; for (int i = 0; i < ints.length; i++) { ints[i] = asInt(exprs[i]); } return ints; } private IrSetExpr asSet(IrExpr expr) { if (expr instanceof IrIntExpr) { return singleton((IrIntExpr) expr); } if (expr instanceof IrSetExpr) { return (IrSetExpr) expr; } // Bug. throw new AstException("Should not have passed type checking."); } private IrSetExpr[] asSets(IrExpr[] exprs) { IrSetExpr[] sets = new IrSetExpr[exprs.length]; for (int i = 0; i < sets.length; i++) { sets[i] = asSet(exprs[i]); } return sets; } private IrStringExpr asString(IrExpr expr) { if (expr instanceof IrStringExpr) { return ((IrStringExpr) expr); } // Bug. 
throw new AstException("Should not have passed type checking."); } private IrStringExpr[] asString(IrExpr[] exprs) { IrStringExpr[] strings = new IrStringExpr[exprs.length]; for (int i = 0; i < strings.length; i++) { strings[i] = asString(exprs[i]); } return strings; } @Override public IrExpr visit(AstThis ast, Void a) { return constant(thisId); } @Override public IrExpr visit(AstGlobal ast, Void a) { IrSetVar global = sets.get(ast.getType()); if (global.getEnv().size() == 1) { int[] constant = IrUtil.getConstant(global); if (constant != null) { return constant(constant[0]); } } return global; } @Override public IrExpr visit(AstConstant ast, Void a) { int[] value = ast.getValue(); if (value.length == 1) { return constant(value[0]); } return constant(value); } @Override public IrExpr visit(AstStringConstant ast, Void a) { return constant(ast.getValue()); } @Override public IrExpr visit(AstJoin ast, Void a) { return doJoin(compile(ast.getLeft()), ast.getRight()); } private IrExpr doJoin(IrExpr left, AstConcreteClafer right) { if (left instanceof IrIntExpr) { IrIntExpr $intLeft = (IrIntExpr) left; if (Format.ParentGroup.equals(getFormat(right)) && getCard(right).getLow() == 1) { assert getCard(right).isExact(); return $intLeft; } // Why empty set? The "take" var can contain unused. return joinRelation(singleton($intLeft), Util.snoc(siblingSets.get(right), EmptySet), true); } else if (left instanceof IrSetExpr) { IrSetExpr $setLeft = (IrSetExpr) left; // Why empty set? The "take" var can contain unused. return joinRelation($setLeft, Util.snoc(siblingSets.get(right), EmptySet), true); } throw new AstException(); } @Override public IrExpr visit(AstJoinParent ast, Void a) { AstConcreteClafer childrenType = (AstConcreteClafer) getCommonSupertype(ast.getChildren()); IrExpr children = compile(ast.getChildren()); if (children instanceof IrIntExpr) { IrIntExpr intChildren = (IrIntExpr) children; switch (getFormat(childrenType)) { case ParentGroup: assert getCard(childrenType).isExact(); int lowCard = getCard(childrenType).getLow(); return div(intChildren, constant(lowCard)); case LowGroup: return element(parentPointers.get(childrenType), intChildren); } } else if (children instanceof IrSetExpr) { IrSetExpr setChildren = (IrSetExpr) children; return joinFunction(setChildren, parentPointers.get(childrenType), null); } throw new AstException(); } @Override public IrExpr visit(AstJoinRef ast, Void a) { AstSetExpr deref = ast.getDeref(); AstClafer derefType = getCommonSupertype(deref); Integer globalCardinality = null; IrExpr $deref; if (derefType.getRef().isUnique()) { if (deref instanceof AstJoin) { AstJoin join = (AstJoin) deref; IrExpr left = compile(join.getLeft()); $deref = doJoin(left, join.getRight()); globalCardinality = left instanceof IrSetExpr ? ((IrSetExpr) left).getCard().getHighBound() : 1; } else { $deref = compile(deref); if (derefType instanceof AstConcreteClafer) { globalCardinality = getScope(((AstConcreteClafer) derefType).getParent()); } } } else { $deref = compile(deref); } AstRef ref = derefType.getRef(); if ($deref instanceof IrIntExpr) { IrIntExpr $intDeref = (IrIntExpr) $deref; if (ref.getTargetType() instanceof AstStringClafer) { // Why empty string? The "take" var can contain unused. return element(Util.snoc(refStrings.get(ref), EmptyString), $intDeref); } else { // Why zero? The "take" var can contain unused. 
return element(Util.snoc(refPointers.get(ref), Zero), $intDeref); } } else if ($deref instanceof IrSetExpr) { IrSetExpr $setDeref = (IrSetExpr) $deref; if (ref.getTargetType() instanceof AstStringClafer) { throw new JoinSetWithStringException(ast, $setDeref.getCard()); } // Why zero? The "take" var can contain unused. return joinFunction($setDeref, Util.snoc(refPointers.get(ref), Zero), globalCardinality); } throw new AstException(); } @Override public IrExpr visit(AstCard ast, Void a) { IrExpr set = compile(ast.getSet()); if (set instanceof IrIntExpr) { return One; } return card((IrSetExpr) set); } @Override public IrExpr visit(AstNot ast, Void a) { return not(compile(ast.getExpr())); } @Override public IrExpr visit(AstMinus ast, Void a) { return minus(asInt(compile(ast.getExpr()))); } @Override public IrExpr visit(AstSetTest ast, Void a) { IrExpr left = compile(ast.getLeft()); IrExpr right = compile(ast.getRight()); if (left instanceof IrIntExpr && right instanceof IrIntExpr) { IrIntExpr intLeft = (IrIntExpr) left; IrIntExpr intRight = (IrIntExpr) right; switch (ast.getOp()) { case Equal: return equal(intLeft, intRight); case NotEqual: return notEqual(intLeft, intRight); } } if (left instanceof IrStringExpr && right instanceof IrStringExpr) { IrStringExpr stringLeft = (IrStringExpr) left; IrStringExpr stringRight = (IrStringExpr) right; switch (ast.getOp()) { case Equal: return equal(stringLeft, stringRight); case NotEqual: return notEqual(stringLeft, stringRight); } } switch (ast.getOp()) { case Equal: return equal(asSet(left), asSet(right)); case NotEqual: return notEqual(asSet(left), asSet(right)); default: throw new AstException(); } } @Override public IrExpr visit(AstCompare ast, Void a) { IrIntExpr left = asInt(compile(ast.getLeft())); IrIntExpr right = asInt(compile(ast.getRight())); switch (ast.getOp()) { case LessThan: return lessThan(left, right); case LessThanEqual: return lessThanEqual(left, right); case GreaterThan: return greaterThan(left, right); case GreaterThanEqual: return greaterThanEqual(left, right); default: throw new AstException(); } } @Override public IrExpr visit(AstArithm ast, Void a) { IrIntExpr[] operands = asInts(compile(ast.getOperands())); switch (ast.getOp()) { case Add: return add(operands); case Sub: return sub(operands); case Mul: IrIntExpr product = operands[0]; for (int i = 1; i < operands.length; i++) { product = mul(product, operands[i]); } return product; case Div: IrIntExpr quotient = operands[0]; for (int i = 1; i < operands.length; i++) { quotient = div(quotient, operands[i]); } return quotient; default: throw new AstException(); } } @Override public IrExpr visit(AstSum ast, Void a) { AstSetExpr set = ast.getSet(); AstClafer setType = getCommonSupertype(set); assert setType.hasRef(); IrIntVar[] refs = refPointers.get(setType.getRef()); IrBoolExpr[] members; if (set instanceof AstGlobal) { members = memberships.get(setType); } else { IrExpr $set = compile(set); if ($set instanceof IrIntExpr) { IrIntExpr intSet = (IrIntExpr) $set; return element(refs, intSet); } IrSetExpr setSet = (IrSetExpr) $set; if (setSet.getEnv().isEmpty()) { return Zero; } assert setSet.getEnv().getLowBound() >= 0; members = new IrBoolExpr[setSet.getEnv().getHighBound() + 1]; for (int i = 0; i < members.length; i++) { members[i] = bool("SumMember@" + i); } module.addConstraint(boolChannel(members, setSet)); } assert members.length <= refs.length; IrIntVar[] score = new IrIntVar[members.length]; for (int i = 0; i < members.length; i++) { IrDomain domain = 
refs[i].getDomain(); int uninitializedRef = getUninitalizedRef(setType.getRef().getTargetType()); // Score's use 0 as the uninitialized value. domain = IrUtil.add(IrUtil.remove(domain, uninitializedRef), 0); score[i] = domainInt("Score@" + i, domain); module.addConstraint(ifThenElse(members[i], equal(score[i], refs[i]), equal(score[i], 0))); } return add(score); } @Override public IrExpr visit(AstBoolArithm ast, Void a) { IrBoolExpr[] operands = compile(ast.getOperands()); switch (ast.getOp()) { case And: return and(operands); case IfOnlyIf: IrBoolExpr ifOnlyIf = operands[0]; for (int i = 1; i < operands.length; i++) { ifOnlyIf = ifOnlyIf(ifOnlyIf, operands[i]); } return ifOnlyIf; case Implies: IrBoolExpr implies = operands[0]; for (int i = 1; i < operands.length; i++) { implies = implies(implies, operands[i]); } return implies; case Or: return or(operands); case Xor: IrBoolExpr xor = operands[0]; for (int i = 1; i < operands.length; i++) { xor = xor(xor, operands[i]); } return xor; default: throw new AstException(); } } @Override public IrExpr visit(AstDifference ast, Void a) { return difference( asSet(compile(ast.getLeft())), asSet(compile(ast.getRight()))); } @Override public IrExpr visit(AstIntersection ast, Void a) { return intersection( asSet(compile(ast.getLeft())), asSet(compile(ast.getRight()))); } @Override public IrExpr visit(AstUnion ast, Void a) { return union( asSet(compile(ast.getLeft())), asSet(compile(ast.getRight()))); } @Override public IrExpr visit(AstMembership ast, Void a) { IrExpr member = compile(ast.getMember()); IrExpr set = compile(ast.getSet()); if (member instanceof IrIntExpr && set instanceof IrIntExpr) { return AstMembership.Op.In.equals(ast.getOp()) ? equal((IrIntExpr) member, (IrIntExpr) set) : notEqual((IrIntExpr) member, (IrIntExpr) set); } if (member instanceof IrIntExpr && set instanceof IrSetExpr) { return AstMembership.Op.In.equals(ast.getOp()) ? member((IrIntExpr) member, (IrSetExpr) set) : notMember((IrIntExpr) member, (IrSetExpr) set); } if (member instanceof IrSetExpr && set instanceof IrIntExpr) { return AstMembership.Op.In.equals(ast.getOp()) ? equal((IrSetExpr) member, singleton((IrIntExpr) set)) : notEqual((IrSetExpr) member, singleton((IrIntExpr) set)); } return AstMembership.Op.In.equals(ast.getOp()) ? 
subsetEq(asSet(member), asSet(set)) : not(subsetEq(asSet(member), asSet(set))); } @Override public IrExpr visit(AstTernary ast, Void a) { IrBoolExpr antecedent = compile(ast.getAntecedent()); IrExpr consequent = compile(ast.getConsequent()); IrExpr alternative = compile(ast.getAlternative()); if (consequent instanceof IrIntExpr && alternative instanceof IrIntExpr) { return ternary(antecedent, (IrIntExpr) consequent, (IrIntExpr) alternative); } return ternary(antecedent, asSet(consequent), asSet(alternative)); } @Override public IrExpr visit(AstIfThenElse ast, Void a) { return ifThenElse(compile(ast.getAntecedent()), compile(ast.getConsequent()), compile(ast.getAlternative())); } @Override public IrExpr visit(AstDowncast ast, Void a) { AstSetExpr base = ast.getBase(); int offset = getOffset((AstAbstractClafer) getCommonSupertype(base), ast.getTarget()); IrExpr $base = compile(ast.getBase()); if ($base instanceof IrIntExpr) { IrIntExpr intBase = (IrIntExpr) $base; return sub(intBase, constant(offset)); } return mask((IrSetExpr) $base, offset, offset + getScope(ast.getTarget())); } @Override public IrExpr visit(AstUpcast ast, Void a) { AstSetExpr base = ast.getBase(); int offset = getOffset(ast.getTarget(), getCommonSupertype(base)); IrExpr $base = compile(ast.getBase()); if ($base instanceof IrIntExpr) { IrIntExpr intBase = (IrIntExpr) $base; return add(intBase, constant(offset)); } return offset((IrSetExpr) $base, offset); } @Override public IrExpr visit(AstLocal ast, Void a) { return locals.get(ast); } private Triple<AstLocal, IrIntExpr, IrBoolExpr>[][] compileDecl(AstDecl decl) { IrExpr body = compile(decl.getBody()); if (body instanceof IrIntExpr) { IrIntExpr intBody = (IrIntExpr) body; @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[] labeledPermutation = new Triple[decl.getLocals().length]; for (int i = 0; i < labeledPermutation.length; i++) { labeledPermutation[i] = new Triple<AstLocal, IrIntExpr, IrBoolExpr>( decl.getLocals()[i], intBody, True); } @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[][] labeledSequence = new Triple[][]{labeledPermutation}; return labeledSequence; } if (body instanceof IrSetExpr) { IrSetExpr setBody = (IrSetExpr) body; IrDomain env = setBody.getEnv(); IrDomain ker = setBody.getKer(); // TODO: need a different strategy otherwise assert env.getLowBound() >= 0; @SuppressWarnings("unchecked") Pair<IrIntExpr, IrBoolExpr>[] members = new Pair[env.getHighBound() + 1]; for (int i = 0; i < env.getLowBound(); i++) { members[i] = new Pair<IrIntExpr, IrBoolExpr>(constant(i), False); } for (int i = env.getLowBound(); i <= env.getHighBound(); i++) { members[i] = new Pair<IrIntExpr, IrBoolExpr>(constant(i), ker.contains(i) ? True : bool(Util.intercalate("/", AstUtil.getNames(decl.getLocals())) + "#" + i + "#" + localCount++)); } module.addConstraint(boolChannel(Pair.mapSnd(members), setBody)); Pair<IrIntExpr, IrBoolExpr>[][] sequence = decl.isDisjoint() ? 
Util.permutations(members, decl.getLocals().length) : Util.sequence(members, decl.getLocals().length); @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[][] labeledSequence = new Triple[sequence.length][]; for (int i = 0; i < labeledSequence.length; i++) { Pair<IrIntExpr, IrBoolExpr>[] permutation = sequence[i]; @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[] labeledPermutation = new Triple[permutation.length]; for (int j = 0; j < labeledPermutation.length; j++) { labeledPermutation[j] = new Triple<>( decl.getLocals()[j], permutation[j]); } labeledSequence[i] = labeledPermutation; } return labeledSequence; } throw new AstException(); } // TODO optimize SOME @Override public IrExpr visit(AstQuantify ast, Void a) { AstDecl decls[] = ast.getDecls(); @SuppressWarnings("unchecked") Triple<AstLocal, IrIntExpr, IrBoolExpr>[][][] compiledDecls = new Triple[decls.length][][]; for (int i = 0; i < compiledDecls.length; i++) { compiledDecls[i] = compileDecl(decls[i]); } compiledDecls = Util.sequence(compiledDecls); List<IrBoolExpr> compiled = new ArrayList<>(); for (Triple<AstLocal, IrIntExpr, IrBoolExpr>[][] quants : compiledDecls) { List<IrBoolExpr> constraints = new ArrayList<>(); for (Triple<AstLocal, IrIntExpr, IrBoolExpr>[] quantDecls : quants) { for (Triple<AstLocal, IrIntExpr, IrBoolExpr> quantLocals : quantDecls) { constraints.add(quantLocals.getThd()); locals.put(quantLocals.getFst(), quantLocals.getSnd()); } } IrBoolExpr compiledBody = compile(ast.getBody()); if (Quantifier.All.equals(ast.getQuantifier())) { compiled.add(implies(and(constraints), compiledBody)); } else { constraints.add(compiledBody); compiled.add(and(constraints)); } } switch (ast.getQuantifier()) { case All: return and(compiled); case Lone: return lone(compiled); case None: return not(or(compiled)); case One: return one(compiled); case Some: return or(compiled); default: throw new AstException(); } } @Override public IrExpr visit(AstLength ast, Void a) { return length(asString(compile(ast.getString()))); } @Override public IrExpr visit(AstConcat ast, Void a) { return concat(asString(compile(ast.getLeft())), asString(compile(ast.getRight()))); } @Override public IrExpr visit(AstPrefix ast, Void a) { return prefix(asString(compile(ast.getPrefix())), asString(compile(ast.getWord()))); } @Override public IrExpr visit(AstSuffix ast, Void a) { return suffix(asString(compile(ast.getSuffix())), asString(compile(ast.getWord()))); } }; /* ****************** * Build functions. ****************** */ /** * Build the child set for the Clafer. 
* * @param clafer the Clafer * @return the variables to represent the child relation */ private IrSetVar[] buildChildSet(AstConcreteClafer clafer) { assert Format.LowGroup.equals(getFormat(clafer)); int parentScope = getScope(clafer.getParent()); PartialSolution partialParentSolution = getPartialSolution(clafer.getParent()); int claferScope = getScope(clafer); Card card = getCard(clafer); assert card.hasHigh(); int low = 0; int high = card.getHigh(); int max = claferScope - 1; IrSetVar[] skip = new IrSetVar[parentScope]; for (int i = 0; i < skip.length; i++) { if (low <= max) { IrDomain env = boundDomain(low, Math.min(high - 1, max)); IrDomain ker = EmptyDomain; int cardLow = 0; int cardHigh = card.getHigh(); if (partialParentSolution.hasClafer(i)) { int prevHigh = high - card.getHigh(); int nextLow = low + card.getLow(); if (nextLow > prevHigh) { ker = boundDomain(prevHigh, Math.min(nextLow - 1, max)); } cardLow = card.getLow(); } cardLow = Math.max(cardLow, ker.size()); cardHigh = Math.min(cardHigh, env.size()); skip[i] = set(clafer.getName() + "#" + i, env, ker, boundDomain(cardLow, cardHigh)); } else { skip[i] = EmptySet; } if (partialParentSolution.hasClafer(i)) { low += card.getLow(); } high += card.getHigh(); } return skip; } /** * Create the parent pointers for the Clafer. * * @param clafer the Clafer * @return the variables to represent the parent relation */ private IrIntVar[] buildParentPointers(AstConcreteClafer clafer) { PartialSolution solution = getPartialSolution(clafer); boolean known = solution.parentSolutionKnown(); IrIntVar[] pointers = new IrIntVar[solution.size()]; for (int i = 0; i < pointers.length; i++) { int[] possibleParents = solution.getPossibleParents(i); pointers[i] = enumInt(clafer.getName() + "@Parent#" + i, solution.hasClafer(i) || known ? possibleParents : Util.snoc(possibleParents, getScope(clafer.getParent()))); } return pointers; } private void buildRef(AstClafer clafer) { if (clafer.hasRef()) { AstRef ref = clafer.getRef(); if (ref.getTargetType() instanceof AstStringClafer) { refStrings.put(ref, buildStrings(ref)); } else { refPointers.put(ref, buildRefPointers(ref)); } } } /** * Create the references pointers for the Clafer. 
* * @param ref the reference Clafer * @return the variables to represent the reference relation */ private IrIntVar[] buildRefPointers(AstRef ref) { AstClafer src = ref.getSourceType(); AstClafer tar = ref.getTargetType(); assert !(tar instanceof AstStringClafer); PartialSolution partialSolution = getPartialSolution(src); IrDomain[] partialInts = getPartialInts(ref); IrIntVar[] ivs = new IrIntVar[getScope(src)]; for (int i = 0; i < ivs.length; i++) { if (partialSolution.hasClafer(i)) { ivs[i] = domainInt(src.getName() + "@Ref" + i, partialInts[i]); } else { ivs[i] = domainInt(src.getName() + "@Ref" + i, IrUtil.add(partialInts[i], getUninitalizedRef(tar))); } } return ivs; } private IrStringVar[] buildStrings(AstRef ref) { AstClafer src = ref.getSourceType(); AstClafer tar = ref.getTargetType(); assert tar instanceof AstStringClafer; int stringLength = analysis.getScope().getStringLength(); char charLow = analysis.getScope().getCharLow(); char charHigh = analysis.getScope().getCharHigh(); IrDomain charDomain = IrUtil.add(boundDomain(charLow, charHigh), 0); IrStringVar[] svs = new IrStringVar[getScope(src)]; for (int i = 0; i < svs.length; i++) { IrIntVar[] chars = new IrIntVar[stringLength]; for (int j = 0; j < chars.length; j++) { chars[j] = domainInt(src.getName() + "@String" + i + "[" + j + "]", charDomain); } svs[i] = string(src.getName(), chars, boundInt(src.getName() + "@Length" + i, 0, stringLength)); } return svs; } /** * Enforce the size of a set to be within the cardinality. * * @param setCard the set to constrain * @param card the cardinality * @return card.low &le; |setCard| &le; card.high */ private IrBoolExpr constrainCard(IrIntExpr setCard, Card card) { if (card.isExact()) { return equal(setCard, card.getLow()); } if (card.hasLow() && card.hasHigh()) { return within(setCard, boundDomain(card.getLow(), card.getHigh())); } if (card.hasLow()) { return greaterThanEqual(setCard, card.getLow()); } if (card.hasHigh()) { return lessThanEqual(setCard, card.getHigh()); } return True; } /* ************************ * Convenience functions. ************************ */ private int getUninitalizedRef(AstClafer clafer) { return clafer instanceof AstIntClafer ? 
analysis.getScope().getIntHigh() + 1 : getScope(clafer); } private int getScope(AstClafer clafer) { return analysis.getScope().getScope(clafer); } private Format getFormat(AstClafer clafer) { return analysis.getFormat(clafer); } private PartialSolution getPartialSolution(AstClafer clafer) { return analysis.getPartialSolution(clafer); } private PartialSolution getPartialParentSolution(AstConcreteClafer clafer) { return getPartialSolution(clafer.getParent()); } private IrDomain[] getPartialInts(AstRef ref) { return analysis.getPartialInts(ref); } private int getOffset(AstAbstractClafer sup, AstClafer sub) { int offset = 0; for (AstClafer cur = sub; !sup.equals(cur); cur = cur.getSuperClafer()) { if (!cur.hasSuperClafer()) { throw new AstException(sub + " is not a sub clafer of " + sup); } offset += analysis.getOffsets(cur.getSuperClafer()).getOffset(cur); } return offset; } private Card getCard(AstConcreteClafer clafer) { return analysis.getCard(clafer); } private Card getGlobalCard(AstClafer clafer) { return analysis.getGlobalCard(clafer); } private Type getType(AstExpr expr) { return analysis.getType(expr); } private AstClafer getCommonSupertype(AstExpr expr) { return analysis.getCommonSupertype(expr); } private static interface Symmetry { IrExpr[] getInput(); IrExpr[] getOutput(); IrBoolExpr getConstraint(); } private static class FilterString implements Symmetry { private final IrSetExpr set; private final IrIntExpr[] string; private final IrIntExpr[] result; FilterString(IrSetExpr set, IrIntExpr[] string, IrIntExpr[] result) { this.set = set; this.string = string; this.result = result; } @Override public IrExpr[] getInput() { IrExpr[] input = new IrExpr[string.length + 1]; input[0] = set; System.arraycopy(string, 0, input, 1, string.length); return input; } @Override public IrExpr[] getOutput() { return result; } @Override public IrBoolExpr getConstraint() { return filterString(set, string, result); } } private static class LexChainChannel implements Symmetry { private final IrIntExpr[][] strings; private final IrIntExpr[] ints; LexChainChannel(IrIntExpr[][] strings, IrIntExpr[] ints) { this.strings = strings; this.ints = ints; } @Override public IrExpr[] getInput() { return Util.concat(strings); } @Override public IrExpr[] getOutput() { return ints; } @Override public IrBoolExpr getConstraint() { return sortChannel(strings, ints); } } }
Fixed intermediate variables for the sum operator not being unique.
src/main/java/org/clafer/ast/compiler/AstCompiler.java
Fixed intermediate variables for the sum operator not being unique.
Java
mit
7d68af23d9aad22a713583c3aff043c13626a8b3
0
hoholidayx/Pedometer,hoholidayx/Pedometer,hoholidayx/Pedometer
package com.hzp.pedometer.fragment; import android.os.Bundle; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.github.mikephil.charting.charts.CombinedChart; import com.github.mikephil.charting.components.XAxis; import com.github.mikephil.charting.components.YAxis; import com.github.mikephil.charting.data.BarData; import com.github.mikephil.charting.data.BarDataSet; import com.github.mikephil.charting.data.BarEntry; import com.github.mikephil.charting.data.CombinedData; import com.github.mikephil.charting.data.Entry; import com.github.mikephil.charting.data.LineData; import com.github.mikephil.charting.data.LineDataSet; import com.hzp.pedometer.R; import com.hzp.pedometer.entity.DailyData; import com.hzp.pedometer.persistance.db.DailyDataManager; import java.util.ArrayList; import java.util.Calendar; public class StatisticsFragment extends Fragment { private int recentDays = 7; private CombinedChart combinedChart; public StatisticsFragment() { // Required empty public constructor } public static StatisticsFragment newInstance() { StatisticsFragment fragment = new StatisticsFragment(); Bundle args = new Bundle(); fragment.setArguments(args); return fragment; } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); if (getArguments() != null) { } } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = inflater.inflate(R.layout.fragment_statistics, container, false); combinedChart = (CombinedChart) view.findViewById(R.id.statistics_weekly_combined_chart); setupWeeklyBarChart(combinedChart); return view; } private void setupWeeklyBarChart(CombinedChart barChart) { barChart.setTouchEnabled(false); barChart.setScaleEnabled(false); barChart.setDoubleTapToZoomEnabled(false); barChart.setDragEnabled(true); barChart.setDescription(""); combinedChart.setDescriptionPosition(0, 0); YAxis rightAxis = combinedChart.getAxisRight(); rightAxis.setDrawGridLines(false); rightAxis.setAxisMinValue(0f); // this replaces setStartAtZero(true) YAxis leftAxis = combinedChart.getAxisLeft(); leftAxis.setDrawGridLines(false); leftAxis.setAxisMinValue(0f); // this replaces setStartAtZero(true) XAxis xAxis = combinedChart.getXAxis(); xAxis.setPosition(XAxis.XAxisPosition.BOTH_SIDED); String[] d = generateXAxisArray(); CombinedData data = new CombinedData(d); DailyData[][] dayList = getDataRecentDays(recentDays); data.setData(generateLineDataAvgPerDay(dayList)); data.setData(generateBarDataStepPerDay(dayList)); combinedChart.setData(data); combinedChart.invalidate(); } /** * 生成x轴的日期列表 */ private String[] generateXAxisArray() { Calendar ca = Calendar.getInstance(); String[] d = new String[recentDays]; ca.add(Calendar.DAY_OF_MONTH, -6); for (int i = 0; i < recentDays; i++) { d[i] = ca.get(Calendar.DAY_OF_MONTH) + "日"; ca.add(Calendar.DAY_OF_MONTH,+1); } return d; } //生成混合图表的线性数据 private LineData generateLineDataAvgPerDay(DailyData[][] dataList) { LineData d = new LineData(); ArrayList<Entry> entries = new ArrayList<Entry>(); //加载数据 for (int i = 0; i < dataList.length; i++) { int dayAvg = 0; int dayStepSum = 0; if (dataList[i] != null) { for (int j = 0; j < dataList[i].length; j++) { dayStepSum += dataList[i][j].getStepCount(); } } dayAvg = dayStepSum / (i + 1); entries.add(new BarEntry(dayAvg, i)); } LineDataSet set = new LineDataSet(entries, "近7天平均步数"); set.setLineWidth(2.5f); set.setCircleRadius(5f); 
set.setDrawCubic(true); set.setDrawValues(true); set.setValueTextSize(10f); set.setColor(getResources().getColor(R.color.colorGreen)); set.setCircleColor(getResources().getColor(R.color.colorGreen)); set.setAxisDependency(YAxis.AxisDependency.LEFT); d.addDataSet(set); return d; } //生成混合图表的柱状数据 private BarData generateBarDataStepPerDay(DailyData[][] dataList) { BarData d = new BarData(); ArrayList<BarEntry> entries = new ArrayList<BarEntry>(); //加载数据 for (int i = 0; i < dataList.length; i++) { int dayStepSum = 0; if (dataList[i] != null) { for (int j = 0; j < dataList[i].length; j++) { dayStepSum += dataList[i][j].getStepCount(); } entries.add(new BarEntry(dayStepSum, i)); }else{ entries.add(new BarEntry(0, i)); } } BarDataSet set = new BarDataSet(entries, "步数"); d.addDataSet(set); set.setAxisDependency(YAxis.AxisDependency.LEFT); return d; } private DailyData[][] getDataRecentDays(int days) { Calendar ca = Calendar.getInstance(); ca.add(Calendar.DAY_OF_MONTH, -days+1); DailyData[][] dataList = new DailyData[days][]; for (int i = 0; i <days; i++) { DailyData[] temp = DailyDataManager.getInstance().getDataListByDay( ca.get(Calendar.YEAR), ca.get(Calendar.MONTH), ca.get(Calendar.DAY_OF_MONTH) ); dataList[i] = temp; ca.add(Calendar.DAY_OF_MONTH, +1); } return dataList; } }
app/src/main/java/com/hzp/pedometer/fragment/StatisticsFragment.java
package com.hzp.pedometer.fragment; import android.os.Bundle; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.github.mikephil.charting.charts.CombinedChart; import com.github.mikephil.charting.components.XAxis; import com.github.mikephil.charting.components.YAxis; import com.github.mikephil.charting.data.BarData; import com.github.mikephil.charting.data.BarDataSet; import com.github.mikephil.charting.data.BarEntry; import com.github.mikephil.charting.data.CombinedData; import com.github.mikephil.charting.data.Entry; import com.github.mikephil.charting.data.LineData; import com.github.mikephil.charting.data.LineDataSet; import com.hzp.pedometer.R; import com.hzp.pedometer.entity.DailyData; import com.hzp.pedometer.persistance.db.DailyDataManager; import java.util.ArrayList; import java.util.Calendar; public class StatisticsFragment extends Fragment { private int recentDays = 7; private CombinedChart combinedChart; public StatisticsFragment() { // Required empty public constructor } public static StatisticsFragment newInstance() { StatisticsFragment fragment = new StatisticsFragment(); Bundle args = new Bundle(); fragment.setArguments(args); return fragment; } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); if (getArguments() != null) { } } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = inflater.inflate(R.layout.fragment_statistics, container, false); combinedChart = (CombinedChart) view.findViewById(R.id.statistics_weekly_combined_chart); setupWeeklyBarChart(combinedChart); return view; } private void setupWeeklyBarChart(CombinedChart barChart) { barChart.setTouchEnabled(false); barChart.setScaleEnabled(false); barChart.setDoubleTapToZoomEnabled(false); barChart.setDragEnabled(true); barChart.setDescription(""); combinedChart.setDescriptionPosition(0, 0); YAxis rightAxis = combinedChart.getAxisRight(); rightAxis.setDrawGridLines(false); rightAxis.setAxisMinValue(0f); // this replaces setStartAtZero(true) YAxis leftAxis = combinedChart.getAxisLeft(); leftAxis.setDrawGridLines(false); leftAxis.setAxisMinValue(0f); // this replaces setStartAtZero(true) XAxis xAxis = combinedChart.getXAxis(); xAxis.setPosition(XAxis.XAxisPosition.BOTH_SIDED); String[] d = generateXAxisArray(); CombinedData data = new CombinedData(d); DailyData[][] dayList = getDataRecentDays(recentDays); data.setData(generateLineDataAvgPerDay(dayList)); data.setData(generateBarDataStepPerDay(dayList)); combinedChart.setData(data); combinedChart.invalidate(); } /** * 生成x轴的日期列表 */ private String[] generateXAxisArray() { Calendar ca = Calendar.getInstance(); String[] d = new String[recentDays]; ca.add(Calendar.DAY_OF_MONTH,-6); for (int i = 0; i < recentDays; i++) { d[i] = ca.get(Calendar.DAY_OF_MONTH) + 1 + "日"; ca.add(Calendar.DAY_OF_MONTH,+1); } return d; } //生成混合图表的线性数据 private LineData generateLineDataAvgPerDay(DailyData[][] dataList) { LineData d = new LineData(); ArrayList<Entry> entries = new ArrayList<Entry>(); //加载数据 for (int i = 0; i < dataList.length; i++) { int dayAvg = 0; int dayStepSum = 0; if (dataList[i] != null) { for (int j = 0; j < dataList[i].length; j++) { dayStepSum += dataList[i][j].getStepCount(); } dayAvg = dayStepSum / (i + 1); } entries.add(new BarEntry(dayAvg, i)); } LineDataSet set = new LineDataSet(entries, "近7天平均步数"); set.setLineWidth(2.5f); set.setCircleRadius(5f); 
set.setDrawCubic(true); set.setDrawValues(true); set.setValueTextSize(10f); set.setColor(getResources().getColor(R.color.colorGreen)); set.setCircleColor(getResources().getColor(R.color.colorGreen)); set.setAxisDependency(YAxis.AxisDependency.LEFT); d.addDataSet(set); return d; } //生成混合图表的柱状数据 private BarData generateBarDataStepPerDay(DailyData[][] dataList) { BarData d = new BarData(); ArrayList<BarEntry> entries = new ArrayList<BarEntry>(); //加载数据 for (int i = 0; i < dataList.length; i++) { int dayStepSum = 0; if (dataList[i] != null) { for (int j = 0; j < dataList[i].length; j++) { dayStepSum += dataList[i][j].getStepCount(); } } entries.add(new BarEntry(dayStepSum, i)); } BarDataSet set = new BarDataSet(entries, "步数"); d.addDataSet(set); set.setAxisDependency(YAxis.AxisDependency.LEFT); return d; } private DailyData[][] getDataRecentDays(int days) { Calendar ca = Calendar.getInstance(); DailyData[][] dataList = new DailyData[days][]; for (int i = 0; i < days; i++) { ca.add(Calendar.DAY_OF_WEEK, -1); DailyData[] temp = DailyDataManager.getInstance().getDataListByDay( ca.get(Calendar.YEAR), ca.get(Calendar.MONTH), ca.get(Calendar.DAY_OF_MONTH) ); dataList[i] = temp; } return dataList; } }
Fix incorrect display of statistics data
app/src/main/java/com/hzp/pedometer/fragment/StatisticsFragment.java
Fix incorrect display of statistics data
Java
mit
67ffebe16c0944a9ea23dc0fca9274e99397a4af
0
kyokomi/AndEngineSRPGQuest
package com.kyokomi.srpgquest.scene.part; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import org.andengine.entity.IEntity; import org.andengine.entity.modifier.ColorModifier; import org.andengine.entity.modifier.DelayModifier; import org.andengine.entity.modifier.IEntityModifier; import org.andengine.entity.modifier.LoopEntityModifier; import org.andengine.entity.modifier.MoveModifier; import org.andengine.entity.modifier.ParallelEntityModifier; import org.andengine.entity.modifier.ScaleModifier; import org.andengine.entity.modifier.SequenceEntityModifier; import org.andengine.entity.primitive.Rectangle; import org.andengine.entity.scene.Scene; import org.andengine.entity.shape.IAreaShape; import org.andengine.entity.sprite.AnimatedSprite; import org.andengine.entity.sprite.AnimatedSprite.IAnimationListener; import org.andengine.entity.sprite.Sprite; import org.andengine.entity.text.Text; import org.andengine.input.touch.TouchEvent; import org.andengine.opengl.font.Font; import org.andengine.util.color.Color; import org.andengine.util.modifier.IModifier; import org.andengine.util.modifier.ease.EaseBackInOut; import android.graphics.Typeface; import android.util.Log; import com.kyokomi.core.dto.ActorPlayerDto; import com.kyokomi.core.dto.SaveDataDto; import com.kyokomi.core.sprite.TextButton; import com.kyokomi.srpgquest.constant.BattleActorType; import com.kyokomi.srpgquest.constant.BattleMenuType; import com.kyokomi.srpgquest.constant.LayerZIndexType; import com.kyokomi.srpgquest.dto.BattleSelectDto; import com.kyokomi.srpgquest.layer.TextCutInTouchLayer; import com.kyokomi.srpgquest.logic.BattleLogic; import com.kyokomi.srpgquest.scene.SrpgBaseScene; import com.kyokomi.srpgquest.sprite.ActorBattleSprite; import com.kyokomi.srpgquest.sprite.ActorSprite; public class BattlePart extends AbstractGamePart { // ================================================== // TAG // ================================================== private static final int DAMAGE_TEXT_TAG = 1000; private static final int INFO_MESSAGE_TEXT_TAG = 1001; private static final int TARGET_CURSOR_TAG = 2000; private static final int BATTLE_MENU_TAG = 10000; private static final int BATTLE_START_CUTIN_TAG = 20000; private static final int BATTLE_END_CUTIN_TAG = 20001; private static final int ACTOR_ATTACK_EFFETC_TAG = 100000; // ================================================== private Rectangle mBaseLayer; private static int TURN_COUNT_LIMIT = 1; private int mTurnCount = 0; public enum BattleInitType { PLAYER_ATTACK(10), ENEMY_ATTACK(20) ; private Integer value; private BattleInitType(Integer value) { this.value = value; } public Integer getValue() { return value; } public static BattleInitType get(Integer value) { BattleInitType[] values = values(); for (BattleInitType type : values) { if (type.getValue() == value) { return type; } } throw new RuntimeException("find not tag type."); } } public enum BattleStateType { INIT(0), START(1), PLAYER_TURN(1000), PLAYER_TURN_TARGET_SELECT(1001), ENEMY_TURN(2000), BATTLE_START(3000), BATTLE_SELECT(4000), BATTLE_ANIMATION(5000), BATTLE_END(6000), END(9000), EXIT(9999) ; private Integer value; private BattleStateType(Integer value) { this.value = value; } public Integer getValue() { return value; } public static BattleStateType get(Integer value) { BattleStateType[] values = values(); for (BattleStateType type : values) { if (type.getValue() == value) { return type; } } throw new RuntimeException("find not tag type."); } } 
private BattleStateType mBattleState; public BattleStateType getBattleState() { return mBattleState; } private BattleInitType mBattleInitType; private List<ActorPlayerDto> mPlayerList; private List<ActorPlayerDto> mEnemyList; private BattleSelectDto mTempSelect; private List<BattleSelectDto> mBattleSelectList; // ================================================== // ボタンイベント // ================================================== private TextButton.OnClickListener mBattleMenuOnClickListener = new TextButton.OnClickListener() { @Override public void onClick(TextButton pTextButtonSprite, float pTouchAreaLocalX, float pTouchAreaLocalY) { Log.d("showBattleMenuLayer", "onClick"); // メニュー閉じる hideBattleMenuLayer(); if (pTextButtonSprite.getTag() == BattleMenuType.ATTACK.getValue()) { mTempSelect.setBattleMenuType(BattleMenuType.ATTACK); showInfoMessageText("攻撃対象を選択してください。"); // ターゲット選択 changeState(BattleStateType.PLAYER_TURN_TARGET_SELECT); } else if (pTextButtonSprite.getTag() == BattleMenuType.DEFENCE.getValue()) { // 行動確定 // TODO: あとで実装 // とりあえず攻撃 mTempSelect.setBattleMenuType(BattleMenuType.ATTACK); changeState(BattleStateType.PLAYER_TURN_TARGET_SELECT); } else if (pTextButtonSprite.getTag() == BattleMenuType.SKILL.getValue()) { showInfoMessageText("使用するスキルを選択してください。"); // スキルウィンドウ表示 // TODO: あとで実装 // とりあえず攻撃 mTempSelect.setBattleMenuType(BattleMenuType.ATTACK); changeState(BattleStateType.PLAYER_TURN_TARGET_SELECT); } else if (pTextButtonSprite.getTag() == BattleMenuType.ITEM.getValue()) { showInfoMessageText("使用するアイテムを選択してください。"); // アイテムウィンドウ表示 // TODO: あとで実装 // とりあえず攻撃 mTempSelect.setBattleMenuType(BattleMenuType.ATTACK); changeState(BattleStateType.PLAYER_TURN_TARGET_SELECT); } } }; // ================================================== // コンストラクタ // ================================================== public BattlePart(SrpgBaseScene pBaseScene) { super(pBaseScene); } // ================================================== // メソッド // ================================================== /** * @deprecated init(ActorPlayerDto player, ActorPlayerDto enemy)使って下さい * @param saveDataDto */ @Override public void init(SaveDataDto saveDataDto) { } public void init(ActorPlayerDto player, ActorPlayerDto enemy, BattleInitType pBattleInitType) { Log.d("BattlePart", player.toString()); Log.d("BattlePart", enemy.toString()); changeState(BattleStateType.INIT); mBattleInitType = pBattleInitType; mPlayerList = new ArrayList<ActorPlayerDto>(); mEnemyList = new ArrayList<ActorPlayerDto>(); mPlayerList.add(player); mEnemyList.add(enemy); // 上に重ねる用にBaseを用意 mBaseLayer = new Rectangle(0, 0, getBaseScene().getWindowWidth(), getBaseScene().getWindowHeight(), getBaseScene().getBaseActivity().getVertexBufferObjectManager()); mBaseLayer.setColor(Color.TRANSPARENT); initDamageText(mBaseLayer); initInfoMessageText(mBaseLayer); // 背景表示 initBackground(); // 背景画像の都合で表示位置が決まる float acotrBaseY = getBaseScene().getWindowHeight() / 2; // キャラ表示 ActorBattleSprite playerSprite = new ActorBattleSprite(player, getBaseScene(), 0, 0, 64, 64); playerSprite.setSize(64, 64); playerSprite.setTag(player.getPlayerId()); // 右上から表示 playerSprite.setPosition(getBaseScene().getWindowWidth() - (getBaseScene().getWindowWidth() / 8) - playerSprite.getWidth(), acotrBaseY); mBaseLayer.attachChild(playerSprite); // キャラ表示 ActorBattleSprite enemySprite = new ActorBattleSprite(enemy, getBaseScene(), 0, 0, 64, 64); enemySprite.setSize(64, 64); enemySprite.setTag(enemy.getPlayerId()); // 左上から表示 enemySprite.setPosition(getBaseScene().getWindowWidth() / 8, 
acotrBaseY); mBaseLayer.attachChild(enemySprite); // 攻撃エフェクトを用意 // TODO: ファイル固定にしているけど、wewaponImgResIdでから引っ張れるようにする AnimatedSprite playerAttackEffectSprite = getBaseScene().getResourceAnimatedSprite( "effect/rapier.png", 5, 4); playerAttackEffectSprite.setVisible(false); playerAttackEffectSprite.setTag(ACTOR_ATTACK_EFFETC_TAG + player.getEquipDto().getWeaponImgResId()); mBaseLayer.attachChild(playerAttackEffectSprite); AnimatedSprite enemyAttackEffectSprite = getBaseScene().getResourceAnimatedSprite( "effect/shock.png", 5, 2); enemyAttackEffectSprite.setVisible(false); enemyAttackEffectSprite.setColor(Color.YELLOW); enemyAttackEffectSprite.setTag(ACTOR_ATTACK_EFFETC_TAG + enemy.getEquipDto().getWeaponImgResId()); mBaseLayer.attachChild(enemyAttackEffectSprite); // ベースレイヤをattach getBaseScene().attachChild(mBaseLayer); mBaseLayer.sortChildren(); changeState(BattleStateType.START); } /** * 背景表示. */ private void initBackground() { Sprite backgroundSprite = getBaseScene().getResourceSprite("bk/main_bg.jpg"); backgroundSprite.setSize(getBaseScene().getWindowWidth(), getBaseScene().getWindowHeight()); backgroundSprite.setZIndex(-1); mBaseLayer.attachChild(backgroundSprite); } @Override public void touchEvent(Scene pScene, TouchEvent pSceneTouchEvent) { float x = pSceneTouchEvent.getX(); float y = pSceneTouchEvent.getY(); Log.d("touchEvent", "touch"); if (pSceneTouchEvent.isActionUp()) { Log.d("touchEvent", "up mBattleState = " + mBattleState); if (mBattleState == BattleStateType.START) { // カットインを非表示 TextCutInTouchLayer battleStartLayer = (TextCutInTouchLayer) mBaseLayer.getChildByTag(BATTLE_START_CUTIN_TAG); battleStartLayer.hideTouchLayer(); // バトル開始 changeState(BattleStateType.PLAYER_TURN); } else if (mBattleState == BattleStateType.END) { // カットインを非表示 TextCutInTouchLayer battleEndLayer = (TextCutInTouchLayer) mBaseLayer.getChildByTag(BATTLE_END_CUTIN_TAG); battleEndLayer.hideTouchLayer(); // バトルパート終了 end(); } else if (mBattleState == BattleStateType.PLAYER_TURN_TARGET_SELECT) { for (ActorPlayerDto enemyDto : mEnemyList) { if (enemyDto.getHitPoint() <= 0) { continue; } else { AnimatedSprite acotorSprite = findActorSprite(enemyDto.getPlayerId()).getPlayer(); if (acotorSprite.contains(x, y)) { Log.d("touchEvent", "target select end"); // タッチした時 // 攻撃対象決定 mTempSelect.setTargetDto(enemyDto); mBattleSelectList.add(mTempSelect); // ターゲットカーソルを消す hideTargetCursor(); changeState(BattleStateType.PLAYER_TURN); break; } } } } } } private void hideBattleMenuLayer() { Rectangle battleMenu = (Rectangle) mBaseLayer.getChildByTag(BATTLE_MENU_TAG); if (battleMenu != null) { battleMenu.setVisible(false); battleMenu.setPosition(getBaseScene().getWindowWidth() * 4, getBaseScene().getWindowHeight() * 4); } } private boolean showBattleMenuLayer(float x, float y) { if (mBaseLayer.getChildByTag(BATTLE_MENU_TAG) != null) { if (mBaseLayer.getChildByTag(BATTLE_MENU_TAG).isVisible()) { return false; } else { mBaseLayer.getChildByTag(BATTLE_MENU_TAG).setVisible(true); float menuWidth = ((Rectangle) mBaseLayer.getChildByTag(BATTLE_MENU_TAG)).getWidth(); float menuHeight = ((Rectangle) mBaseLayer.getChildByTag(BATTLE_MENU_TAG)).getHeight(); mBaseLayer.getChildByTag(BATTLE_MENU_TAG).setPosition(x - menuWidth / 2, y - menuHeight / 2); return true; } } Rectangle battleMenuLayer = new Rectangle( getBaseScene().getWindowWidth()/ 2, getBaseScene().getWindowHeight() / 2, getBaseScene().getWindowWidth() / 4, getBaseScene().getWindowHeight() / 2, getBaseScene().getBaseActivity().getVertexBufferObjectManager()); 
battleMenuLayer.setColor(Color.TRANSPARENT); Font menuFont = getBaseScene().createFont(Typeface.DEFAULT_BOLD, 20, Color.WHITE); List<TextButton> textButtonList = new ArrayList<TextButton>(); float sizeX = 0; float sizeY = 0; for (BattleMenuType menu : BattleMenuType.values()) { Text text = new Text(0, 0, menuFont, "********", getBaseScene().getBaseActivity().getVertexBufferObjectManager()); text.setText(menu.getText()); if (sizeX == 0 && sizeY == 0) { sizeX = text.getWidth(); sizeY = text.getHeight(); } else { text.setSize(sizeX, sizeY); } TextButton textButton = new TextButton(text, 0, 0, 80, 30, getBaseScene().getBaseActivity().getVertexBufferObjectManager(), mBattleMenuOnClickListener); textButton.setTag(menu.getValue()); textButtonList.add(textButton); } float startX = 0; float startY = 0; float addY = 0; float addX = 0; int index = 0; for (TextButton textButton : textButtonList) { getBaseScene().registerTouchArea(textButton); battleMenuLayer.attachChild(textButton); if (index == 0) { textButton.setX(startX); textButton.setY(startY); addX = textButton.getWidth(); addY = textButton.getHeight(); } else if (index == 1) { textButton.setX(startX + addX); textButton.setY(startY); } else if (index == 2) { textButton.setX(startX); textButton.setY(startY + addY); } else if (index == 3) { textButton.setX(startX + addX); textButton.setY(startY + addY); } index++; } battleMenuLayer.setSize(addX * 2, addY * 2); battleMenuLayer.setTag(BATTLE_MENU_TAG); battleMenuLayer.setPosition(x - battleMenuLayer.getWidth(), y - battleMenuLayer.getHeight()); mBaseLayer.attachChild(battleMenuLayer); return true; } @Override public void end() { changeState(BattleStateType.EXIT); if (mBaseLayer != null) { getBaseScene().detachEntity(mBaseLayer); } } // ================================================== // インラインメソッド // ================================================== /** * 状態変更 * @param pBattleStateType */ private void changeState(BattleStateType pBattleStateType) { Log.d("BattlePart", "battleState [" + this.mBattleState + "] -> [" + pBattleStateType + "]"); BattleStateType beforeStateType = this.mBattleState; this.mBattleState = pBattleStateType; // 開始時カットイン if (pBattleStateType == BattleStateType.START) { TextCutInTouchLayer battleStartLayer = new TextCutInTouchLayer(getBaseScene(), "バトル開始"); battleStartLayer.setTag(BATTLE_START_CUTIN_TAG); mBaseLayer.attachChild(battleStartLayer); // 表示 battleStartLayer.showTouchLayer(); // 最初またはバトルフェイズ後にプレイヤーターンになったとき } else if (pBattleStateType == BattleStateType.PLAYER_TURN) { // バトル終了か開始から来た場合 if ((beforeStateType == BattleStateType.BATTLE_END || beforeStateType == BattleStateType.START)) { // 初期化 mBattleSelectList = new ArrayList<BattleSelectDto>(); } ActorPlayerDto player = null; for (ActorPlayerDto playerDto : mPlayerList) { if (playerDto.getHitPoint() <= 0) { continue; } else { // 攻撃用意していない場合が対象 boolean isSelect = true; for (BattleSelectDto battleSelectDto : mBattleSelectList) { if (battleSelectDto.getActorPlayerDto().getPlayerId().intValue() == playerDto.getPlayerId().intValue()) { isSelect = false; break; } } if (isSelect) { player = playerDto; break; } } } if (player != null) { Log.d("BattlePart", "playerId = " + player.getPlayerId()); AnimatedSprite playerSprite = findActorSprite(player.getPlayerId()).getPlayer(); mTempSelect = new BattleSelectDto(); mTempSelect.setBattleActorType(BattleActorType.PLAYER); mTempSelect.setActorPlayerDto(player); mTempSelect.setAction(false); showInfoMessageText(player.getName() + "の行動を選択してください。"); // プレイヤーの攻撃ウィンドウを表示 
showBattleMenuLayer(playerSprite.getX(), playerSprite.getY()); } else { // 敵行動選択へ changeState(BattleStateType.ENEMY_TURN); } } else if (pBattleStateType == BattleStateType.PLAYER_TURN_TARGET_SELECT) { // 攻撃可能な敵にターゲットカーソルを表示 for (ActorPlayerDto enemyDto : mEnemyList) { if (enemyDto.getHitPoint() <= 0) { continue; } else { // カーソル表示 showTargetCursor(enemyDto); } } } else if (pBattleStateType == BattleStateType.ENEMY_TURN) { BattleSelectDto battleSelect = new BattleSelectDto(); for (ActorPlayerDto enemyDto : mEnemyList) { if (enemyDto.getHitPoint() <= 0) { continue; } else { // TODO: 攻撃対象をHPの量とか強さで判断するようにする battleSelect.setActorPlayerDto(enemyDto); battleSelect.setAction(false); battleSelect.setBattleActorType(BattleActorType.ENEMY); battleSelect.setBattleMenuType(BattleMenuType.ATTACK); // 攻撃対象を選択 for (ActorPlayerDto playerDto : mPlayerList) { if (playerDto.getHitPoint() <= 0) { continue; } else { // 攻撃対象を選択 battleSelect.setTargetDto(playerDto); break; } } mBattleSelectList.add(battleSelect); } } // バトル開始 changeState(BattleStateType.BATTLE_START); } else if (pBattleStateType == BattleStateType.BATTLE_START) { // TODO: 素早い順に並び替える // TODO: とりあえず攻撃しかけた順にする final BattleActorType firstAttackActorType; if (mBattleInitType == BattleInitType.PLAYER_ATTACK) { firstAttackActorType = BattleActorType.PLAYER; } else if (mBattleInitType == BattleInitType.ENEMY_ATTACK) { firstAttackActorType = BattleActorType.ENEMY; } else { firstAttackActorType = null; } if (firstAttackActorType != null) { Collections.sort(mBattleSelectList, new Comparator<BattleSelectDto>() { @Override public int compare(BattleSelectDto p1, BattleSelectDto p2) { Log.d("sort", "p1 = " + p1.getBattleActorType() + " p2 = " + p2.getBattleActorType() + " first = " + firstAttackActorType ); if (p1.getBattleActorType() == p2.getBattleActorType()) { return 0; } else if (p1.getBattleActorType() == firstAttackActorType) { return -1; } else if (p2.getBattleActorType() == firstAttackActorType) { return 1; } else { return 0; } } }); } changeState(BattleStateType.BATTLE_SELECT); } else if (pBattleStateType == BattleStateType.BATTLE_SELECT) { for (BattleSelectDto battleSelect : mBattleSelectList) { if (battleSelect.getActorPlayerDto().getHitPoint() <= 0) { // バトルフェイズに死亡した continue; } if (battleSelect.isAction()) { // 行動済み continue; } // 攻撃対象が死亡しているときは、攻撃対象をランダムに検索 if (battleSelect.getTargetDto().getHitPoint() <= 0) { battleSelect.setTargetDto(null); List<ActorPlayerDto> targetList = new ArrayList<ActorPlayerDto>(); if (battleSelect.getBattleActorType() == BattleActorType.PLAYER) { targetList = mEnemyList; } else if (battleSelect.getBattleActorType() == BattleActorType.ENEMY) { targetList = mPlayerList; } else { continue; } for (ActorPlayerDto target : targetList) { if (target.getHitPoint() <= 0) { continue; } battleSelect.setTargetDto(target); break; } } if (battleSelect.getTargetDto() == null) { // 攻撃可能な敵がいないので諦める continue; } BattleLogic battleLogic = new BattleLogic(); int damage = battleLogic.attack(battleSelect.getActorPlayerDto(), battleSelect.getTargetDto()); Log.d("battleLogic", battleSelect.getActorPlayerDto().toString()); Log.d("battleLogic", battleSelect.getTargetDto().toString()); showInfoMessageText(battleSelect.getActorPlayerDto().getName() + "の攻撃"); // 攻撃アニメーション開始 changeState(BattleStateType.BATTLE_ANIMATION); attackAnimation(battleSelect.getActorPlayerDto(), battleSelect.getTargetDto(), damage); // 行動済みにして一旦抜ける(アニメーションのコールバックで改めてBATTLE_SELECTモードに移行する battleSelect.setAction(true); break; } // アニメーション中はバトル終了はしない if (mBattleState != 
BattleStateType.BATTLE_ANIMATION) { changeState(BattleStateType.BATTLE_END); } } else if (pBattleStateType == BattleStateType.BATTLE_END) { mTurnCount++; if (mTurnCount < TURN_COUNT_LIMIT) { // バトル継続 changeState(BattleStateType.PLAYER_TURN); } else { // バトル終了 changeState(BattleStateType.END); } } else if (pBattleStateType == BattleStateType.END) { // バトルパート終了 TextCutInTouchLayer battleEndLayer = new TextCutInTouchLayer(getBaseScene(), "バトル終了"); battleEndLayer.setTag(BATTLE_END_CUTIN_TAG); mBaseLayer.attachChild(battleEndLayer); // 表示 battleEndLayer.showTouchLayer(); } else if (pBattleStateType == BattleStateType.EXIT) { // 終わり } } private void attackAnimation(final ActorPlayerDto attackFrom, final ActorPlayerDto attackTo, final int damage) { final AnimatedSprite attackFromSprite = findActorSprite(attackFrom.getPlayerId()).getPlayer(); final AnimatedSprite attackToSprite = findActorSprite(attackTo.getPlayerId()).getPlayer(); // 移動方向を算出 float directionDiff_x = attackFromSprite.getX() - attackToSprite.getX(); if (directionDiff_x > 0) { // 攻撃者が右側にいる directionDiff_x = 1; } else { // 攻撃者が左側にいる directionDiff_x = -1; } // 移動先を算出 final float pFromX = attackFromSprite.getX(); final float pToX = attackToSprite.getX() + (attackToSprite.getWidth() * directionDiff_x); final float pFromY = attackFromSprite.getY(); final float pToY = attackToSprite.getY(); final float effectX = attackToSprite.getX() + attackToSprite.getWidth() / 2; final float effectY = attackToSprite.getY() + attackToSprite.getHeight() / 2; attackFromSprite.registerEntityModifier(new SequenceEntityModifier( // 攻撃対象に向かって移動するアニメーション new MoveModifier(0.5f, pFromX, pToX, pFromY, pToY, new IEntityModifier.IEntityModifierListener() { @Override public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { } }), // 攻撃アニメーション new DelayModifier(0.1f, new IEntityModifier.IEntityModifierListener() { @Override public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { attackFromSprite.animate( new long[]{100, 100, 100, 100}, new int[]{6, 7, 8, 7}, false); } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { // ダメージエフェクト final Color color = attackToSprite.getColor(); final Color white2 = new Color(0.5f, 0.5f, 0.5f, 1); // 3回点滅 attackToSprite.registerEntityModifier(new LoopEntityModifier( new SequenceEntityModifier( new ColorModifier(0.1f, color, white2), new ColorModifier(0.1f, white2, color) ), 3) ); // 攻撃エフェクト AnimatedSprite effect = (AnimatedSprite) mBaseLayer.getChildByTag( ACTOR_ATTACK_EFFETC_TAG + attackFrom.getEquipDto().getWeaponImgResId()); effect.setPosition(effectX - effect.getWidth() / 2, effectY - effect.getHeight() / 2); effect.setCurrentTileIndex(0); effect.setVisible(true); effect.animate( new long[]{20, 20, 20, 20, 20, 20, 20, 20, 20, 20}, new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, false, new IAnimationListener() { @Override public void onAnimationStarted(AnimatedSprite pAnimatedSprite, int pInitialLoopCount) { } @Override public void onAnimationLoopFinished(AnimatedSprite pAnimatedSprite, int pRemainingLoopCount, int pInitialLoopCount) { } @Override public void onAnimationFrameChanged(AnimatedSprite pAnimatedSprite, int pOldFrameIndex, int pNewFrameIndex) { } @Override public void onAnimationFinished(AnimatedSprite pAnimatedSprite) { pAnimatedSprite.setVisible(false); } }); } }), // ダメージテキスト表示 new DelayModifier(0.5f, new IEntityModifier.IEntityModifierListener() { @Override 
public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { showInfoMessageText(attackTo.getName() + "に" + damage + "ダメージ"); // ダメージテキスト表示 showDamageText(damage, attackToSprite); } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { // 死亡してたら死亡画像にする if (attackTo.getHitPoint() <= 0) { // TODO: 死亡アニメーションとりあえず消滅 attackToSprite.setVisible(false); } } }), // 戻りアニメーション new MoveModifier(0.5f, pToX, pFromX, pToY, pFromY, new IEntityModifier.IEntityModifierListener() { @Override public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { // アニメーション終わり changeState(BattleStateType.BATTLE_SELECT); } }) )); } /** * ダメージテキスト初期化 */ private void initDamageText(IEntity entity) { Text damageText = new Text(0, 0, getBaseScene().getFont(), "00000", getBaseScene().getBaseActivity().getVertexBufferObjectManager()); damageText.setColor(Color.TRANSPARENT); damageText.setZIndex(LayerZIndexType.TEXT_LAYER.getValue()); damageText.setTag(DAMAGE_TEXT_TAG); entity.attachChild(damageText); } /** * ダメージテキスト表示. */ private void showDamageText(int damage, final IAreaShape areaShape) { final Text damageText = (Text) mBaseLayer.getChildByTag(DAMAGE_TEXT_TAG); damageText.setScale(0.5f); // 頭の上くらいに表示 damageText.setX(areaShape.getX() + areaShape.getWidth() / 2); damageText.setY(areaShape.getY() - areaShape.getHeight() / 2); damageText.setText(String.valueOf(damage)); damageText.setColor(Color.WHITE); damageText.registerEntityModifier(new SequenceEntityModifier( new ParallelEntityModifier( new ScaleModifier(0.5f, 0.5f, 2.0f, EaseBackInOut.getInstance()), new SequenceEntityModifier( new MoveModifier(0.25f, damageText.getX(), damageText.getX(), damageText.getY(), damageText.getY() - 15, EaseBackInOut.getInstance()), new MoveModifier(0.25f, damageText.getX(), damageText.getX(), damageText.getY() - 15, damageText.getY(), EaseBackInOut.getInstance())) ), new DelayModifier(0.2f, new IEntityModifier.IEntityModifierListener() { @Override public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { damageText.setColor(Color.TRANSPARENT); } }))); } /** * 戦闘ログテキスト初期化 */ private void initInfoMessageText(IEntity entity) { float width = getBaseScene().getWindowWidth() - (getBaseScene().getWindowWidth() / 10); float height = 40; Rectangle rectangle = getBaseScene().createRectangle(0, 0, width, height); rectangle.setColor(Color.BLACK); rectangle.setAlpha(0.5f); rectangle.setZIndex(LayerZIndexType.TEXT_LAYER.getValue()); rectangle.setTag(INFO_MESSAGE_TEXT_TAG); Text text = new Text(10, 10, getBaseScene().createFont(Typeface.DEFAULT, 20, Color.WHITE), "000000000000000000000000000000000000000000000000000000000000000", getBaseScene().getBaseActivity().getVertexBufferObjectManager()); rectangle.attachChild(text); getBaseScene().placeToCenterX(rectangle, 10); rectangle.setVisible(false); entity.attachChild(rectangle); } private void showInfoMessageText(String message) { final Rectangle infoMessageRect = (Rectangle) mBaseLayer.getChildByTag(INFO_MESSAGE_TEXT_TAG); ((Text)infoMessageRect.getChildByIndex(0)).setText(message); infoMessageRect.setVisible(true); } private void hideInfoMessageText() { final Rectangle infoMessageRect = (Rectangle) mBaseLayer.getChildByTag(INFO_MESSAGE_TEXT_TAG); infoMessageRect.setVisible(false); } /** * ターゲットカーソル表示。 * @param actorPlayerDto アクター情報 */ private void 
showTargetCursor(ActorPlayerDto actorPlayerDto) { // TODO: ターゲットカーソルは後で画像を用意する AnimatedSprite actorSprite = findActorSprite(actorPlayerDto.getPlayerId()).getPlayer(); Rectangle cursorRectangle = new Rectangle(actorSprite.getX(), actorSprite.getY(), 50, 50, getBaseScene().getBaseActivity().getVertexBufferObjectManager()); cursorRectangle.setColor(Color.YELLOW); cursorRectangle.setAlpha(0.5f); cursorRectangle.setTag(TARGET_CURSOR_TAG); mBaseLayer.attachChild(cursorRectangle); } /** * ターゲットカーソル非表示。 */ private void hideTargetCursor() { final Rectangle targetRectangle = (Rectangle) mBaseLayer.getChildByTag(TARGET_CURSOR_TAG); targetRectangle.detachSelf(); } /** * アクタースプライト検索 * @param actorId アクターID * @return アクタースプライト */ private ActorBattleSprite findActorSprite(int actorId) { return (ActorBattleSprite) mBaseLayer.getChildByTag(actorId); } }
src/com/kyokomi/srpgquest/scene/part/BattlePart.java
package com.kyokomi.srpgquest.scene.part; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import org.andengine.entity.IEntity; import org.andengine.entity.modifier.DelayModifier; import org.andengine.entity.modifier.IEntityModifier; import org.andengine.entity.modifier.MoveModifier; import org.andengine.entity.modifier.ParallelEntityModifier; import org.andengine.entity.modifier.ScaleModifier; import org.andengine.entity.modifier.SequenceEntityModifier; import org.andengine.entity.primitive.Rectangle; import org.andengine.entity.scene.Scene; import org.andengine.entity.shape.IAreaShape; import org.andengine.entity.sprite.AnimatedSprite; import org.andengine.entity.sprite.AnimatedSprite.IAnimationListener; import org.andengine.entity.sprite.Sprite; import org.andengine.entity.text.Text; import org.andengine.input.touch.TouchEvent; import org.andengine.opengl.font.Font; import org.andengine.util.color.Color; import org.andengine.util.modifier.IModifier; import org.andengine.util.modifier.ease.EaseBackInOut; import android.graphics.Typeface; import android.util.Log; import com.kyokomi.core.dto.ActorPlayerDto; import com.kyokomi.core.dto.SaveDataDto; import com.kyokomi.core.sprite.TextButton; import com.kyokomi.srpgquest.constant.BattleActorType; import com.kyokomi.srpgquest.constant.BattleMenuType; import com.kyokomi.srpgquest.constant.LayerZIndexType; import com.kyokomi.srpgquest.dto.BattleSelectDto; import com.kyokomi.srpgquest.layer.TextCutInTouchLayer; import com.kyokomi.srpgquest.logic.BattleLogic; import com.kyokomi.srpgquest.scene.SrpgBaseScene; import com.kyokomi.srpgquest.sprite.ActorBattleSprite; public class BattlePart extends AbstractGamePart { // ================================================== // TAG // ================================================== private static final int DAMAGE_TEXT_TAG = 1000; private static final int INFO_MESSAGE_TEXT_TAG = 1001; private static final int TARGET_CURSOR_TAG = 2000; private static final int BATTLE_MENU_TAG = 10000; private static final int BATTLE_START_CUTIN_TAG = 20000; private static final int BATTLE_END_CUTIN_TAG = 20001; private static final int ACTOR_ATTACK_EFFETC_TAG = 100000; // ================================================== private Rectangle mBaseLayer; private static int TURN_COUNT_LIMIT = 1; private int mTurnCount = 0; public enum BattleInitType { PLAYER_ATTACK(10), ENEMY_ATTACK(20) ; private Integer value; private BattleInitType(Integer value) { this.value = value; } public Integer getValue() { return value; } public static BattleInitType get(Integer value) { BattleInitType[] values = values(); for (BattleInitType type : values) { if (type.getValue() == value) { return type; } } throw new RuntimeException("find not tag type."); } } public enum BattleStateType { INIT(0), START(1), PLAYER_TURN(1000), PLAYER_TURN_TARGET_SELECT(1001), ENEMY_TURN(2000), BATTLE_START(3000), BATTLE_SELECT(4000), BATTLE_ANIMATION(5000), BATTLE_END(6000), END(9000), EXIT(9999) ; private Integer value; private BattleStateType(Integer value) { this.value = value; } public Integer getValue() { return value; } public static BattleStateType get(Integer value) { BattleStateType[] values = values(); for (BattleStateType type : values) { if (type.getValue() == value) { return type; } } throw new RuntimeException("find not tag type."); } } private BattleStateType mBattleState; public BattleStateType getBattleState() { return mBattleState; } private BattleInitType mBattleInitType; private 
List<ActorPlayerDto> mPlayerList; private List<ActorPlayerDto> mEnemyList; private BattleSelectDto mTempSelect; private List<BattleSelectDto> mBattleSelectList; // ================================================== // ボタンイベント // ================================================== private TextButton.OnClickListener mBattleMenuOnClickListener = new TextButton.OnClickListener() { @Override public void onClick(TextButton pTextButtonSprite, float pTouchAreaLocalX, float pTouchAreaLocalY) { Log.d("showBattleMenuLayer", "onClick"); // メニュー閉じる hideBattleMenuLayer(); if (pTextButtonSprite.getTag() == BattleMenuType.ATTACK.getValue()) { mTempSelect.setBattleMenuType(BattleMenuType.ATTACK); showInfoMessageText("攻撃対象を選択してください。"); // ターゲット選択 changeState(BattleStateType.PLAYER_TURN_TARGET_SELECT); } else if (pTextButtonSprite.getTag() == BattleMenuType.DEFENCE.getValue()) { // 行動確定 // TODO: あとで実装 // とりあえず攻撃 mTempSelect.setBattleMenuType(BattleMenuType.ATTACK); changeState(BattleStateType.PLAYER_TURN_TARGET_SELECT); } else if (pTextButtonSprite.getTag() == BattleMenuType.SKILL.getValue()) { showInfoMessageText("使用するスキルを選択してください。"); // スキルウィンドウ表示 // TODO: あとで実装 // とりあえず攻撃 mTempSelect.setBattleMenuType(BattleMenuType.ATTACK); changeState(BattleStateType.PLAYER_TURN_TARGET_SELECT); } else if (pTextButtonSprite.getTag() == BattleMenuType.ITEM.getValue()) { showInfoMessageText("使用するアイテムを選択してください。"); // アイテムウィンドウ表示 // TODO: あとで実装 // とりあえず攻撃 mTempSelect.setBattleMenuType(BattleMenuType.ATTACK); changeState(BattleStateType.PLAYER_TURN_TARGET_SELECT); } } }; // ================================================== // コンストラクタ // ================================================== public BattlePart(SrpgBaseScene pBaseScene) { super(pBaseScene); } // ================================================== // メソッド // ================================================== /** * @deprecated init(ActorPlayerDto player, ActorPlayerDto enemy)使って下さい * @param saveDataDto */ @Override public void init(SaveDataDto saveDataDto) { } public void init(ActorPlayerDto player, ActorPlayerDto enemy, BattleInitType pBattleInitType) { Log.d("BattlePart", player.toString()); Log.d("BattlePart", enemy.toString()); changeState(BattleStateType.INIT); mBattleInitType = pBattleInitType; mPlayerList = new ArrayList<ActorPlayerDto>(); mEnemyList = new ArrayList<ActorPlayerDto>(); mPlayerList.add(player); mEnemyList.add(enemy); // 上に重ねる用にBaseを用意 mBaseLayer = new Rectangle(0, 0, getBaseScene().getWindowWidth(), getBaseScene().getWindowHeight(), getBaseScene().getBaseActivity().getVertexBufferObjectManager()); mBaseLayer.setColor(Color.TRANSPARENT); initDamageText(mBaseLayer); initInfoMessageText(mBaseLayer); // 背景表示 initBackground(); // 背景画像の都合で表示位置が決まる float acotrBaseY = getBaseScene().getWindowHeight() / 2; // キャラ表示 ActorBattleSprite playerSprite = new ActorBattleSprite(player, getBaseScene(), 0, 0, 64, 64); playerSprite.setSize(64, 64); playerSprite.setTag(player.getPlayerId()); // 右上から表示 playerSprite.setPosition(getBaseScene().getWindowWidth() - (getBaseScene().getWindowWidth() / 8) - playerSprite.getWidth(), acotrBaseY); mBaseLayer.attachChild(playerSprite); // キャラ表示 ActorBattleSprite enemySprite = new ActorBattleSprite(enemy, getBaseScene(), 0, 0, 64, 64); enemySprite.setSize(64, 64); enemySprite.setTag(enemy.getPlayerId()); // 左上から表示 enemySprite.setPosition(getBaseScene().getWindowWidth() / 8, acotrBaseY); mBaseLayer.attachChild(enemySprite); // 攻撃エフェクトを用意 // TODO: ファイル固定にしているけど、wewaponImgResIdでから引っ張れるようにする AnimatedSprite playerAttackEffectSprite = 
getBaseScene().getResourceAnimatedSprite( "effect/rapier.png", 5, 4); playerAttackEffectSprite.setVisible(false); playerAttackEffectSprite.setTag(ACTOR_ATTACK_EFFETC_TAG + player.getEquipDto().getWeaponImgResId()); mBaseLayer.attachChild(playerAttackEffectSprite); AnimatedSprite enemyAttackEffectSprite = getBaseScene().getResourceAnimatedSprite( "effect/shock.png", 5, 2); enemyAttackEffectSprite.setVisible(false); enemyAttackEffectSprite.setColor(Color.YELLOW); enemyAttackEffectSprite.setTag(ACTOR_ATTACK_EFFETC_TAG + enemy.getEquipDto().getWeaponImgResId()); mBaseLayer.attachChild(enemyAttackEffectSprite); // ベースレイヤをattach getBaseScene().attachChild(mBaseLayer); mBaseLayer.sortChildren(); changeState(BattleStateType.START); } /** * 背景表示. */ private void initBackground() { Sprite backgroundSprite = getBaseScene().getResourceSprite("bk/main_bg.jpg"); backgroundSprite.setSize(getBaseScene().getWindowWidth(), getBaseScene().getWindowHeight()); backgroundSprite.setZIndex(-1); mBaseLayer.attachChild(backgroundSprite); } @Override public void touchEvent(Scene pScene, TouchEvent pSceneTouchEvent) { float x = pSceneTouchEvent.getX(); float y = pSceneTouchEvent.getY(); Log.d("touchEvent", "touch"); if (pSceneTouchEvent.isActionUp()) { Log.d("touchEvent", "up mBattleState = " + mBattleState); if (mBattleState == BattleStateType.START) { // カットインを非表示 TextCutInTouchLayer battleStartLayer = (TextCutInTouchLayer) mBaseLayer.getChildByTag(BATTLE_START_CUTIN_TAG); battleStartLayer.hideTouchLayer(); // バトル開始 changeState(BattleStateType.PLAYER_TURN); } else if (mBattleState == BattleStateType.END) { // カットインを非表示 TextCutInTouchLayer battleEndLayer = (TextCutInTouchLayer) mBaseLayer.getChildByTag(BATTLE_END_CUTIN_TAG); battleEndLayer.hideTouchLayer(); // バトルパート終了 end(); } else if (mBattleState == BattleStateType.PLAYER_TURN_TARGET_SELECT) { for (ActorPlayerDto enemyDto : mEnemyList) { if (enemyDto.getHitPoint() <= 0) { continue; } else { AnimatedSprite acotorSprite = findActorSprite(enemyDto.getPlayerId()).getPlayer(); if (acotorSprite.contains(x, y)) { Log.d("touchEvent", "target select end"); // タッチした時 // 攻撃対象決定 mTempSelect.setTargetDto(enemyDto); mBattleSelectList.add(mTempSelect); // ターゲットカーソルを消す hideTargetCursor(); changeState(BattleStateType.PLAYER_TURN); break; } } } } } } private void hideBattleMenuLayer() { Rectangle battleMenu = (Rectangle) mBaseLayer.getChildByTag(BATTLE_MENU_TAG); if (battleMenu != null) { battleMenu.setVisible(false); battleMenu.setPosition(getBaseScene().getWindowWidth() * 4, getBaseScene().getWindowHeight() * 4); } } private boolean showBattleMenuLayer(float x, float y) { if (mBaseLayer.getChildByTag(BATTLE_MENU_TAG) != null) { if (mBaseLayer.getChildByTag(BATTLE_MENU_TAG).isVisible()) { return false; } else { mBaseLayer.getChildByTag(BATTLE_MENU_TAG).setVisible(true); float menuWidth = ((Rectangle) mBaseLayer.getChildByTag(BATTLE_MENU_TAG)).getWidth(); float menuHeight = ((Rectangle) mBaseLayer.getChildByTag(BATTLE_MENU_TAG)).getHeight(); mBaseLayer.getChildByTag(BATTLE_MENU_TAG).setPosition(x - menuWidth / 2, y - menuHeight / 2); return true; } } Rectangle battleMenuLayer = new Rectangle( getBaseScene().getWindowWidth()/ 2, getBaseScene().getWindowHeight() / 2, getBaseScene().getWindowWidth() / 4, getBaseScene().getWindowHeight() / 2, getBaseScene().getBaseActivity().getVertexBufferObjectManager()); battleMenuLayer.setColor(Color.TRANSPARENT); Font menuFont = getBaseScene().createFont(Typeface.DEFAULT_BOLD, 20, Color.WHITE); List<TextButton> textButtonList = new 
ArrayList<TextButton>(); float sizeX = 0; float sizeY = 0; for (BattleMenuType menu : BattleMenuType.values()) { Text text = new Text(0, 0, menuFont, "********", getBaseScene().getBaseActivity().getVertexBufferObjectManager()); text.setText(menu.getText()); if (sizeX == 0 && sizeY == 0) { sizeX = text.getWidth(); sizeY = text.getHeight(); } else { text.setSize(sizeX, sizeY); } TextButton textButton = new TextButton(text, 0, 0, 80, 30, getBaseScene().getBaseActivity().getVertexBufferObjectManager(), mBattleMenuOnClickListener); textButton.setTag(menu.getValue()); textButtonList.add(textButton); } float startX = 0; float startY = 0; float addY = 0; float addX = 0; int index = 0; for (TextButton textButton : textButtonList) { getBaseScene().registerTouchArea(textButton); battleMenuLayer.attachChild(textButton); if (index == 0) { textButton.setX(startX); textButton.setY(startY); addX = textButton.getWidth(); addY = textButton.getHeight(); } else if (index == 1) { textButton.setX(startX + addX); textButton.setY(startY); } else if (index == 2) { textButton.setX(startX); textButton.setY(startY + addY); } else if (index == 3) { textButton.setX(startX + addX); textButton.setY(startY + addY); } index++; } battleMenuLayer.setSize(addX * 2, addY * 2); battleMenuLayer.setTag(BATTLE_MENU_TAG); battleMenuLayer.setPosition(x - battleMenuLayer.getWidth(), y - battleMenuLayer.getHeight()); mBaseLayer.attachChild(battleMenuLayer); return true; } @Override public void end() { changeState(BattleStateType.EXIT); if (mBaseLayer != null) { getBaseScene().detachEntity(mBaseLayer); } } // ================================================== // インラインメソッド // ================================================== /** * 状態変更 * @param pBattleStateType */ private void changeState(BattleStateType pBattleStateType) { Log.d("BattlePart", "battleState [" + this.mBattleState + "] -> [" + pBattleStateType + "]"); BattleStateType beforeStateType = this.mBattleState; this.mBattleState = pBattleStateType; // 開始時カットイン if (pBattleStateType == BattleStateType.START) { TextCutInTouchLayer battleStartLayer = new TextCutInTouchLayer(getBaseScene(), "バトル開始"); battleStartLayer.setTag(BATTLE_START_CUTIN_TAG); mBaseLayer.attachChild(battleStartLayer); // 表示 battleStartLayer.showTouchLayer(); // 最初またはバトルフェイズ後にプレイヤーターンになったとき } else if (pBattleStateType == BattleStateType.PLAYER_TURN) { // バトル終了か開始から来た場合 if ((beforeStateType == BattleStateType.BATTLE_END || beforeStateType == BattleStateType.START)) { // 初期化 mBattleSelectList = new ArrayList<BattleSelectDto>(); } ActorPlayerDto player = null; for (ActorPlayerDto playerDto : mPlayerList) { if (playerDto.getHitPoint() <= 0) { continue; } else { // 攻撃用意していない場合が対象 boolean isSelect = true; for (BattleSelectDto battleSelectDto : mBattleSelectList) { if (battleSelectDto.getActorPlayerDto().getPlayerId().intValue() == playerDto.getPlayerId().intValue()) { isSelect = false; break; } } if (isSelect) { player = playerDto; break; } } } if (player != null) { Log.d("BattlePart", "playerId = " + player.getPlayerId()); AnimatedSprite playerSprite = findActorSprite(player.getPlayerId()).getPlayer(); mTempSelect = new BattleSelectDto(); mTempSelect.setBattleActorType(BattleActorType.PLAYER); mTempSelect.setActorPlayerDto(player); mTempSelect.setAction(false); showInfoMessageText(player.getName() + "の行動を選択してください。"); // プレイヤーの攻撃ウィンドウを表示 showBattleMenuLayer(playerSprite.getX(), playerSprite.getY()); } else { // 敵行動選択へ changeState(BattleStateType.ENEMY_TURN); } } else if (pBattleStateType == 
BattleStateType.PLAYER_TURN_TARGET_SELECT) { // 攻撃可能な敵にターゲットカーソルを表示 for (ActorPlayerDto enemyDto : mEnemyList) { if (enemyDto.getHitPoint() <= 0) { continue; } else { // カーソル表示 showTargetCursor(enemyDto); } } } else if (pBattleStateType == BattleStateType.ENEMY_TURN) { BattleSelectDto battleSelect = new BattleSelectDto(); for (ActorPlayerDto enemyDto : mEnemyList) { if (enemyDto.getHitPoint() <= 0) { continue; } else { // TODO: 攻撃対象をHPの量とか強さで判断するようにする battleSelect.setActorPlayerDto(enemyDto); battleSelect.setAction(false); battleSelect.setBattleActorType(BattleActorType.ENEMY); battleSelect.setBattleMenuType(BattleMenuType.ATTACK); // 攻撃対象を選択 for (ActorPlayerDto playerDto : mPlayerList) { if (playerDto.getHitPoint() <= 0) { continue; } else { // 攻撃対象を選択 battleSelect.setTargetDto(playerDto); break; } } mBattleSelectList.add(battleSelect); } } // バトル開始 changeState(BattleStateType.BATTLE_START); } else if (pBattleStateType == BattleStateType.BATTLE_START) { // TODO: 素早い順に並び替える // TODO: とりあえず攻撃しかけた順にする final BattleActorType firstAttackActorType; if (mBattleInitType == BattleInitType.PLAYER_ATTACK) { firstAttackActorType = BattleActorType.PLAYER; } else if (mBattleInitType == BattleInitType.ENEMY_ATTACK) { firstAttackActorType = BattleActorType.ENEMY; } else { firstAttackActorType = null; } if (firstAttackActorType != null) { Collections.sort(mBattleSelectList, new Comparator<BattleSelectDto>() { @Override public int compare(BattleSelectDto p1, BattleSelectDto p2) { Log.d("sort", "p1 = " + p1.getBattleActorType() + " p2 = " + p2.getBattleActorType() + " first = " + firstAttackActorType ); if (p1.getBattleActorType() == p2.getBattleActorType()) { return 0; } else if (p1.getBattleActorType() == firstAttackActorType) { return -1; } else if (p2.getBattleActorType() == firstAttackActorType) { return 1; } else { return 0; } } }); } changeState(BattleStateType.BATTLE_SELECT); } else if (pBattleStateType == BattleStateType.BATTLE_SELECT) { for (BattleSelectDto battleSelect : mBattleSelectList) { if (battleSelect.getActorPlayerDto().getHitPoint() <= 0) { // バトルフェイズに死亡した continue; } if (battleSelect.isAction()) { // 行動済み continue; } // 攻撃対象が死亡しているときは、攻撃対象をランダムに検索 if (battleSelect.getTargetDto().getHitPoint() <= 0) { battleSelect.setTargetDto(null); List<ActorPlayerDto> targetList = new ArrayList<ActorPlayerDto>(); if (battleSelect.getBattleActorType() == BattleActorType.PLAYER) { targetList = mEnemyList; } else if (battleSelect.getBattleActorType() == BattleActorType.ENEMY) { targetList = mPlayerList; } else { continue; } for (ActorPlayerDto target : targetList) { if (target.getHitPoint() <= 0) { continue; } battleSelect.setTargetDto(target); break; } } if (battleSelect.getTargetDto() == null) { // 攻撃可能な敵がいないので諦める continue; } BattleLogic battleLogic = new BattleLogic(); int damage = battleLogic.attack(battleSelect.getActorPlayerDto(), battleSelect.getTargetDto()); Log.d("battleLogic", battleSelect.getActorPlayerDto().toString()); Log.d("battleLogic", battleSelect.getTargetDto().toString()); showInfoMessageText(battleSelect.getActorPlayerDto().getName() + "の攻撃"); // 攻撃アニメーション開始 changeState(BattleStateType.BATTLE_ANIMATION); attackAnimation(battleSelect.getActorPlayerDto(), battleSelect.getTargetDto(), damage); // 行動済みにして一旦抜ける(アニメーションのコールバックで改めてBATTLE_SELECTモードに移行する battleSelect.setAction(true); break; } // アニメーション中はバトル終了はしない if (mBattleState != BattleStateType.BATTLE_ANIMATION) { changeState(BattleStateType.BATTLE_END); } } else if (pBattleStateType == BattleStateType.BATTLE_END) { mTurnCount++; if 
(mTurnCount < TURN_COUNT_LIMIT) { // バトル継続 changeState(BattleStateType.PLAYER_TURN); } else { // バトル終了 changeState(BattleStateType.END); } } else if (pBattleStateType == BattleStateType.END) { // バトルパート終了 TextCutInTouchLayer battleEndLayer = new TextCutInTouchLayer(getBaseScene(), "バトル終了"); battleEndLayer.setTag(BATTLE_END_CUTIN_TAG); mBaseLayer.attachChild(battleEndLayer); // 表示 battleEndLayer.showTouchLayer(); } else if (pBattleStateType == BattleStateType.EXIT) { // 終わり } } private void attackAnimation(final ActorPlayerDto attackFrom, final ActorPlayerDto attackTo, final int damage) { final AnimatedSprite attackFromSprite = findActorSprite(attackFrom.getPlayerId()).getPlayer(); final AnimatedSprite attackToSprite = findActorSprite(attackTo.getPlayerId()).getPlayer(); // 移動方向を算出 float directionDiff_x = attackFromSprite.getX() - attackToSprite.getX(); if (directionDiff_x > 0) { // 攻撃者が右側にいる directionDiff_x = 1; } else { // 攻撃者が左側にいる directionDiff_x = -1; } // 移動先を算出 final float pFromX = attackFromSprite.getX(); final float pToX = attackToSprite.getX() + (attackToSprite.getWidth() * directionDiff_x); final float pFromY = attackFromSprite.getY(); final float pToY = attackToSprite.getY(); final float effectX = attackToSprite.getX() + attackToSprite.getWidth() / 2; final float effectY = attackToSprite.getY() + attackToSprite.getHeight() / 2; attackFromSprite.registerEntityModifier(new SequenceEntityModifier( // 攻撃対象に向かって移動するアニメーション new MoveModifier(0.5f, pFromX, pToX, pFromY, pToY, new IEntityModifier.IEntityModifierListener() { @Override public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { } }), // 攻撃アニメーション new DelayModifier(0.1f, new IEntityModifier.IEntityModifierListener() { @Override public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { attackFromSprite.animate( new long[]{100, 100, 100, 100}, new int[]{6, 7, 8, 7}, false); } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { AnimatedSprite effect = (AnimatedSprite) mBaseLayer.getChildByTag( ACTOR_ATTACK_EFFETC_TAG + attackFrom.getEquipDto().getWeaponImgResId()); effect.setPosition(effectX - effect.getWidth() / 2, effectY - effect.getHeight() / 2); effect.setCurrentTileIndex(0); effect.setVisible(true); effect.animate( new long[]{20, 20, 20, 20, 20, 20, 20, 20, 20, 20}, new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, false, new IAnimationListener() { @Override public void onAnimationStarted(AnimatedSprite pAnimatedSprite, int pInitialLoopCount) { } @Override public void onAnimationLoopFinished(AnimatedSprite pAnimatedSprite, int pRemainingLoopCount, int pInitialLoopCount) { } @Override public void onAnimationFrameChanged(AnimatedSprite pAnimatedSprite, int pOldFrameIndex, int pNewFrameIndex) { } @Override public void onAnimationFinished(AnimatedSprite pAnimatedSprite) { pAnimatedSprite.setVisible(false); } }); } }), // ダメージテキスト表示 new DelayModifier(0.5f, new IEntityModifier.IEntityModifierListener() { @Override public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { showInfoMessageText(attackTo.getName() + "に" + damage + "ダメージ"); // ダメージテキスト表示 showDamageText(damage, attackToSprite); } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { // 死亡してたら死亡画像にする if (attackTo.getHitPoint() <= 0) { // TODO: 死亡アニメーションとりあえず消滅 attackToSprite.setVisible(false); } } }), // 戻りアニメーション new MoveModifier(0.5f, pToX, pFromX, pToY, pFromY, 
new IEntityModifier.IEntityModifierListener() { @Override public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { // アニメーション終わり changeState(BattleStateType.BATTLE_SELECT); } }) )); } /** * ダメージテキスト初期化 */ private void initDamageText(IEntity entity) { Text damageText = new Text(0, 0, getBaseScene().getFont(), "00000", getBaseScene().getBaseActivity().getVertexBufferObjectManager()); damageText.setColor(Color.TRANSPARENT); damageText.setZIndex(LayerZIndexType.TEXT_LAYER.getValue()); damageText.setTag(DAMAGE_TEXT_TAG); entity.attachChild(damageText); } /** * ダメージテキスト表示. */ private void showDamageText(int damage, final IAreaShape areaShape) { final Text damageText = (Text) mBaseLayer.getChildByTag(DAMAGE_TEXT_TAG); damageText.setScale(0.5f); // 頭の上くらいに表示 damageText.setX(areaShape.getX() + areaShape.getWidth() / 2); damageText.setY(areaShape.getY() - areaShape.getHeight() / 2); damageText.setText(String.valueOf(damage)); damageText.setColor(Color.WHITE); damageText.registerEntityModifier(new SequenceEntityModifier( new ParallelEntityModifier( new ScaleModifier(0.5f, 0.5f, 2.0f, EaseBackInOut.getInstance()), new SequenceEntityModifier( new MoveModifier(0.25f, damageText.getX(), damageText.getX(), damageText.getY(), damageText.getY() - 15, EaseBackInOut.getInstance()), new MoveModifier(0.25f, damageText.getX(), damageText.getX(), damageText.getY() - 15, damageText.getY(), EaseBackInOut.getInstance())) ), new DelayModifier(0.2f, new IEntityModifier.IEntityModifierListener() { @Override public void onModifierStarted(IModifier<IEntity> pModifier, IEntity pItem) { } @Override public void onModifierFinished(IModifier<IEntity> pModifier, IEntity pItem) { damageText.setColor(Color.TRANSPARENT); } }))); } /** * 戦闘ログテキスト初期化 */ private void initInfoMessageText(IEntity entity) { float width = getBaseScene().getWindowWidth() - (getBaseScene().getWindowWidth() / 10); float height = 40; Rectangle rectangle = getBaseScene().createRectangle(0, 0, width, height); rectangle.setColor(Color.BLACK); rectangle.setAlpha(0.5f); rectangle.setZIndex(LayerZIndexType.TEXT_LAYER.getValue()); rectangle.setTag(INFO_MESSAGE_TEXT_TAG); Text text = new Text(10, 10, getBaseScene().createFont(Typeface.DEFAULT, 20, Color.WHITE), "000000000000000000000000000000000000000000000000000000000000000", getBaseScene().getBaseActivity().getVertexBufferObjectManager()); rectangle.attachChild(text); getBaseScene().placeToCenterX(rectangle, 10); rectangle.setVisible(false); entity.attachChild(rectangle); } private void showInfoMessageText(String message) { final Rectangle infoMessageRect = (Rectangle) mBaseLayer.getChildByTag(INFO_MESSAGE_TEXT_TAG); ((Text)infoMessageRect.getChildByIndex(0)).setText(message); infoMessageRect.setVisible(true); } private void hideInfoMessageText() { final Rectangle infoMessageRect = (Rectangle) mBaseLayer.getChildByTag(INFO_MESSAGE_TEXT_TAG); infoMessageRect.setVisible(false); } /** * ターゲットカーソル表示。 * @param actorPlayerDto アクター情報 */ private void showTargetCursor(ActorPlayerDto actorPlayerDto) { // TODO: ターゲットカーソルは後で画像を用意する AnimatedSprite actorSprite = findActorSprite(actorPlayerDto.getPlayerId()).getPlayer(); Rectangle cursorRectangle = new Rectangle(actorSprite.getX(), actorSprite.getY(), 50, 50, getBaseScene().getBaseActivity().getVertexBufferObjectManager()); cursorRectangle.setColor(Color.YELLOW); cursorRectangle.setAlpha(0.5f); cursorRectangle.setTag(TARGET_CURSOR_TAG); 
mBaseLayer.attachChild(cursorRectangle); } /** * ターゲットカーソル非表示。 */ private void hideTargetCursor() { final Rectangle targetRectangle = (Rectangle) mBaseLayer.getChildByTag(TARGET_CURSOR_TAG); targetRectangle.detachSelf(); } /** * アクタースプライト検索 * @param actorId アクターID * @return アクタースプライト */ private ActorBattleSprite findActorSprite(int actorId) { return (ActorBattleSprite) mBaseLayer.getChildByTag(actorId); } }
Add damage effect
src/com/kyokomi/srpgquest/scene/part/BattlePart.java
Add damage effect
Java
epl-1.0
84b3bc2e7bfeb352ba572b9a655903b31bfb872e
0
gnodet/wikitext
/******************************************************************************* * Copyright (c) 2010 Tasktop Technologies and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Tasktop Technologies - initial API and implementation *******************************************************************************/ package org.eclipse.mylyn.bugzilla.tests.ui; import junit.framework.TestCase; import org.eclipse.jface.text.Region; import org.eclipse.jface.text.hyperlink.IHyperlink; import org.eclipse.mylyn.internal.bugzilla.core.BugzillaCorePlugin; import org.eclipse.mylyn.internal.bugzilla.ui.TaskAttachmentHyperlink; import org.eclipse.mylyn.internal.bugzilla.ui.tasklist.BugzillaConnectorUi; import org.eclipse.mylyn.internal.tasks.core.TaskTask; import org.eclipse.mylyn.tasks.core.TaskRepository; import org.eclipse.mylyn.tasks.core.data.TaskAttribute; import org.eclipse.mylyn.tasks.ui.TaskHyperlink; /** * @author Steffen Pingel */ public class BugzillaHyperlinkDetectorTest extends TestCase { private BugzillaConnectorUi connector; private TaskRepository repository; private TaskTask task; private TaskAttachmentHyperlink alink(int offset, int length, String attachmentId) { TaskAttachmentHyperlink link = new TaskAttachmentHyperlink(new Region(offset, length), repository, attachmentId); return link; } private void assertHyperlinks(String string, IHyperlink... expected) { IHyperlink[] links = connector.findHyperlinks(repository, task, string, -1, 0); if (expected.length == 0) { assertNull(links); return; } assertNotNull("Expected hyperlinks in " + string, links); assertEquals(expected.length, links.length); for (int i = 0; i < links.length; i++) { assertEquals(expected[i], links[i]); } } private TaskHyperlink link(int offset, int length, String taskId) { return link(offset, length, taskId, null); } private TaskHyperlink link(int offset, int length, String taskId, String commentId) { TaskHyperlink link = new TaskHyperlink(new Region(offset, length), repository, taskId); if (commentId != null) { link.setSelection(TaskAttribute.PREFIX_COMMENT + commentId); } return link; } @Override protected void setUp() throws Exception { repository = new TaskRepository(BugzillaCorePlugin.CONNECTOR_KIND, "http://localhost"); task = new TaskTask(BugzillaCorePlugin.CONNECTOR_KIND, "http://localhost", "123"); connector = new BugzillaConnectorUi(); } public void testFindHyperlinksAttachment() { assertHyperlinks("attachment 123", alink(0, 14, "123")); assertHyperlinks("attachment 123", alink(0, 15, "123")); assertHyperlinks("attachment # 123", alink(0, 17, "123")); assertHyperlinks("attachment#1", alink(0, 12, "1")); assertHyperlinks("attachment (id=123)", alink(0, 19, "123")); assertHyperlinks("Created attachment 123", alink(0, 22, "123")); assertHyperlinks("Created an attachment 123", alink(0, 25, "123")); assertHyperlinks("Created an attachment (id=123)", alink(0, 30, "123")); } public void testFindHyperlinksBug() { assertHyperlinks("bug123", link(0, 6, "123")); assertHyperlinks("bug 123", link(0, 7, "123")); assertHyperlinks("bug 123", link(0, 8, "123")); assertHyperlinks("bug#123", link(0, 7, "123")); assertHyperlinks("bug # 123", link(0, 11, "123")); } public void testFindHyperlinksTask() { assertHyperlinks("task123", link(0, 7, "123")); } public void testFindHyperlinksDuplicateOf() { 
assertHyperlinks("duplicate of 123", link(0, 16, "123")); } public void testFindHyperlinksBugComment() { assertHyperlinks("bug 123 comment 12", link(0, 18, "123", "12")); assertHyperlinks("bug#123 comment 12", link(0, 18, "123", "12")); assertHyperlinks("bug 123 comment#12", link(0, 18, "123", "12")); assertHyperlinks("bug#123 comment#12", link(0, 18, "123", "12")); assertHyperlinks("bug 123 comment# 12", link(0, 22, "123", "12")); assertHyperlinks("bug456comment#1", link(0, 15, "456", "1")); } public void testFindHyperlinksBugNoComment() { assertHyperlinks("bug 123#c1", link(0, 7, "123")); assertHyperlinks("bug 123#1", link(0, 7, "123")); assertHyperlinks("bug#123#c1", link(0, 7, "123")); assertHyperlinks("bug#123#1", link(0, 7, "123")); } public void testFindHyperlinksComment() { assertHyperlinks("comment#12", link(0, 10, "123", "12")); assertHyperlinks("comment #12", link(0, 12, "123", "12")); assertHyperlinks("comment 1", link(0, 9, "123", "1")); } public void testFindHyperlinksInline() { assertHyperlinks("abc bug 123 def", link(4, 7, "123")); } public void testFindHyperlinksMultiple() { assertHyperlinks("bug 456#comment#12", link(0, 7, "456"), link(8, 10, "123", "12")); assertHyperlinks("bug 123 bug 456", link(0, 7, "123"), link(20, 7, "456")); } public void testFindHyperlinksNoAttachment() { assertHyperlinks("attachment"); assertHyperlinks("attachmen 123"); assertHyperlinks("attachment id"); assertHyperlinks("attachment id"); } public void testFindHyperlinksNoBug() { assertHyperlinks("bu 123"); assertHyperlinks("bu# 123"); assertHyperlinks("bug"); assertHyperlinks("bugcomment"); assertHyperlinks("bug#comment"); } public void testFindHyperlinksNoComment() { assertHyperlinks("c 12"); assertHyperlinks("#c12"); assertHyperlinks("comment"); } }
org.eclipse.mylyn.bugzilla.tests/src/org/eclipse/mylyn/bugzilla/tests/ui/BugzillaHyperlinkDetectorTest.java
/******************************************************************************* * Copyright (c) 2010 Tasktop Technologies and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Tasktop Technologies - initial API and implementation *******************************************************************************/ package org.eclipse.mylyn.bugzilla.tests.ui; import junit.framework.TestCase; import org.eclipse.jface.text.Region; import org.eclipse.jface.text.hyperlink.IHyperlink; import org.eclipse.mylyn.internal.bugzilla.core.BugzillaCorePlugin; import org.eclipse.mylyn.internal.bugzilla.ui.TaskAttachmentHyperlink; import org.eclipse.mylyn.internal.bugzilla.ui.tasklist.BugzillaConnectorUi; import org.eclipse.mylyn.internal.tasks.core.TaskTask; import org.eclipse.mylyn.tasks.core.TaskRepository; import org.eclipse.mylyn.tasks.core.data.TaskAttribute; import org.eclipse.mylyn.tasks.ui.TaskHyperlink; /** * @author Steffen Pingel */ public class BugzillaHyperlinkDetectorTest extends TestCase { private BugzillaConnectorUi connector; private TaskRepository repository; private TaskTask task; private TaskAttachmentHyperlink alink(int offset, int length, String attachmentId) { TaskAttachmentHyperlink link = new TaskAttachmentHyperlink(new Region(offset, length), repository, attachmentId); return link; } private void assertHyperlinks(String string, IHyperlink... expected) { IHyperlink[] links = connector.findHyperlinks(repository, task, string, -1, 0); if (expected.length == 0) { assertNull(links); return; } assertNotNull("Expected hyperlinks in " + string, links); assertEquals(expected.length, links.length); for (int i = 0; i < links.length; i++) { assertEquals(expected[i], links[i]); } } private TaskHyperlink link(int offset, int length, String taskId) { return link(offset, length, taskId, null); } private TaskHyperlink link(int offset, int length, String taskId, String commentId) { TaskHyperlink link = new TaskHyperlink(new Region(offset, length), repository, taskId); if (commentId != null) { link.setSelection(TaskAttribute.PREFIX_COMMENT + commentId); } return link; } @Override protected void setUp() throws Exception { repository = new TaskRepository(BugzillaCorePlugin.CONNECTOR_KIND, "http://localhost"); task = new TaskTask(BugzillaCorePlugin.CONNECTOR_KIND, "http://localhost", "123"); connector = new BugzillaConnectorUi(); } public void testFindHyperlinksAttachment() { assertHyperlinks("attachment 123", alink(0, 14, "123")); assertHyperlinks("attachment 123", alink(0, 15, "123")); assertHyperlinks("attachment # 123", alink(0, 17, "123")); assertHyperlinks("attachment#1", alink(0, 12, "1")); assertHyperlinks("attachment (id=123)", alink(0, 19, "123")); assertHyperlinks("Created attachment 123", alink(0, 22, "123")); assertHyperlinks("Created an attachment 123", alink(0, 25, "123")); assertHyperlinks("Created an attachment (id=123)", alink(0, 30, "123")); } public void testFindHyperlinksBug() { assertHyperlinks("bug123", link(0, 6, "123")); assertHyperlinks("bug 123", link(0, 7, "123")); assertHyperlinks("bug 123", link(0, 8, "123")); assertHyperlinks("bug#123", link(0, 7, "123")); assertHyperlinks("bug # 123", link(0, 11, "123")); } public void testFindHyperlinksTask() { assertHyperlinks("task123", link(0, 7, "123")); } public void testFindHyperlinksDuplicateOf() { 
assertHyperlinks("duplicate of 123", link(0, 15, "123")); } public void testFindHyperlinksBugComment() { assertHyperlinks("bug 123 comment 12", link(0, 18, "123", "12")); assertHyperlinks("bug#123 comment 12", link(0, 18, "123", "12")); assertHyperlinks("bug 123 comment#12", link(0, 18, "123", "12")); assertHyperlinks("bug#123 comment#12", link(0, 18, "123", "12")); assertHyperlinks("bug 123 comment# 12", link(0, 22, "123", "12")); assertHyperlinks("bug456comment#1", link(0, 15, "456", "1")); } public void testFindHyperlinksBugNoComment() { assertHyperlinks("bug 123#c1", link(0, 7, "123")); assertHyperlinks("bug 123#1", link(0, 7, "123")); assertHyperlinks("bug#123#c1", link(0, 7, "123")); assertHyperlinks("bug#123#1", link(0, 7, "123")); } public void testFindHyperlinksComment() { assertHyperlinks("comment#12", link(0, 10, "123", "12")); assertHyperlinks("comment #12", link(0, 12, "123", "12")); assertHyperlinks("comment 1", link(0, 9, "123", "1")); } public void testFindHyperlinksInline() { assertHyperlinks("abc bug 123 def", link(4, 7, "123")); } public void testFindHyperlinksMultiple() { assertHyperlinks("bug 456#comment#12", link(0, 7, "456"), link(8, 10, "123", "12")); assertHyperlinks("bug 123 bug 456", link(0, 7, "123"), link(20, 7, "456")); } public void testFindHyperlinksNoAttachment() { assertHyperlinks("attachment"); assertHyperlinks("attachmen 123"); assertHyperlinks("attachment id"); assertHyperlinks("attachment id"); assertHyperlinks("Create attachment 123"); } public void testFindHyperlinksNoBug() { assertHyperlinks("bu 123"); assertHyperlinks("bu# 123"); assertHyperlinks("bug"); assertHyperlinks("bugcomment"); assertHyperlinks("bug#comment"); } public void testFindHyperlinksNoComment() { assertHyperlinks("c 12"); assertHyperlinks("#c12"); assertHyperlinks("comment"); } }
REOPENED - bug 304910: [api] recognize comment#number link on bug editor without bug number too https://bugs.eclipse.org/bugs/show_bug.cgi?id=304910
org.eclipse.mylyn.bugzilla.tests/src/org/eclipse/mylyn/bugzilla/tests/ui/BugzillaHyperlinkDetectorTest.java
REOPENED - bug 304910: [api] recognize comment#number link on bug editor without bug number too https://bugs.eclipse.org/bugs/show_bug.cgi?id=304910
Java
epl-1.0
d06b77493b9e7ebaaaf8f3162a1e2ac19baff56b
0
cthiebaud/jgrapht,VoVanHai/jgrapht,arcanefoam/jgrapht,gjroelofs/jgrapht,AidanDelaney/jgrapht,feilong0309/jgrapht,kashak79/jgrapht,gjroelofs/jgrapht,WorstCase00/jgrapht,WorstCase00/jgrapht,wselwood/jgrapht,alexeykudinkin/jgrapht,Infeligo/jgrapht,kashak79/jgrapht,wselwood/jgrapht,feilong0309/jgrapht,lingeringsocket/jgrapht,WorstCase00/jgrapht,cthiebaud/jgrapht,mt0803/jgrapht,Infeligo/jgrapht,arcanefoam/jgrapht,alexeykudinkin/jgrapht,hal/jgrapht,rcpoison/jgrapht,WorstCase00/jgrapht,AidanDelaney/jgrapht,hal/jgrapht,mt0803/jgrapht
/* ========================================== * JGraphT : a free Java graph-theory library * ========================================== * * Project Info: http://jgrapht.sourceforge.net/ * Project Lead: Barak Naveh (barak_naveh@users.sourceforge.net) * * (C) Copyright 2003, by Barak Naveh and Contributors. * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this library; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. */ /* ------------------------------ * GraphGeneratorTest.java * ------------------------------ * (C) Copyright 2003, by Barak Naveh and Contributors. * * Original Author: John V. Sichi * Contributor(s): - * * $Id$ * * Changes * ------- * 17-Sept-2003 : Initial revision (JVS); * */ package org._3pq.jgrapht.alg; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import junit.framework.TestCase; import org._3pq.jgrapht.DirectedGraph; import org._3pq.jgrapht.Edge; import org._3pq.jgrapht.VertexFactory; import org._3pq.jgrapht.alg.generator.CycleGraphGenerator; import org._3pq.jgrapht.alg.generator.EmptyGraphGenerator; import org._3pq.jgrapht.alg.generator.GraphGenerator; import org._3pq.jgrapht.alg.generator.LinearGraphGenerator; import org._3pq.jgrapht.graph.DefaultDirectedGraph; /** * . * * @author John V. Sichi */ public class GraphGeneratorTest extends TestCase { private static final int DEGREE = 10; private VertexFactory m_vertexFactory = new VertexFactory( ) { private int m_i; public Object createVertex( ) { return new Integer( ++m_i ); } }; /** * . */ public void testCycleGraphGenerator( ) { GraphGenerator gen = new CycleGraphGenerator( DEGREE ); DirectedGraph g = new DefaultDirectedGraph( ); Map resultMap = new HashMap( ); gen.generateGraph( g, m_vertexFactory, resultMap ); assertEquals( DEGREE, g.vertexSet( ).size( ) ); assertEquals( DEGREE, g.edgeSet( ).size( ) ); Object startVertex = g.vertexSet( ).iterator( ).next( ); assertEquals( 1, g.outDegreeOf( startVertex ) ); Object nextVertex = startVertex; Set seen = new HashSet( ); for( int i = 0; i < DEGREE; ++i ) { Edge nextEdge = (Edge) g.outgoingEdgesOf( nextVertex ).get( 0 ); nextVertex = nextEdge.getTarget( ); assertEquals( 1, g.inDegreeOf( nextVertex ) ); assertEquals( 1, g.outDegreeOf( nextVertex ) ); assertTrue( !seen.contains( nextVertex ) ); seen.add( nextVertex ); } // do you ever get the feeling you're going in circles? assertTrue( nextVertex == startVertex ); assertTrue( resultMap.isEmpty( ) ); } /** * . */ public void testEmptyGraphGenerator( ) { GraphGenerator gen = new EmptyGraphGenerator( DEGREE ); DirectedGraph g = new DefaultDirectedGraph( ); Map resultMap = new HashMap( ); gen.generateGraph( g, m_vertexFactory, resultMap ); assertEquals( DEGREE, g.vertexSet( ).size( ) ); assertEquals( 0, g.edgeSet( ).size( ) ); assertTrue( resultMap.isEmpty( ) ); } /** * . 
*/ public void testLinearGraphGenerator( ) { GraphGenerator gen = new LinearGraphGenerator( DEGREE ); DirectedGraph g = new DefaultDirectedGraph( ); Map resultMap = new HashMap( ); gen.generateGraph( g, m_vertexFactory, resultMap ); assertEquals( DEGREE, g.vertexSet( ).size( ) ); assertEquals( DEGREE - 1, g.edgeSet( ).size( ) ); Object startVertex = resultMap.get( LinearGraphGenerator.START_VERTEX ); Object endVertex = resultMap.get( LinearGraphGenerator.END_VERTEX ); Iterator vertexIter = g.vertexSet( ).iterator( ); while( vertexIter.hasNext( ) ) { Object vertex = vertexIter.next( ); if( vertex == startVertex ) { assertEquals( 0, g.inDegreeOf( vertex ) ); assertEquals( 1, g.outDegreeOf( vertex ) ); continue; } if( vertex == endVertex ) { assertEquals( 1, g.inDegreeOf( vertex ) ); assertEquals( 0, g.outDegreeOf( vertex ) ); continue; } assertEquals( 1, g.inDegreeOf( vertex ) ); assertEquals( 1, g.outDegreeOf( vertex ) ); } } // TODO: testWheelGraphGenerator }
testsrc/org/_3pq/jgrapht/alg/GraphGeneratorTest.java
/* ========================================== * JGraphT : a free Java graph-theory library * ========================================== * * Project Info: http://jgrapht.sourceforge.net/ * Project Lead: Barak Naveh (barak_naveh@users.sourceforge.net) * * (C) Copyright 2003, by Barak Naveh and Contributors. * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this library; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. */ /* ------------------------------ * GraphGeneratorTest.java * ------------------------------ * (C) Copyright 2003, by Barak Naveh and Contributors. * * Original Author: John V. Sichi * Contributor(s): - * * $Id$ * * Changes * ------- * 17-Sept-2003 : Initial revision (JVS); * */ package org._3pq.jgrapht.alg; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import junit.framework.TestCase; import org._3pq.jgrapht.DirectedGraph; import org._3pq.jgrapht.Edge; import org._3pq.jgrapht.VertexFactory; import org._3pq.jgrapht.alg.generator.CycleGraphGenerator; import org._3pq.jgrapht.alg.generator.EmptyGraphGenerator; import org._3pq.jgrapht.alg.generator.GraphGenerator; import org._3pq.jgrapht.alg.generator.LinearGraphGenerator; import org._3pq.jgrapht.alg.generator.WheelGraphGenerator; import org._3pq.jgrapht.graph.DefaultDirectedGraph; /** * . * * @author John V. Sichi */ public class GraphGeneratorTest extends TestCase { private static final int DEGREE = 10; private VertexFactory m_vertexFactory = new VertexFactory( ) { private int m_i; public Object createVertex( ) { return new Integer( ++m_i ); } }; /** * . */ public void testCycleGraphGenerator( ) { GraphGenerator gen = new CycleGraphGenerator( DEGREE ); DirectedGraph g = new DefaultDirectedGraph( ); Map resultMap = new HashMap( ); gen.generateGraph( g, m_vertexFactory, resultMap ); assertEquals( DEGREE, g.vertexSet( ).size( ) ); assertEquals( DEGREE, g.edgeSet( ).size( ) ); Object startVertex = g.vertexSet( ).iterator( ).next( ); assertEquals( 1, g.outDegreeOf( startVertex ) ); Object nextVertex = startVertex; Set seen = new HashSet( ); for( int i = 0; i < DEGREE; ++i ) { Edge nextEdge = (Edge) g.outgoingEdgesOf( nextVertex ).get( 0 ); nextVertex = nextEdge.getTarget( ); assertEquals( 1, g.inDegreeOf( nextVertex ) ); assertEquals( 1, g.outDegreeOf( nextVertex ) ); assertTrue( !seen.contains( nextVertex ) ); seen.add( nextVertex ); } // do you ever get the feeling you're going in circles? assertTrue( nextVertex == startVertex ); assertTrue( resultMap.isEmpty( ) ); } /** * . */ public void testEmptyGraphGenerator( ) { GraphGenerator gen = new EmptyGraphGenerator( DEGREE ); DirectedGraph g = new DefaultDirectedGraph( ); Map resultMap = new HashMap( ); gen.generateGraph( g, m_vertexFactory, resultMap ); assertEquals( DEGREE, g.vertexSet( ).size( ) ); assertEquals( 0, g.edgeSet( ).size( ) ); assertTrue( resultMap.isEmpty( ) ); } /** * . 
*/ public void testLinearGraphGenerator( ) { GraphGenerator gen = new LinearGraphGenerator( DEGREE ); DirectedGraph g = new DefaultDirectedGraph( ); Map resultMap = new HashMap( ); gen.generateGraph( g, m_vertexFactory, resultMap ); assertEquals( DEGREE, g.vertexSet( ).size( ) ); assertEquals( DEGREE - 1, g.edgeSet( ).size( ) ); Object startVertex = resultMap.get( LinearGraphGenerator.START_VERTEX ); Object endVertex = resultMap.get( LinearGraphGenerator.END_VERTEX ); Iterator vertexIter = g.vertexSet( ).iterator( ); while( vertexIter.hasNext( ) ) { Object vertex = vertexIter.next( ); if( vertex == startVertex ) { assertEquals( 0, g.inDegreeOf( vertex ) ); assertEquals( 1, g.outDegreeOf( vertex ) ); continue; } if( vertex == endVertex ) { assertEquals( 1, g.inDegreeOf( vertex ) ); assertEquals( 0, g.outDegreeOf( vertex ) ); continue; } assertEquals( 1, g.inDegreeOf( vertex ) ); assertEquals( 1, g.outDegreeOf( vertex ) ); } } // TODO: testWheelGraphGenerator }
removed excess import
testsrc/org/_3pq/jgrapht/alg/GraphGeneratorTest.java
removed excess import
Java
mpl-2.0
382042608fa1293900c7960f7189bd67651278a4
0
WeaveTeam/WeaveJS,WeaveTeam/WeaveJS,WeaveTeam/WeaveJS,WeaveTeam/WeaveJS
/* Weave (Web-based Analysis and Visualization Environment) Copyright (C) 2008-2011 University of Massachusetts Lowell This file is a part of Weave. Weave is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License, Version 3, as published by the Free Software Foundation. Weave is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Weave. If not, see <http://www.gnu.org/licenses/>. */ package weave.servlets; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.rmi.RemoteException; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.UUID; import java.util.Vector; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import weave.beans.AdminServiceResponse; import weave.beans.UploadFileFilter; import weave.beans.UploadedFile; import weave.config.DatabaseConfig; import weave.config.DublinCoreElement; import weave.config.DublinCoreUtils; import weave.config.ISQLConfig; import weave.config.ISQLConfig.AttributeColumnInfo; import weave.config.ISQLConfig.AttributeColumnInfo.DataType; import weave.config.ISQLConfig.AttributeColumnInfo.Metadata; import weave.config.ISQLConfig.ConnectionInfo; import weave.config.ISQLConfig.DatabaseConfigInfo; import weave.config.ISQLConfig.GeometryCollectionInfo; import weave.config.SQLConfigManager; import weave.config.SQLConfigUtils; import weave.config.SQLConfigXML; import weave.geometrystream.GeometryStreamConverter; import weave.geometrystream.SHPGeometryStreamUtils; import weave.geometrystream.SQLGeometryStreamDestination; import weave.utils.CSVParser; import weave.utils.DBFUtils; import weave.utils.FileUtils; import weave.utils.ListUtils; import weave.utils.SQLResult; import weave.utils.SQLUtils; import weave.utils.XMLUtils; public class AdminService extends GenericServlet { private static final long serialVersionUID = 1L; public AdminService() { } /** * This constructor is for testing only. * @param configManager */ public AdminService(SQLConfigManager configManager) { this.configManager = configManager; } public void init(ServletConfig config) throws ServletException { super.init(config); configManager = SQLConfigManager.getInstance(config.getServletContext()); tempPath = configManager.getContextParams().getTempPath(); uploadPath = configManager.getContextParams().getUploadPath(); docrootPath = configManager.getContextParams().getDocrootPath(); } // /** // * ONLY FOR TESTING. 
// * @throws ServletException // */ // public void init2() throws ServletException // { // tempPath = configManager.getContextParams().getTempPath(); // uploadPath = configManager.getContextParams().getUploadPath(); // docrootPath = configManager.getContextParams().getDocrootPath(); // } private String tempPath; private String uploadPath; private String docrootPath; private static int StringType = 0; private static int IntType = 1; private static int DoubleType = 2; private SQLConfigManager configManager; synchronized public AdminServiceResponse checkSQLConfigExists() { try { if (databaseConfigExists()) return new AdminServiceResponse(true, "Configuration file exists."); } catch (RemoteException se) { se.printStackTrace(); File configFile = new File(configManager.getConfigFileName()); if (configFile.exists()) return new AdminServiceResponse(false, String.format("%s is invalid. Please edit the file and fix the problem" + " or delete it and create a new one through the admin console.\n\n%s", configFile.getName(), se.getMessage())); } return new AdminServiceResponse(false, "The configuration storage location must be specified."); } synchronized private boolean databaseConfigExists() throws RemoteException { configManager.detectConfigChanges(); ISQLConfig config = configManager.getConfig(); return config.isConnectedToDatabase(); } synchronized public boolean authenticate(String connectionName, String password) throws RemoteException { boolean result = checkPasswordAndGetConfig(connectionName, password) != null; if (!result) System.out.println(String.format("authenticate(\"%s\",\"%s\") == %s", connectionName, password, result)); return result; } synchronized private ISQLConfig checkPasswordAndGetConfig(String connectionName, String password) throws RemoteException { configManager.detectConfigChanges(); ISQLConfig config = configManager.getConfig(); ConnectionInfo info = config.getConnectionInfo(connectionName); if (info == null || !password.equals(info.pass)) throw new RemoteException("Incorrect username or password."); return config; } synchronized private void backupAndSaveConfig(ISQLConfig config) throws RemoteException { try { String fileName = configManager.getConfigFileName(); File configFile = new File(fileName); File backupFile = new File(tempPath, "sqlconfig_backup.txt"); // make a backup FileUtils.copy(configFile, backupFile); // save the new config to the file XMLUtils.getStringFromXML(config.getDocument(), SQLConfigXML.DTD_FILENAME, fileName); } catch (Exception e) { throw new RemoteException("Backup failed", e); } } /** * This creates a backup of a single config entry. 
* * @throws Exception */ synchronized private void createConfigEntryBackup(ISQLConfig config, String entryType, String entryName) throws RemoteException { // copy the config entry to a temp SQLConfigXML String entryXMLString = null; // create a block of code so tempConfig won't stay in memory try { SQLConfigXML tempConfig = new SQLConfigXML(); SQLConfigUtils.migrateSQLConfigEntry(config, tempConfig, entryType, entryName); entryXMLString = tempConfig.getConfigEntryXML(entryType, entryName); // stop if xml entry is blank if (entryXMLString == null || !entryXMLString.contains("/")) return; // write the config entry to a temp file File newFile = new File(tempPath, "backup_" + entryType + "_" + entryName.replaceAll("[^a-zA-Z0-9]", "") + "_" + UUID.randomUUID() + ".txt"); BufferedWriter out = new BufferedWriter(new FileWriter(newFile)); out.write(entryXMLString); out.flush(); out.close(); } catch (Exception e) { throw new RemoteException("Backup failed", e); } } // ///////////////////////////////////////////////// // functions for managing Weave client XML files // ///////////////////////////////////////////////// /** * Return a list of Client Config files from docroot * * @return A list of (xml) client config files existing in the docroot * folder. */ synchronized public String[] getWeaveFileNames(String configConnectionName, String password) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); ConnectionInfo info = config.getConnectionInfo(configConnectionName); String path = docrootPath; if(info.folderName.length() > 0) path = path + info.folderName + "/"; File docrootFolder = new File(path); FilenameFilter xmlFilter = new FilenameFilter() { public boolean accept(File dir, String fileName) { return (fileName.endsWith(".xml")); } }; File[] files = null; List<String> listOfFiles = new ArrayList<String>(); try { docrootFolder.mkdirs(); files = docrootFolder.listFiles(xmlFilter); for (File file : files) { if (file.isFile()) { // System.out.println(file.getName()); listOfFiles.add(((info.folderName.length() > 0) ? info.folderName + "/" : "") + file.getName().toString()); } } } catch (SecurityException e) { throw new RemoteException("Permission error reading directory.",e); } return ListUtils.toStringArray(listOfFiles); } synchronized public String saveWeaveFile(String connectionName, String password, String fileContents, String xmlFile, boolean overwriteFile) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); ConnectionInfo info = config.getConnectionInfo(connectionName); // 5.2 client web page configuration file ***.xml String output = ""; try { // remove special characters xmlFile = xmlFile.replace("\\", "").replace("/", ""); if (!xmlFile.toLowerCase().endsWith(".xml")) xmlFile += ".xml"; String path = docrootPath; if(info.folderName.length() > 0) path = path + info.folderName + "/"; File file = new File(path + xmlFile); if (file.exists()) { if (!overwriteFile) return String.format("File already exists and was not changed: \"%s\"", xmlFile); if (!info.is_superuser && info.folderName.length() == 0) return String.format("User \"%s\" does not have permission to overwrite configuration files. 
Please save under a new filename.", connectionName); } BufferedWriter out = new BufferedWriter(new FileWriter(file)); output = fileContents; out.write(output); out.close(); } catch (IOException e) { throw new RemoteException("Error occurred while saving file", e); } return "Successfully generated " + xmlFile + "."; } /** * Delete a Client Config file from docroot * * @return A String message indicating if file was deleted. * */ synchronized public String removeWeaveFile(String configConnectionName, String password, String fileName) throws RemoteException, IllegalArgumentException { ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); ConnectionInfo info = config.getConnectionInfo(configConnectionName); if (!config.getConnectionInfo(configConnectionName).is_superuser && info.folderName.length() == 0) return String.format("User \"%s\" does not have permission to remove configuration files.", configConnectionName); String path = docrootPath; if(info.folderName.length() > 0) path = path + info.folderName + "/"; File f = new File(path + fileName); try { // Make sure the file or directory exists and isn't write protected if (!f.exists()) throw new IllegalArgumentException("Delete: no such file or directory: " + fileName); if (!f.canWrite()) throw new IllegalArgumentException("File cannot be deleted Delete: write protected: " + fileName); // If it is a directory, make sure it is empty if (f.isDirectory()) throw new IllegalArgumentException("Cannot Delete a directory"); // Attempt to delete it boolean success = f.delete(); if (!success) throw new IllegalArgumentException("Delete: deletion failed"); return "Successfully deleted file " + fileName; } catch (SecurityException e) { throw new RemoteException("File could not be deleted", e); } } // ///////////////////////////////////////////////// // functions for managing SQL connection entries // ///////////////////////////////////////////////// synchronized public String[] getConnectionNames(String connectionName, String password) throws RemoteException { try { // only check password and superuser privileges if dbInfo is valid if (databaseConfigExists()) { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); // non-superusers can't get connection info for other users if (!config.getConnectionInfo(connectionName).is_superuser) return new String[]{connectionName}; } // otherwise, return all connection names List<String> connectionNames = configManager.getConfig().getConnectionNames(); return ListUtils.toStringArray(getSortedUniqueValues(connectionNames, false)); } catch (RemoteException se) { return new String[]{}; } } synchronized public ConnectionInfo getConnectionInfo(String loginConnectionName, String loginPassword, String connectionNameToGet) throws RemoteException { ISQLConfig config; if (databaseConfigExists()) { config = checkPasswordAndGetConfig(loginConnectionName, loginPassword); // non-superusers can't get connection info if (!config.getConnectionInfo(loginConnectionName).is_superuser) return null; } else { config = configManager.getConfig(); } ConnectionInfo info = config.getConnectionInfo(connectionNameToGet); info.pass = ""; // don't send password return info; } synchronized public String saveConnectionInfo(String currentConnectionName, String currentPassword, String newConnectionName, String dbms, String ip, String port, String database, String sqlUser, String password, String folderName, boolean grantSuperuser, boolean configOverwrite) throws RemoteException { if 
(newConnectionName.equals("")) throw new RemoteException("Connection name cannot be empty."); ConnectionInfo newConnectionInfo = new ConnectionInfo(); newConnectionInfo.name = newConnectionName; newConnectionInfo.dbms = dbms; newConnectionInfo.ip = ip; newConnectionInfo.port = port; newConnectionInfo.database = database; newConnectionInfo.user = sqlUser; newConnectionInfo.pass = password; newConnectionInfo.folderName = folderName; newConnectionInfo.is_superuser = true; // if the config file doesn't exist, create it String fileName = configManager.getConfigFileName(); if (!new File(fileName).exists()) { try { XMLUtils.getStringFromXML(new SQLConfigXML().getDocument(), SQLConfigXML.DTD_FILENAME, fileName); } catch (Exception e) { e.printStackTrace(); } } configManager.detectConfigChanges(); ISQLConfig config = configManager.getConfig(); // if there are existing connections and DatabaseConfigInfo exists, check the password. otherwise, allow anything. if (config.getConnectionNames().size() > 0 && config.getDatabaseConfigInfo() != null) { config = checkPasswordAndGetConfig(currentConnectionName, currentPassword); // non-superusers can't save connection info if (!config.getConnectionInfo(currentConnectionName).is_superuser) throw new RemoteException(String.format("User \"%s\" does not have permission to modify connections.", currentConnectionName)); // is_superuser for the new connection will only be false if there is an existing superuser connection and grantSuperuser is false. newConnectionInfo.is_superuser = grantSuperuser; } // test connection only - to validate parameters Connection conn = null; try { conn = newConnectionInfo.getConnection(); SQLUtils.testConnection(conn); } catch (Exception e) { throw new RemoteException(String.format("The connection named \"%s\" was not created because the server could not" + " connect to the specified database with the given parameters.", newConnectionInfo.name), e); } finally { // close the connection, as we will not use it later SQLUtils.cleanup(conn); } // if the connection already exists AND overwrite == false throw error if (!configOverwrite && ListUtils.findString(newConnectionInfo.name, config.getConnectionNames()) >= 0) { throw new RemoteException(String.format("The connection named \"%s\" already exists. 
Action cancelled.", newConnectionInfo.name)); } // generate config connection entry try { createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_CONNECTION, newConnectionInfo.name); // do not delete if this is the last user (which must be a superuser) List<String> connectionNames = config.getConnectionNames(); // check for number of superusers int numSuperUsers = 0; for (String name : connectionNames) { if (config.getConnectionInfo(name).is_superuser) ++numSuperUsers; if (numSuperUsers >= 2) break; } // sanity check if (currentConnectionName.equals(newConnectionName) && numSuperUsers == 1 && !newConnectionInfo.is_superuser) throw new RemoteException("Cannot remove superuser privileges from last remaining superuser."); config.removeConnection(newConnectionInfo.name); config.addConnection(newConnectionInfo); backupAndSaveConfig(config); } catch (Exception e) { e.printStackTrace(); throw new RemoteException( String.format("Unable to create connection entry named \"%s\": %s", newConnectionInfo.name, e.getMessage()),e ); } return String.format("The connection named \"%s\" was created successfully.", newConnectionName); } synchronized public String removeConnectionInfo(String loginConnectionName, String loginPassword, String connectionNameToRemove) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(loginConnectionName, loginPassword); // allow only a superuser to remove a connection ConnectionInfo loginConnectionInfo = config.getConnectionInfo(loginConnectionName); if (!loginConnectionInfo.is_superuser) throw new RemoteException("Only superusers can remove connections."); try { if (ListUtils.findString(connectionNameToRemove, config.getConnectionNames()) < 0) throw new RemoteException("Connection \"" + connectionNameToRemove + "\" does not exist."); createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_CONNECTION, connectionNameToRemove); // check for number of superusers List<String> connectionNames = config.getConnectionNames(); int numSuperUsers = 0; for (String name : connectionNames) if (config.getConnectionInfo(name).is_superuser) ++numSuperUsers; // do not allow removal of last superuser if (numSuperUsers == 1 && loginConnectionName.equals(connectionNameToRemove)) throw new RemoteException("Cannot remove the only superuser."); config.removeConnection(connectionNameToRemove); backupAndSaveConfig(config); return "Connection \"" + connectionNameToRemove + "\" was deleted."; } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } synchronized public DatabaseConfigInfo getDatabaseConfigInfo(String connectionName, String password) throws RemoteException { try { if (databaseConfigExists()) return checkPasswordAndGetConfig(connectionName, password).getDatabaseConfigInfo(); } catch (RemoteException e) { if (e.detail instanceof FileNotFoundException) return null; throw e; } return null; } synchronized public String migrateConfigToDatabase(String connectionName, String password, String schema, String geometryConfigTable, String dataConfigTable) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!config.getConnectionInfo(connectionName).is_superuser) throw new RemoteException("Unable to migrate config to database without superuser privileges."); String configFileName = configManager.getConfigFileName(); int count = 0; try { // load xmlConfig in memory SQLConfigXML xmlConfig = new SQLConfigXML(configFileName); DatabaseConfigInfo info = new DatabaseConfigInfo(); info.schema = schema; info.connection =
connectionName; info.dataConfigTable = dataConfigTable; info.geometryConfigTable = geometryConfigTable; // save db config info to in-memory xmlConfig xmlConfig.setDatabaseConfigInfo(info); // migrate from in-memory xmlConfig to the db count = SQLConfigUtils.migrateSQLConfig(xmlConfig, new DatabaseConfig(xmlConfig)); // save in-memory xmlConfig to disk backupAndSaveConfig(xmlConfig); } catch (Exception e) { e.printStackTrace(); if (count > 0) throw new RemoteException("Migrated " + count + " items then failed", e); throw new RemoteException("Migration failed", e); } String result = String.format("The admin console will now use the \"%s\" connection to store configuration information.", connectionName); if (count > 0) result = String.format("%s items were copied from %s into the database. %s", count, new File(configFileName).getName(), result); return result; } // ///////////////////////////////////////////////// // functions for managing DataTable entries // ///////////////////////////////////////////////// synchronized public String[] getDataTableNames(String connectionName, String password) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); ConnectionInfo cInfo = config.getConnectionInfo(connectionName); String dataConnection; if (cInfo.is_superuser) dataConnection = null; // let it get all of the data tables else dataConnection = connectionName; // get only the ones on this connection return ListUtils.toStringArray(config.getDataTableNames(dataConnection)); } /** * Returns metadata about columns of the given data table. */ synchronized public AttributeColumnInfo[] getDataTableInfo(String connectionName, String password, String dataTableName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); List<AttributeColumnInfo> info = config.getAttributeColumnInfo(dataTableName); return info.toArray(new AttributeColumnInfo[info.size()]); } /** * Returns the results of testing attribute column sql queries. 
*/ synchronized public AttributeColumnInfo[] testAllQueries(String connectionName, String password, String dataTableName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); HashMap<String, String> params = new HashMap<String, String>(); params.put(Metadata.DATATABLE.toString(), dataTableName); List<AttributeColumnInfo> infolist = config.getAttributeColumnInfo(params); for (int i = 0; i < infolist.size(); i ++) { AttributeColumnInfo attributeColumnInfo = infolist.get(i); try { String query = attributeColumnInfo.sqlQuery; System.out.println(query); SQLResult result = SQLConfigUtils.getRowSetFromQuery(config, attributeColumnInfo.connection, query); attributeColumnInfo.metadata.put(AttributeColumnInfo.SQLRESULT, String.format("Returned %s rows", result.rows.length)); } catch (Exception e) { e.printStackTrace(); attributeColumnInfo.metadata.put(AttributeColumnInfo.SQLRESULT, e.getMessage()); } } return infolist.toArray(new AttributeColumnInfo[0]); } @SuppressWarnings("unchecked") synchronized public String saveDataTableInfo(String connectionName, String password, Object[] columnMetadata) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); // first validate the information String dataTableName = null; for (Object object : columnMetadata) { Map<String, Object> metadata = (Map<String, Object>) object; String _dataTableName = (String) metadata.get(Metadata.DATATABLE.toString()); if (dataTableName == null) dataTableName = _dataTableName; else if (!dataTableName.equals(_dataTableName)) throw new RemoteException("overwriteDataTableEntry(): dataTable property not consistent among column entries."); // String _dataTableConnection = (String) metadata.get(Metadata.CONNECTION.toString()); // if (dataTableConnection == null) // dataTableConnection = _dataTableConnection; // else if (dataTableConnection != _dataTableConnection) // throw new RemoteException("overwriteDataTableEntry(): " + Metadata.CONNECTION.toString() + " property not consistent among column entries."); } if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify DataTable \"%s\".", connectionName, dataTableName)); try { // start a block of code so tempConfig will not stay in memory { // make a new SQLConfig object and add the entry SQLConfigXML tempConfig = new SQLConfigXML(); // add all the columns to the new blank config for (int i = 0; i < columnMetadata.length; i++) { // create metadata map that AttributeColumnInfo wants Map<String, String> metadata = new HashMap<String, String>(); for (Entry<String, Object> entry : ((Map<String, Object>) columnMetadata[i]).entrySet()) { //System.out.println(entry.getKey() + ':' + (String) entry.getValue()); metadata.put(entry.getKey(), (String) entry.getValue()); } // Exclude connection & sqlQuery properties from metadata // object // because they are separate parameters to the constructor.
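// The two remove() calls below pull the connection and sqlQuery values out of the metadata map so they
// can be passed as the first two constructor arguments; whatever remains in the map (for example the
// dataTable and name entries) is stored as descriptive metadata on the AttributeColumnInfo.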
AttributeColumnInfo columnInfo = new AttributeColumnInfo(metadata.remove(AttributeColumnInfo.CONNECTION), metadata.remove(AttributeColumnInfo.SQLQUERY), metadata); // add the column info to the temp blank config tempConfig.addAttributeColumn(columnInfo); } // backup any existing dataTable entry, then copy over the new // entry createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_DATATABLE, dataTableName); SQLConfigUtils.migrateSQLConfigEntry(tempConfig, config, ISQLConfig.ENTRYTYPE_DATATABLE, dataTableName); } backupAndSaveConfig(config); return String.format("The dataTable entry \"%s\" was saved.", dataTableName); } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } synchronized public void removeAttributeColumnInfo(String connectionName, String password, Object[] columnMetadata) throws RemoteException { } synchronized public String removeDataTableInfo(String connectionName, String password, String dataTableName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to remove DataTable \"%s\".", connectionName, dataTableName)); try { if (ListUtils.findString(dataTableName, config.getDataTableNames(null)) < 0) throw new RemoteException("DataTable \"" + dataTableName + "\" does not exist."); createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_DATATABLE, dataTableName); config.removeDataTable(dataTableName); backupAndSaveConfig(config); return "DataTable \"" + dataTableName + "\" was deleted."; } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } // ///////////////////////////////////////////////////// // functions for managing GeometryCollection entries // ///////////////////////////////////////////////////// synchronized public String[] getGeometryCollectionNames(String connectionName, String password) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); ConnectionInfo cInfo = config.getConnectionInfo(connectionName); String geometryConnection; if (cInfo.is_superuser) geometryConnection = null; // let it get all of the geometries else geometryConnection = connectionName; // get only the ones on this connection return ListUtils.toStringArray(config.getGeometryCollectionNames(geometryConnection)); } /** * Returns metadata about the given geometry collection. 
*/ synchronized public GeometryCollectionInfo getGeometryCollectionInfo(String connectionName, String password, String geometryCollectionName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); return config.getGeometryCollectionInfo(geometryCollectionName); } synchronized public String saveGeometryCollectionInfo(String connectionName, String password, String geomName, String geomConnection, String geomSchema, String geomTablePrefix, String geomKeyType, String geomImportNotes, String geomProjection) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyGeometryCollection(config, connectionName, geomName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify GeometryCollection \"%s\".", connectionName, geomName)); // if this user isn't a superuser, don't allow an overwrite of an existing geometrycollection ConnectionInfo currentConnectionInfo = config.getConnectionInfo(connectionName); if (!currentConnectionInfo.is_superuser) { GeometryCollectionInfo oldGeometry = config.getGeometryCollectionInfo(geomName); if (oldGeometry != null && !oldGeometry.connection.equals(connectionName)) throw new RemoteException("An existing geometry collection with the same name exists on another connection. Unable to overwrite without superuser privileges."); } try { // start a block of code so tempConfig will not stay in memory { // make a new SQLConfig object and add the entry SQLConfigXML tempConfig = new SQLConfigXML(); // add all the columns to the new blank config GeometryCollectionInfo info = new GeometryCollectionInfo(); info.name = geomName; info.connection = geomConnection; info.schema = geomSchema; info.tablePrefix = geomTablePrefix; info.keyType = geomKeyType; info.importNotes = geomImportNotes; info.projection = geomProjection; // add the info to the temp blank config tempConfig.addGeometryCollection(info); // backup any existing dataTable entry, then copy over the new // entry createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_GEOMETRYCOLLECTION, geomName); SQLConfigUtils.migrateSQLConfigEntry(tempConfig, config, ISQLConfig.ENTRYTYPE_GEOMETRYCOLLECTION, geomName); } backupAndSaveConfig(config); return String.format("The geometryCollection entry \"%s\" was saved.", geomName); } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } synchronized public String removeGeometryCollectionInfo(String connectionName, String password, String geometryCollectionName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyGeometryCollection(config, connectionName, geometryCollectionName)) throw new RemoteException(String.format("User \"%s\" does not have permission to remove GeometryCollection \"%s\".", connectionName, geometryCollectionName)); try { if (ListUtils.findString(geometryCollectionName, config.getGeometryCollectionNames(null)) < 0) throw new RemoteException("Geometry Collection \"" + geometryCollectionName + "\" does not exist."); createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_GEOMETRYCOLLECTION, geometryCollectionName); config.removeGeometryCollection(geometryCollectionName); backupAndSaveConfig(config); return "Geometry Collection \"" + geometryCollectionName + "\" was deleted."; } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } // /////////////////////////////////////////// // 
functions for getting SQL info // /////////////////////////////////////////// /** * The following functions get information about the database associated * with a given connection name. */ synchronized public String[] getSchemas(String configConnectionName, String password) throws RemoteException { checkPasswordAndGetConfig(configConnectionName, password); List<String> schemasList = getSchemasList(configConnectionName); return ListUtils.toStringArray(getSortedUniqueValues(schemasList, false)); } synchronized public String[] getTables(String configConnectionName, String password, String schemaName) throws RemoteException { checkPasswordAndGetConfig(configConnectionName, password); List<String> tablesList = getTablesList(configConnectionName, schemaName); return ListUtils.toStringArray(getSortedUniqueValues(tablesList, false)); } synchronized public String[] getColumns(String configConnectionName, String password, String schemaName, String tableName) throws RemoteException { checkPasswordAndGetConfig(configConnectionName, password); return ListUtils.toStringArray(getColumnsList(configConnectionName, schemaName, tableName)); } synchronized private List<String> getSchemasList(String connectionName) throws RemoteException { ISQLConfig config = configManager.getConfig(); List<String> schemas; try { Connection conn = SQLConfigUtils.getStaticReadOnlyConnection(config, connectionName); schemas = SQLUtils.getSchemas(conn); } catch (SQLException e) { // e.printStackTrace(); throw new RemoteException("Unable to get schema list from database.", e); } finally { // SQLUtils.cleanup(conn); } // don't want to list information_schema. ListUtils.removeIgnoreCase("information_schema", schemas); return schemas; } synchronized private List<String> getTablesList(String connectionName, String schemaName) throws RemoteException { ISQLConfig config = configManager.getConfig(); List<String> tables; try { Connection conn = SQLConfigUtils.getStaticReadOnlyConnection(config, connectionName); tables = SQLUtils.getTables(conn, schemaName); } catch (SQLException e) { // e.printStackTrace(); throw new RemoteException("Unable to get schema list from database.", e); } finally { // SQLUtils.cleanup(conn); } return tables; } synchronized private List<String> getColumnsList(String connectionName, String schemaName, String tableName) throws RemoteException { ISQLConfig config = configManager.getConfig(); List<String> columns; try { Connection conn = SQLConfigUtils.getStaticReadOnlyConnection(config, connectionName); columns = SQLUtils.getColumns(conn, schemaName, tableName); } catch (SQLException e) { // e.printStackTrace(); throw new RemoteException("Unable to get column list from database.", e); } finally { // SQLUtils.cleanup(conn); } return columns; } // /////////////////////////////////////////// // functions for getting miscellaneous info // /////////////////////////////////////////// synchronized public String[] getKeyTypes(String connectionName, String password) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); return ListUtils.toStringArray(getSortedUniqueValues(config.getKeyTypes(), true)); } synchronized public UploadedFile[] getUploadedCSVFiles() throws RemoteException { File directory = new File(uploadPath); List<UploadedFile> list = new ArrayList<UploadedFile>(); File[] listOfFiles = null; try { if( directory.isDirectory() ) { listOfFiles = directory.listFiles(new UploadFileFilter("csv")); for( File file : listOfFiles ) { if( file.isFile() ) { UploadedFile 
uploadedFile = new UploadedFile( file.getName(), file.length(), file.lastModified() ); list.add(uploadedFile); } } } } catch(Exception e) { throw new RemoteException(e.getMessage()); } int n = list.size(); return list.toArray(new UploadedFile[n]); } synchronized public UploadedFile[] getUploadedShapeFiles() throws RemoteException { File directory = new File(uploadPath); List<UploadedFile> list = new ArrayList<UploadedFile>(); File[] listOfFiles = null; try { if( directory.isDirectory() ) { listOfFiles = directory.listFiles(new UploadFileFilter("shp")); for( File file : listOfFiles ) { if( file.isFile() ) { UploadedFile uploadedFile = new UploadedFile( file.getName(), file.length(), file.lastModified() ); list.add(uploadedFile); } } } } catch(Exception e) { throw new RemoteException(e.getMessage()); } int n = list.size(); return list.toArray(new UploadedFile[n]); } /** * Read a list of csv files and return common header columns. * * @param A * list of csv file names. * @return A list of common header files or null if none exist encoded using * */ synchronized public String[] getCSVColumnNames(String csvFile) throws RemoteException { String[] headerLine = null; try { String csvData = org.apache.commons.io.FileUtils.readFileToString(new File(uploadPath, csvFile)); // Read first line only (header line). int index = csvData.indexOf("\r"); int index2 = csvData.indexOf("\n"); if (index2 < index && index2 >= 0) index = index2; String header = index < 0 ? csvData : csvData.substring(0, index); csvData = null; // don't need this in memory anymore String[][] rows = CSVParser.defaultParser.parseCSV(header); headerLine = rows[0]; } catch (FileNotFoundException e) { throw new RemoteException(e.getMessage()); } catch (Exception e) { throw new RemoteException(e.getMessage()); } return headerLine; } synchronized public String[] listDBFFileColumns(String dbfFileName) throws RemoteException { try { List<String> names = DBFUtils.getAttributeNames(new File(uploadPath, correctFileNameCase(dbfFileName))); return ListUtils.toStringArray(names); } catch (IOException e) { throw new RemoteException("IOException", e); } } synchronized private String correctFileNameCase(String fileName) { try { File directory = new File(uploadPath); if( directory.isDirectory() ) { for( String file : directory.list() ) { if( file.equalsIgnoreCase(fileName) ) return file; } } } catch( Exception e ) {} return fileName; } /** * getSortedUniqueValues * * @param values * A list of string values which may contain duplicates. * @param moveEmptyStringToEnd * If set to true and "" is at the front of the list, "" is moved * to the end. * @return A sorted list of unique values found in the given list. */ private List<String> getSortedUniqueValues(List<String> values, boolean moveEmptyStringToEnd) { Set<String> uniqueValues = new HashSet<String>(); uniqueValues.addAll(values); Vector<String> result = new Vector<String>(uniqueValues); Collections.sort(result, String.CASE_INSENSITIVE_ORDER); // if empty string is at beginning of sorted list, move it to the end of // the list if (moveEmptyStringToEnd && result.size() > 0 && result.get(0).equals("")) result.add(result.remove(0)); return result; } // /////////////////////////////////////////// // functions for importing data // /////////////////////////////////////////// /** * This function accepts an uploaded file. * @param fileName The name of the file. * @param content The file content. 
*/ public void uploadFile(String fileName, InputStream content) throws RemoteException { // make sure the upload folder exists (new File(uploadPath)).mkdirs(); String filePath = uploadPath + fileName; try { FileUtils.copy(content, new FileOutputStream(filePath)); } catch (Exception e) { throw new RemoteException("File upload failed.", e); } } /** * Return a list of files existing in the csv upload folder on the server. * * @return A list of files existing in the csv upload folder. */ synchronized public List<String> getUploadedFileNames() throws RemoteException { File uploadFolder = new File(uploadPath); File[] files = null; List<String> listOfFiles = new ArrayList<String>(); try { files = uploadFolder.listFiles(); for (File file : files) { if (file.isFile()) { // System.out.println(file.getName()); listOfFiles.add(file.getName().toString()); } } } catch (SecurityException e) { throw new RemoteException("Permission error reading directory."); } return listOfFiles; } private boolean valueIsInt(String value) { boolean retVal = true; try { Integer.parseInt(value); } catch (Exception e) { retVal = false; } return retVal; } private boolean valueIsDouble(String value) { boolean retVal = true; try { Double.parseDouble(value); } catch (Exception e) { retVal = false; } return retVal; } private boolean valueHasLeadingZero(String value) { boolean temp = valueIsInt(value); if (!temp) return false; if (value.length() < 2) return false; if (value.charAt(0) == '0' && value.charAt(1) != '.') return true; return false; } synchronized public String importCSV(String connectionName, String password, String csvFile, String csvKeyColumn, String csvSecondaryKeyColumn, String sqlSchema, String sqlTable, boolean sqlOverwrite, String configDataTableName, boolean configOverwrite, String configGeometryCollectionName, String configKeyType, String[] nullValues, String[] filterColumnNames) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); ConnectionInfo connInfo = config.getConnectionInfo(connectionName); if (sqlOverwrite && !connInfo.is_superuser) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite SQL tables.", connectionName)); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, configDataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite DataTable \"%s\".", connectionName, configDataTableName)); Connection conn = null; Statement stmt = null; try { conn = SQLConfigUtils.getConnection(config, connectionName); sqlTable = sqlTable.toLowerCase(); // fix for MySQL running under Linux String[] columnNames = null; String[] originalColumnNames = null; int fieldLengths[] = null; // Load the CSV file and reformat it String formatted_CSV_path = tempPath + "temp.csv"; int[] types = null; int i = 0; int j = 0; int num = 1; String outputNullValue = SQLUtils.getCSVNullValue(conn); boolean ignoreKeyColumnQueries = false; String csvData = org.apache.commons.io.FileUtils.readFileToString(new File(uploadPath, csvFile)); String[][] rows = CSVParser.defaultParser.parseCSV(csvData); if (rows.length == 0) throw new RemoteException("CSV file is empty: " + csvFile); // if there is no key column, we need to append a unique Row ID column if ("".equals(csvKeyColumn)) { ignoreKeyColumnQueries = true; // get the maximum number of rows in a column int maxNumRows = 0; for (i = 0; i < rows.length; ++i) { String[] column = rows[i]; int numRows = column.length; // this includes the column name in row 
0 if (numRows > maxNumRows) maxNumRows = numRows; } csvKeyColumn = "row_id"; for (i = 0; i < rows.length; ++i) { String[] row = rows[i]; String[] newRow = new String[row.length + 1]; System.arraycopy(row, 0, newRow, 0, row.length); if (i == 0) newRow[newRow.length - 1] = csvKeyColumn; else newRow[newRow.length - 1] = "row" + i; rows[i] = newRow; } } // Read the column names columnNames = rows[0]; originalColumnNames = new String[columnNames.length]; fieldLengths = new int[columnNames.length]; // converge the column name to meet the requirement of mySQL. for (i = 0; i < columnNames.length; i++) { String colName = columnNames[i]; if (colName.length() == 0) colName = "Column " + (i+1); // save original column name originalColumnNames[i] = colName; // if the column name has "/", "\", ".", "<", ">". colName = colName.replace("/", ""); colName = colName.replace("\\", ""); colName = colName.replace(".", ""); colName = colName.replace("<", "less than"); colName = colName.replace(">", "more than"); // if the length of the column name is longer than the 64-character limit int maxColNameLength = 64; int halfMaxColNameLength = 30; boolean isKeyCol = csvKeyColumn.equalsIgnoreCase(colName); if (colName.length() >= maxColNameLength) { colName = colName.replace(" ", ""); if (colName.length() >= maxColNameLength) { colName = colName.substring(0, halfMaxColNameLength) + "_" + colName.substring(colName.length() - halfMaxColNameLength); } } // copy new name if key column changed if (isKeyCol) csvKeyColumn = colName; // if find the column names are repetitive for (j = 0; j < i; j++) { if (colName.equalsIgnoreCase(columnNames[j])) { colName += "_" + num; num++; } } // save the new name columnNames[i] = colName; } // Initialize the types of columns as int (will be changed inside loop if necessary) types = new int[columnNames.length]; for (i = 0; i < columnNames.length; i++) { fieldLengths[i] = 0; types[i] = IntType; } // Read the data and get the column type for (int iRow = 1; iRow < rows.length; iRow++) { String[] nextLine = rows[iRow]; // Format each line for (i = 0; i < columnNames.length && i < nextLine.length; i++) { // keep track of the longest String value found in this column fieldLengths[i] = Math.max(fieldLengths[i], nextLine[i].length()); // Change missing data into NULL, later add more cases to deal with missing data. String[] nullValuesStandard = new String[]{"", ".", "..", " ", "-", "\"NULL\"", "NULL", "NaN"}; for(String[] values : new String[][] {nullValuesStandard, nullValues }) { for (String nullValue : values) { if (nextLine[i].equalsIgnoreCase(nullValue)) { nextLine[i] = outputNullValue; break; } } } if (nextLine[i].equals(outputNullValue)) continue; // 3.3.2 is a string, update the type. 
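// Summary of the type inference below: every column starts as IntType and is only ever demoted, to
// DoubleType when a value parses as a double but not as an int, and to StringType when a value parses
// as neither or carries an extraneous leading zero; once a column becomes StringType it stays StringType.
// Illustrative (hypothetical) values: "042" -> string, "3.3.2" -> string, "1,234" -> int (commas stripped), "3.14" -> double.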
// 04 is a string (but Integer.parseInt would not throw an exception) try { String value = nextLine[i]; while (value.indexOf(',') > 0) value = value.replace(",", ""); // valid input format // if the value is an int or double with an extraneous leading zero, it's defined to be a string if (valueHasLeadingZero(value)) types[i] = StringType; // if the type was determined to be a string before (or just above), continue if (types[i] == StringType) continue; // if the type is an int if (types[i] == IntType) { // check that it's still an int if (valueIsInt(value)) continue; } // it either wasn't an int or is no longer an int, check for a double if (valueIsDouble(value)) { types[i] = DoubleType; continue; } // if we're down here, it must be a string types[i] = StringType; } catch (Exception e) { // this shouldn't happen, but it's just to be safe types[i] = StringType; } } } // now we need to remove commas from any numeric values because the SQL drivers don't like it for (int iRow = 1; iRow < rows.length; iRow++) { String[] nextLine = rows[iRow]; // Format each line for (i = 0; i < columnNames.length && i < nextLine.length; i++) { String value = nextLine[i]; if (types[i] == IntType || types[i] == DoubleType) { while (value.indexOf(",") >= 0) value = value.replace(",", ""); nextLine[i] = value; } } } // save modified CSV BufferedWriter out = new BufferedWriter(new FileWriter(formatted_CSV_path)); boolean quoteEmptyStrings = outputNullValue.length() > 0; String temp = CSVParser.defaultParser.createCSVFromArrays(rows, quoteEmptyStrings); out.write(temp); out.close(); // Import the CSV file into SQL. // Drop the table if it exists. if (sqlOverwrite) { SQLUtils.dropTableIfExists(conn, sqlSchema, sqlTable); } else { if (ListUtils.findIgnoreCase(sqlTable, getTablesList(connectionName, sqlSchema)) >= 0) throw new RemoteException("CSV not imported.\nSQL table already exists."); } if (!configOverwrite) { if (ListUtils.findIgnoreCase(configDataTableName, config.getDataTableNames(null)) >= 0) throw new RemoteException(String.format( "CSV not imported.\nDataTable \"%s\" already exists in the configuration.", configDataTableName)); } // create a list of the column types List<String> columnTypesList = new Vector<String>(); for (i = 0; i < columnNames.length; i++) { if (types[i] == StringType || csvKeyColumn.equalsIgnoreCase(columnNames[i])) columnTypesList.add(SQLUtils.getVarcharTypeString(conn, fieldLengths[i])); else if (types[i] == IntType) columnTypesList.add(SQLUtils.getIntTypeString(conn)); else if (types[i] == DoubleType) columnTypesList.add(SQLUtils.getDoubleTypeString(conn)); } // create the table SQLUtils.createTable(conn, sqlSchema, sqlTable, Arrays.asList(columnNames), columnTypesList); // import the data SQLUtils.copyCsvToDatabase(conn, formatted_CSV_path, sqlSchema, sqlTable); return addConfigDataTable(config, configOverwrite, configDataTableName, connectionName, configGeometryCollectionName, configKeyType, csvKeyColumn, csvSecondaryKeyColumn, originalColumnNames, columnNames, sqlSchema, sqlTable, ignoreKeyColumnQueries, filterColumnNames); } catch (RemoteException e) // required since RemoteException extends IOException { throw e; } catch (SQLException e) { throw new RemoteException("Import failed.", e); } catch (FileNotFoundException e) { e.printStackTrace(); throw new RemoteException("File not found: " + csvFile); } catch (IOException e) { e.printStackTrace(); throw new RemoteException("Cannot read file: " + csvFile); } finally { // close everything in reverse order SQLUtils.cleanup(stmt); 
SQLUtils.cleanup(conn); } } synchronized public String addConfigDataTableFromDatabase(String connectionName, String password, String schemaName, String tableName, String keyColumnName, String secondaryKeyColumnName, String configDataTableName, boolean configOverwrite, String geometryCollectionName, String keyType, String[] filterColumnNames) throws RemoteException { // use lower case sql table names (fix for mysql linux problems) //tableName = tableName.toLowerCase(); ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); String[] columnNames = getColumnsList(connectionName, schemaName, tableName).toArray(new String[0]); return addConfigDataTable(config, configOverwrite, configDataTableName, connectionName, geometryCollectionName, keyType, keyColumnName, secondaryKeyColumnName, columnNames, columnNames, schemaName, tableName, false, filterColumnNames); } synchronized private String addConfigDataTable(ISQLConfig config, boolean configOverwrite, String configDataTableName, String connectionName, String geometryCollectionName, String keyType, String keyColumnName, String secondarySqlKeyColumn, String[] configColumnNames, String[] sqlColumnNames, String sqlSchema, String sqlTable, boolean ignoreKeyColumnQueries, String[] filterColumnNames) throws RemoteException { ConnectionInfo info = config.getConnectionInfo(connectionName); if (info == null) throw new RemoteException(String.format("Connection named \"%s\" does not exist.", connectionName)); String dbms = info.dbms; if (sqlColumnNames == null) sqlColumnNames = new String[0]; // if key column is actually the name of a column, put quotes around it. // otherwise, don't. int iKey = ListUtils.findIgnoreCase(keyColumnName, sqlColumnNames); int iSecondaryKey = ListUtils.findIgnoreCase(secondarySqlKeyColumn, sqlColumnNames); String sqlKeyColumn; // save the original column name if (iKey >= 0) { sqlKeyColumn = keyColumnName; // before quoting, save the column name keyColumnName = SQLUtils.quoteSymbol(dbms, sqlColumnNames[iKey]); } else { sqlKeyColumn = SQLUtils.unquoteSymbol(dbms, keyColumnName); // get the original columnname } if (iSecondaryKey >= 0) secondarySqlKeyColumn = SQLUtils.quoteSymbol(dbms, sqlColumnNames[iSecondaryKey]); // Write SQL statements into sqlconfig. 
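// Overview of the query generation below: one query of the form SELECT <key column>,<data column> FROM
// <schema>.<table> is built per data column, and when filterColumnNames is supplied each column is further
// split into one query (and one attribute column entry) per distinct combination of filter values by
// appending a clause such as: where "year"='2010' and "state"='MA' (hypothetical column names and values).
// Each generated query is test-executed once so its SQL data type can be recorded in the config entry.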
if (!configOverwrite) { if (ListUtils.findIgnoreCase(configDataTableName, config.getDataTableNames(null)) >= 0) throw new RemoteException(String.format("DataTable \"%s\" already exists in the configuration.", configDataTableName)); } else { if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, configDataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite DataTable \"%s\".", connectionName, configDataTableName)); } // connect to database, generate and test each query before modifying // config file List<String> titles = new LinkedList<String>(); List<String> queries = new Vector<String>(); List<String> dataTypes = new Vector<String>(); String query = null; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, connectionName); SQLResult filteredValues = null; if (filterColumnNames != null && filterColumnNames.length > 0) { // get a list of unique combinations of filter values String columnList = ""; for (int i = 0; i < filterColumnNames.length; i++) { if (i > 0) columnList += ","; columnList += SQLUtils.quoteSymbol(conn, filterColumnNames[i]); } query = String.format( "select distinct %s from %s order by %s", columnList, SQLUtils.quoteSchemaTable(conn, sqlSchema, sqlTable), columnList ); filteredValues = SQLUtils.getRowSetFromQuery(conn, query); // System.out.println(query); // System.out.println(filteredValues); } for (int iCol = 0; iCol < sqlColumnNames.length; iCol++) { String sqlColumn = sqlColumnNames[iCol]; // System.out.println("columnName: " + columnName + "\tkeyColumnName: " + keyColumnName + "\toriginalKeyCol: " + originalKeyColumName); if (ignoreKeyColumnQueries && sqlKeyColumn.equals(sqlColumn)) continue; sqlColumn = SQLUtils.quoteSymbol(dbms, sqlColumn); // hack if (secondarySqlKeyColumn != null && secondarySqlKeyColumn.length() > 0) sqlColumn += "," + secondarySqlKeyColumn; // generate column query query = String.format("SELECT %s,%s FROM %s", keyColumnName, sqlColumn, SQLUtils.quoteSchemaTable(dbms, sqlSchema, sqlTable)); if (filteredValues != null) { // generate one query per unique filter value combination for (int iRow = 0 ; iRow < filteredValues.rows.length ; iRow++ ) { String filteredQuery = buildFilteredQuery(conn, query, filteredValues, iRow); titles.add(buildFilteredColumnTitle(configColumnNames[iCol], filteredValues, iRow)); queries.add(filteredQuery); dataTypes.add(testQueryAndGetDataType(conn, filteredQuery)); } } else { titles.add(configColumnNames[iCol]); queries.add(query); dataTypes.add(testQueryAndGetDataType(conn, query)); } } // done generating queries // generate config DataTable entry createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_DATATABLE, configDataTableName); config.removeDataTable(configDataTableName); Map<String, String> metadata = new HashMap<String, String>(); metadata.put(Metadata.DATATABLE.toString(), configDataTableName); metadata.put(Metadata.KEYTYPE.toString(), keyType); metadata.put(Metadata.GEOMETRYCOLLECTION.toString(), geometryCollectionName); int numberSqlColumns = titles.size(); for (int i = 0; i < numberSqlColumns; i++) { metadata.put(Metadata.NAME.toString(), titles.get(i)); metadata.put(Metadata.DATATYPE.toString(), dataTypes.get(i)); AttributeColumnInfo attrInfo = new AttributeColumnInfo(connectionName, queries.get(i), metadata); config.addAttributeColumn(attrInfo); } backupAndSaveConfig(config); } catch (SQLException e) { throw new RemoteException(String.format("Failed to add DataTable \"%s\" to the configuration.\n", configDataTableName), e); } 
catch (RemoteException e) { throw new RemoteException(String.format("Failed to add DataTable \"%s\" to the configuration.\n", configDataTableName), e); } if (sqlColumnNames.length == 0) throw new RemoteException("No columns were found."); return String.format("DataTable \"%s\" was added to the configuration with %s generated attribute column queries.\n", configDataTableName, titles.size()); } private String testQueryAndGetDataType(Connection conn, String query) throws RemoteException { Statement stmt = null; ResultSet rs = null; DataType dataType = null; try { String dbms = conn.getMetaData().getDatabaseProductName(); if (!dbms.equalsIgnoreCase(SQLUtils.SQLSERVER) && !dbms.equalsIgnoreCase(SQLUtils.ORACLE)) query += " LIMIT 1"; stmt = conn.createStatement(); rs = stmt.executeQuery(query); dataType = DataType.fromSQLType(rs.getMetaData().getColumnType(2)); } catch (SQLException e) { throw new RemoteException("Unable to execute generated query:\n" + query, e); } finally { SQLUtils.cleanup(rs); SQLUtils.cleanup(stmt); } return dataType.toString(); } private String buildFilteredColumnTitle(String columnName, SQLResult filteredValues, int filteredValueRow) { String columnTitle = columnName + " ("; for (int j = 0 ; j < filteredValues.rows[filteredValueRow].length ; j++ ) { if (j > 0) columnTitle += " "; boolean isNull = filteredValues.rows[filteredValueRow][j] == null; String value; if (isNull) value = "NULL"; else value = filteredValues.rows[filteredValueRow][j].toString(); columnTitle += isNull ? "NULL" : value; } columnTitle += ")"; return columnTitle; } private String buildFilteredQuery(Connection conn, String unfilteredQuery, SQLResult filteredValues, int filteredValueRow) throws IllegalArgumentException, SQLException { String query = unfilteredQuery + " where "; for (int j = 0 ; j < filteredValues.rows[filteredValueRow].length ; j++ ) { if (j > 0) query += " and "; boolean isNull = filteredValues.rows[filteredValueRow][j] == null; String value; if (isNull) value = "NULL"; else value = filteredValues.rows[filteredValueRow][j].toString(); query += String.format( "%s=%s", SQLUtils.quoteSymbol(conn, filteredValues.columnNames[j]), isNull ? "NULL" : SQLUtils.quoteString(conn, value) ); } return query; } /** * The following functions involve getting shapes into the database and into * the config file. 
*/ synchronized public String convertShapefileToSQLStream(String configConnectionName, String password, String[] fileNameWithoutExtension, String[] keyColumns, String sqlSchema, String sqlTablePrefix, boolean sqlOverwrite, String configGeometryCollectionName, boolean configOverwrite, String configKeyType, String projectionSRS, String[] nullValues) throws RemoteException { // use lower case sql table names (fix for mysql linux problems) sqlTablePrefix = sqlTablePrefix.toLowerCase(); ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); ConnectionInfo connInfo = config.getConnectionInfo(configConnectionName); if (sqlOverwrite && !connInfo.is_superuser) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite SQL tables.", configConnectionName)); if (!SQLConfigUtils.userCanModifyGeometryCollection(config, configConnectionName, configGeometryCollectionName)) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite GeometryCollection \"%s\".", configConnectionName, configGeometryCollectionName)); if (!configOverwrite) { if (ListUtils.findIgnoreCase(configGeometryCollectionName, config.getGeometryCollectionNames(null)) >= 0) throw new RemoteException(String.format( "Shapes not imported. SQLConfig geometryCollection \"%s\" already exists.", configGeometryCollectionName)); } String dbfTableName = sqlTablePrefix + "_dbfdata"; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); // store dbf data to database storeDBFDataToDatabase(configConnectionName, password, fileNameWithoutExtension, sqlSchema, dbfTableName, sqlOverwrite, nullValues); GeometryStreamConverter converter = new GeometryStreamConverter( new SQLGeometryStreamDestination(conn, sqlSchema, sqlTablePrefix, sqlOverwrite) ); for (String file : fileNameWithoutExtension) { // convert shape data to streaming sql format String shpfile = uploadPath + file + ".shp"; SHPGeometryStreamUtils.convertShapefile(converter, shpfile, Arrays.asList(keyColumns)); } converter.flushAndCommitAll(); } catch (Exception e) { e.printStackTrace(); throw new RemoteException("Shapefile import failed", e); } finally { SQLUtils.cleanup(conn); } String fileList = Arrays.asList(fileNameWithoutExtension).toString(); if (fileList.length() > 103) fileList = fileList.substring(0, 50) + "..." 
+ fileList.substring(fileList.length() - 50); String importNotes = String.format("file: %s, keyColumns: %s", fileList, keyColumns); // get key column SQL code String keyColumnsString; if (keyColumns.length == 1) { keyColumnsString = keyColumns[0]; } else { keyColumnsString = "CONCAT("; for (int i = 0; i < keyColumns.length; i++) { if (i > 0) keyColumnsString += ","; keyColumnsString += "CAST(" + keyColumns[i] + " AS CHAR)"; } keyColumnsString += ")"; } // add SQL statements to sqlconfig String[] columnNames = getColumnsList(configConnectionName, sqlSchema, dbfTableName).toArray(new String[0]); String resultAddSQL = addConfigDataTable(config, configOverwrite, configGeometryCollectionName, configConnectionName, configGeometryCollectionName, configKeyType, keyColumnsString, null, columnNames, columnNames, sqlSchema, dbfTableName, false, null); return resultAddSQL + "\n\n" + addConfigGeometryCollection(configOverwrite, configConnectionName, password, configGeometryCollectionName, configKeyType, sqlSchema, sqlTablePrefix, projectionSRS, importNotes); } synchronized public String storeDBFDataToDatabase(String configConnectionName, String password, String[] fileNameWithoutExtension, String sqlSchema, String sqlTableName, boolean sqlOverwrite, String[] nullValues) throws RemoteException { // use lower case sql table names (fix for mysql linux problems) sqlTableName = sqlTableName.toLowerCase(); ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); ConnectionInfo connInfo = config.getConnectionInfo(configConnectionName); if (sqlOverwrite && !connInfo.is_superuser) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite SQL tables.", configConnectionName)); Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); File[] files = new File[fileNameWithoutExtension.length]; for (int i = 0; i < files.length; i++) files[i] = new File(uploadPath + fileNameWithoutExtension[i] + ".dbf"); DBFUtils.storeAttributes(files, conn, sqlSchema, sqlTableName, sqlOverwrite, nullValues); } catch (Exception e) { e.printStackTrace(); throw new RemoteException("DBF import failed", e); } finally { SQLUtils.cleanup(conn); } // String importNotes = String.format("file: %s, keyColumns: %s", // fileNameWithoutExtension, keyColumns); return "DBF Data stored successfully"; } synchronized public String addConfigGeometryCollection(boolean configOverwrite, String configConnectionName, String password, String configGeometryCollectionName, String configKeyType, String sqlSchema, String sqlTablePrefix, String projectionSRS, String importNotes) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); if (!configOverwrite) { if (ListUtils.findIgnoreCase(configGeometryCollectionName, config.getGeometryCollectionNames(null)) >= 0) throw new RemoteException(String.format("GeometryCollection \"%s\" already exists in the configuration.", configGeometryCollectionName)); } else { if (!SQLConfigUtils.userCanModifyGeometryCollection(config, configConnectionName, configGeometryCollectionName)) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite GeometryCollection \"%s\".", configConnectionName, configGeometryCollectionName)); } // add geometry collection GeometryCollectionInfo info = new GeometryCollectionInfo(); info.name = configGeometryCollectionName; info.connection = configConnectionName; info.schema = sqlSchema; info.tablePrefix = sqlTablePrefix; info.keyType 
= configKeyType; info.importNotes = importNotes; info.projection = projectionSRS; createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_GEOMETRYCOLLECTION, info.name); config.removeGeometryCollection(info.name); config.addGeometryCollection(info); backupAndSaveConfig(config); return String.format("GeometryCollection \"%s\" was added to the configuration", configGeometryCollectionName); } // ////////////////////////////////////////////// // functions for managing dublin core metadata // ////////////////////////////////////////////// /** * Adds Dublin Core Elements to the metadata store in association with the * given dataset.. * * @param connectionName * the name of the connection to use * @param password * the password for the given connection * @param dataTableName * the name of the dataset to associate the given elements with * @param elements * the key-value pairs defining the new Dublin Core elements to * add. Keys are expected to be like "dc:title" and * "dc:description", values are expected to be Strings. * @throws RemoteException */ synchronized public void addDCElements(String connectionName, String password, String dataTableName, Map<String, Object> elements) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify DataTable \"%s\".", connectionName, dataTableName)); DatabaseConfigInfo configInfo = config.getDatabaseConfigInfo(); String configConnectionName = configInfo.connection; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); } catch (SQLException e) { throw new RemoteException("addDCElements failed", e); } String schema = configInfo.schema; DublinCoreUtils.addDCElements(conn, schema, dataTableName, elements); // System.out.println("in addDCElements"); // int i = 0; // for (Map.Entry<String, Object> e : elements.entrySet()) // System.out.println(" elements[" + (i++) + "] = {" + e.getKey() // + " = " + e.getValue()); } /** * Queries the database for the Dublin Core metadata elements associated * with the data set with the given name and returns the result. The result * is returned as a Map whose keys are Dublin Core property names and whose * values are the values for those properties (for the given data set) * stored in the metadata store. * * If an error occurs, a map is returned with a single key-value pair whose * key is "error". 
*/ synchronized public DublinCoreElement[] listDCElements(String connectionName, String password, String dataTableName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); DatabaseConfigInfo configInfo = config.getDatabaseConfigInfo(); String configConnectionName = configInfo.connection; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); } catch (SQLException e) { throw new RemoteException("listDCElements failed", e); } String schema = configInfo.schema; List<DublinCoreElement> list = DublinCoreUtils.listDCElements(conn, schema, dataTableName); int n = list.size(); return list.toArray(new DublinCoreElement[n]); // DublinCoreElement[] result = new DublinCoreElement[n]; // for (int i = 0; i < n; i++) // { // result[i] = list.get(i); // System.out.println("list.get(i).element = " + list.get(i).element + // " list.get(i).value = " + list.get(i).value); // } // return result; } /** * Deletes the specified metadata entries. */ synchronized public void deleteDCElements(String connectionName, String password, String dataTableName, List<Map<String, String>> elements) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify DataTable \"%s\".", connectionName, dataTableName)); DatabaseConfigInfo configInfo = config.getDatabaseConfigInfo(); String configConnectionName = configInfo.connection; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); } catch (SQLException e) { throw new RemoteException("deleteDCElements failed", e); } String schema = configInfo.schema; DublinCoreUtils.deleteDCElements(conn, schema, dataTableName, elements); } /** * Saves an edited metadata row to the server. 
*/ synchronized public void updateEditedDCElement(String connectionName, String password, String dataTableName, Map<String, String> object) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify DataTable \"%s\".", connectionName, dataTableName)); DatabaseConfigInfo configInfo = config.getDatabaseConfigInfo(); String configConnectionName = configInfo.connection; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); } catch (SQLException e) { throw new RemoteException("updateEditedDCElement failed", e); } String schema = configInfo.schema; DublinCoreUtils.updateEditedDCElement(conn, schema, dataTableName, object); } synchronized public String saveReportDefinitionFile(String fileName, String fileContents) throws RemoteException { File reportDefFile; try { File docrootDir = new File(docrootPath); if (!docrootDir.exists()) throw new RemoteException("Unable to find docroot directory"); File reportsDir = new File(docrootDir, "\\WeaveReports"); if (!reportsDir.exists()) reportsDir.mkdir(); if (!reportsDir.exists()) throw new RemoteException("Unable to access reports directory"); reportDefFile = new File(reportsDir, fileName); BufferedWriter writer = new BufferedWriter(new FileWriter(reportDefFile)); writer.write(fileContents); writer.close(); } catch (Exception e) { throw new RemoteException("Error writing report definition file: " + fileName, e); } return "Successfully wrote the report definition file: " + reportDefFile.getAbsolutePath(); } }
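// Illustrative call to importCSV (defined above); the literal values are hypothetical and only demonstrate
// the parameter order, they do not refer to any real connection, file, or geometry collection:
//   importCSV("myConnection", "myPassword", "obesity.csv", "fips", "", "public", "obesity_data", false,
//             "Obesity Data", false, "US Counties", "US FIPS Codes", new String[0], null);
// A call like this would import the CSV into the SQL table public.obesity_data and add a DataTable entry
// named "Obesity Data" containing one generated attribute column query per CSV column.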
WeaveServices/src/weave/servlets/AdminService.java
/* Weave (Web-based Analysis and Visualization Environment) Copyright (C) 2008-2011 University of Massachusetts Lowell This file is a part of Weave. Weave is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License, Version 3, as published by the Free Software Foundation. Weave is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Weave. If not, see <http://www.gnu.org/licenses/>. */ package weave.servlets; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.rmi.RemoteException; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.UUID; import java.util.Vector; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import weave.beans.AdminServiceResponse; import weave.beans.UploadFileFilter; import weave.beans.UploadedFile; import weave.config.DatabaseConfig; import weave.config.DublinCoreElement; import weave.config.DublinCoreUtils; import weave.config.ISQLConfig; import weave.config.ISQLConfig.AttributeColumnInfo; import weave.config.ISQLConfig.AttributeColumnInfo.DataType; import weave.config.ISQLConfig.AttributeColumnInfo.Metadata; import weave.config.ISQLConfig.ConnectionInfo; import weave.config.ISQLConfig.DatabaseConfigInfo; import weave.config.ISQLConfig.GeometryCollectionInfo; import weave.config.SQLConfigManager; import weave.config.SQLConfigUtils; import weave.config.SQLConfigXML; import weave.geometrystream.GeometryStreamConverter; import weave.geometrystream.SHPGeometryStreamUtils; import weave.geometrystream.SQLGeometryStreamDestination; import weave.utils.CSVParser; import weave.utils.DBFUtils; import weave.utils.FileUtils; import weave.utils.ListUtils; import weave.utils.SQLResult; import weave.utils.SQLUtils; import weave.utils.XMLUtils; public class AdminService extends GenericServlet { private static final long serialVersionUID = 1L; public AdminService() { } /** * This constructor is for testing only. * @param configManager */ public AdminService(SQLConfigManager configManager) { this.configManager = configManager; } public void init(ServletConfig config) throws ServletException { super.init(config); configManager = SQLConfigManager.getInstance(config.getServletContext()); tempPath = configManager.getContextParams().getTempPath(); uploadPath = configManager.getContextParams().getUploadPath(); docrootPath = configManager.getContextParams().getDocrootPath(); } // /** // * ONLY FOR TESTING. 
// * @throws ServletException // */ // public void init2() throws ServletException // { // tempPath = configManager.getContextParams().getTempPath(); // uploadPath = configManager.getContextParams().getUploadPath(); // docrootPath = configManager.getContextParams().getDocrootPath(); // } private String tempPath; private String uploadPath; private String docrootPath; private static int StringType = 0; private static int IntType = 1; private static int DoubleType = 2; private SQLConfigManager configManager; synchronized public AdminServiceResponse checkSQLConfigExists() { try { if (databaseConfigExists()) return new AdminServiceResponse(true, "Configuration file exists."); } catch (RemoteException se) { se.printStackTrace(); File configFile = new File(configManager.getConfigFileName()); if (configFile.exists()) return new AdminServiceResponse(false, String.format("%s is invalid. Please edit the file and fix the problem" + " or delete it and create a new one through the admin console.\n\n%s", configFile.getName(), se.getMessage())); } return new AdminServiceResponse(false, "The configuration storage location must be specified."); } synchronized private boolean databaseConfigExists() throws RemoteException { configManager.detectConfigChanges(); ISQLConfig config = configManager.getConfig(); return config.isConnectedToDatabase(); } synchronized public boolean authenticate(String connectionName, String password) throws RemoteException { boolean result = checkPasswordAndGetConfig(connectionName, password) != null; if (!result) System.out.println(String.format("authenticate(\"%s\",\"%s\") == %s", connectionName, password, result)); return result; } synchronized private ISQLConfig checkPasswordAndGetConfig(String connectionName, String password) throws RemoteException { configManager.detectConfigChanges(); ISQLConfig config = configManager.getConfig(); ConnectionInfo info = config.getConnectionInfo(connectionName); if (info == null || !password.equals(info.pass)) throw new RemoteException("Incorrect username or password."); return config; } synchronized private void backupAndSaveConfig(ISQLConfig config) throws RemoteException { try { String fileName = configManager.getConfigFileName(); File configFile = new File(fileName); File backupFile = new File(tempPath, "sqlconfig_backup.txt"); // make a backup FileUtils.copy(configFile, backupFile); // save the new config to the file XMLUtils.getStringFromXML(config.getDocument(), SQLConfigXML.DTD_FILENAME, fileName); } catch (Exception e) { throw new RemoteException("Backup failed", e); } } /** * This creates a backup of a single config entry. 
* * @throws Exception */ synchronized private void createConfigEntryBackup(ISQLConfig config, String entryType, String entryName) throws RemoteException { // copy the config entry to a temp SQLConfigXML String entryXMLString = null; // create a block of code so tempConfig won't stay in memory try { SQLConfigXML tempConfig = new SQLConfigXML(); SQLConfigUtils.migrateSQLConfigEntry(config, tempConfig, entryType, entryName); entryXMLString = tempConfig.getConfigEntryXML(entryType, entryName); // stop if xml entry is blank if (entryXMLString == null || !entryXMLString.contains("/")) return; // write the config entry to a temp file File newFile = new File(tempPath, "backup_" + entryType + "_" + entryName.replaceAll("[^a-zA-Z0-9]", "") + "_" + UUID.randomUUID() + ".txt"); BufferedWriter out = new BufferedWriter(new FileWriter(newFile)); out.write(entryXMLString); out.flush(); out.close(); } catch (Exception e) { throw new RemoteException("Backup failed", e); } } // ///////////////////////////////////////////////// // functions for managing Weave client XML files // ///////////////////////////////////////////////// /** * Return a list of Client Config files from docroot * * @return A list of (xml) client config files existing in the docroot * folder. */ synchronized public String[] getWeaveFileNames(String configConnectionName, String password) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); ConnectionInfo info = config.getConnectionInfo(configConnectionName); String path = docrootPath; if(info.folderName.length() > 0) path = path + info.folderName + "/"; File docrootFolder = new File(path); FilenameFilter xmlFilter = new FilenameFilter() { public boolean accept(File dir, String fileName) { return (fileName.endsWith(".xml")); } }; File[] files = null; List<String> listOfFiles = new ArrayList<String>(); try { docrootFolder.mkdirs(); files = docrootFolder.listFiles(xmlFilter); for (File file : files) { if (file.isFile()) { // System.out.println(file.getName()); listOfFiles.add(((info.folderName.length() > 0) ? info.folderName + "/" : "") + file.getName().toString()); } } } catch (SecurityException e) { throw new RemoteException("Permission error reading directory.",e); } return ListUtils.toStringArray(listOfFiles); } synchronized public String saveWeaveFile(String connectionName, String password, String fileContents, String xmlFile, boolean overwriteFile) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); ConnectionInfo info = config.getConnectionInfo(connectionName); // 5.2 client web page configuration file ***.xml String output = ""; try { // remove special characters xmlFile = xmlFile.replace("\\", "").replace("/", ""); if (!xmlFile.toLowerCase().endsWith(".xml")) xmlFile += ".xml"; String path = docrootPath; if(info.folderName.length() > 0) path = path + info.folderName + "/"; File file = new File(path + xmlFile); if (file.exists()) { if (!overwriteFile) return String.format("File already exists and was not changed: \"%s\"", xmlFile); if (!info.is_superuser && info.folderName.length() == 0) return String.format("User \"%s\" does not have permission to overwrite configuration files. 
Please save under a new filename.", connectionName); } BufferedWriter out = new BufferedWriter(new FileWriter(file)); output = fileContents; out.write(output); out.close(); } catch (IOException e) { throw new RemoteException("Error occurred while saving file", e); } return "Successfully generated " + xmlFile + "."; } /** * Delete a Client Config file from docroot * * @return A String message indicating if file was deleted. * */ synchronized public String removeWeaveFile(String configConnectionName, String password, String fileName) throws RemoteException, IllegalArgumentException { ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); ConnectionInfo info = config.getConnectionInfo(configConnectionName); if (!config.getConnectionInfo(configConnectionName).is_superuser) return String.format("User \"%s\" does not have permission to remove configuration files.", configConnectionName); String path = docrootPath; if(info.folderName.length() > 0) path = path + info.folderName + "/"; File f = new File(path + fileName); try { // Make sure the file or directory exists and isn't write protected if (!f.exists()) throw new IllegalArgumentException("Delete: no such file or directory: " + fileName); if (!f.canWrite()) throw new IllegalArgumentException("File cannot be deleted Delete: write protected: " + fileName); // If it is a directory, make sure it is empty if (f.isDirectory()) throw new IllegalArgumentException("Cannot Delete a directory"); // Attempt to delete it boolean success = f.delete(); if (!success) throw new IllegalArgumentException("Delete: deletion failed"); return "Successfully deleted file " + fileName; } catch (SecurityException e) { throw new RemoteException("File could not be deleted", e); } } // ///////////////////////////////////////////////// // functions for managing SQL connection entries // ///////////////////////////////////////////////// synchronized public String[] getConnectionNames(String connectionName, String password) throws RemoteException { try { // only check password and superuser privileges if dbInfo is valid if (databaseConfigExists()) { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); // non-superusers can't get connection info for other users if (!config.getConnectionInfo(connectionName).is_superuser) return new String[]{connectionName}; } // otherwise, return all connection names List<String> connectionNames = configManager.getConfig().getConnectionNames(); return ListUtils.toStringArray(getSortedUniqueValues(connectionNames, false)); } catch (RemoteException se) { return new String[]{}; } } synchronized public ConnectionInfo getConnectionInfo(String loginConnectionName, String loginPassword, String connectionNameToGet) throws RemoteException { ISQLConfig config; if (databaseConfigExists()) { config = checkPasswordAndGetConfig(loginConnectionName, loginPassword); // non-superusers can't get connection info if (!config.getConnectionInfo(loginConnectionName).is_superuser) return null; } else { config = configManager.getConfig(); } ConnectionInfo info = config.getConnectionInfo(connectionNameToGet); info.pass = ""; // don't send password return info; } synchronized public String saveConnectionInfo(String currentConnectionName, String currentPassword, String newConnectionName, String dbms, String ip, String port, String database, String sqlUser, String password, String folderName, boolean grantSuperuser, boolean configOverwrite) throws RemoteException { if (newConnectionName.equals("")) throw new 
RemoteException("Connection name cannot be empty."); ConnectionInfo newConnectionInfo = new ConnectionInfo(); newConnectionInfo.name = newConnectionName; newConnectionInfo.dbms = dbms; newConnectionInfo.ip = ip; newConnectionInfo.port = port; newConnectionInfo.database = database; newConnectionInfo.user = sqlUser; newConnectionInfo.pass = password; newConnectionInfo.folderName = folderName; newConnectionInfo.is_superuser = true; // if the config file doesn't exist, create it String fileName = configManager.getConfigFileName(); if (!new File(fileName).exists()) { try { XMLUtils.getStringFromXML(new SQLConfigXML().getDocument(), SQLConfigXML.DTD_FILENAME, fileName); } catch (Exception e) { e.printStackTrace(); } } configManager.detectConfigChanges(); ISQLConfig config = configManager.getConfig(); // if there are existing connections and DatabaseConfigInfo exists, check the password. otherwise, allow anything. if (config.getConnectionNames().size() > 0 && config.getDatabaseConfigInfo() != null) { config = checkPasswordAndGetConfig(currentConnectionName, currentPassword); // non-superusers can't save connection info if (!config.getConnectionInfo(currentConnectionName).is_superuser) throw new RemoteException(String.format("User \"%s\" does not have permission to modify connections.", currentConnectionName)); // is_superuser for the new connection will only be false if there is an existing superuser connection and grantSuperuser is false. newConnectionInfo.is_superuser = grantSuperuser; } // test connection only - to validate parameters Connection conn = null; try { conn = newConnectionInfo.getConnection(); SQLUtils.testConnection(conn); } catch (Exception e) { throw new RemoteException(String.format("The connection named \"%s\" was not created because the server could not" + " connect to the specified database with the given parameters.", newConnectionInfo.name), e); } finally { // close the connection, as we will not use it later SQLUtils.cleanup(conn); } // if the connection already exists AND overwrite == false throw error if (!configOverwrite && ListUtils.findString(newConnectionInfo.name, config.getConnectionNames()) >= 0) { throw new RemoteException(String.format("The connection named \"%s\" already exists. 
Action cancelled.", newConnectionInfo.name)); } // generate config connection entry try { createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_CONNECTION, newConnectionInfo.name); // do not delete if this is the last user (which must be a superuser) List<String> connectionNames = config.getConnectionNames(); // check for number of superusers int numSuperUsers = 0; for (String name : connectionNames) { if (config.getConnectionInfo(name).is_superuser) ++numSuperUsers; if (numSuperUsers >= 2) break; } // sanity check if (currentConnectionName == newConnectionName && numSuperUsers == 1 && !newConnectionInfo.is_superuser) throw new RemoteException("Cannot remove superuser privileges from last remaining superuser."); config.removeConnection(newConnectionInfo.name); config.addConnection(newConnectionInfo); backupAndSaveConfig(config); } catch (Exception e) { e.printStackTrace(); throw new RemoteException( String.format("Unable to create connection entry named \"%s\": %s", newConnectionInfo.name, e.getMessage()),e ); } return String.format("The connection named \"%s\" was created successfully.", newConnectionName); } synchronized public String removeConnectionInfo(String loginConnectionName, String loginPassword, String connectionNameToRemove) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(loginConnectionName, loginPassword); // allow only a superuser to remove a connection ConnectionInfo loginConnectionInfo = config.getConnectionInfo(loginConnectionName); if (!loginConnectionInfo.is_superuser) throw new RemoteException("Only superusers can remove connections."); try { if (ListUtils.findString(connectionNameToRemove, config.getConnectionNames()) < 0) throw new RemoteException("Connection \"" + connectionNameToRemove + "\" does not exist."); createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_CONNECTION, connectionNameToRemove); // check for number of superusers List<String> connectionNames = config.getConnectionNames(); int numSuperUsers = 0; for (String name : connectionNames) if (config.getConnectionInfo(name).is_superuser) ++numSuperUsers; // do not allow removal of last superuser if (numSuperUsers == 1 && loginConnectionName.equals(connectionNameToRemove)) throw new RemoteException("Cannot remove the only superuser."); config.removeConnection(connectionNameToRemove); backupAndSaveConfig(config); return "Connection \"" + connectionNameToRemove + "\" was deleted."; } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } synchronized public DatabaseConfigInfo getDatabaseConfigInfo(String connectionName, String password) throws RemoteException { try { if (databaseConfigExists()) return checkPasswordAndGetConfig(connectionName, password).getDatabaseConfigInfo(); } catch (RemoteException e) { if (e.detail instanceof FileNotFoundException) return null; throw e; } return null; } synchronized public String migrateConfigToDatabase(String connectionName, String password, String schema, String geometryConfigTable, String dataConfigTable) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!config.getConnectionInfo(connectionName).is_superuser) throw new RemoteException("Unable to migrate config to database without superuser privileges."); String configFileName = configManager.getConfigFileName(); int count = 0; try { // load xmlConfig in memory SQLConfigXML xmlConfig = new SQLConfigXML(configFileName); DatabaseConfigInfo info = new DatabaseConfigInfo(); info.schema = schema; info.connection = 
connectionName; info.dataConfigTable = dataConfigTable; info.geometryConfigTable = geometryConfigTable; // save db config info to in-memory xmlConfig xmlConfig.setDatabaseConfigInfo(info); // migrate from in-memory xmlConfig to the db count = SQLConfigUtils.migrateSQLConfig(xmlConfig, new DatabaseConfig(xmlConfig)); // save in-memory xmlConfig to disk backupAndSaveConfig(xmlConfig); } catch (Exception e) { e.printStackTrace(); if (count > 0) throw new RemoteException("Migrated " + count + " items then failed", e); throw new RemoteException("Migration failed", e); } String result = String.format("The admin console will now use the \"%s\" connection to store configuration information.", connectionName); if (count > 0) result = String.format("%s items were copied from %s into the database. %s", count, new File(configFileName).getName(), result); return result; } // ///////////////////////////////////////////////// // functions for managing DataTable entries // ///////////////////////////////////////////////// synchronized public String[] getDataTableNames(String connectionName, String password) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); ConnectionInfo cInfo = config.getConnectionInfo(connectionName); String dataConnection; if (cInfo.is_superuser) dataConnection = null; // let it get all of the data tables else dataConnection = connectionName; // get only the ones on this connection return ListUtils.toStringArray(config.getDataTableNames(dataConnection)); } /** * Returns metadata about columns of the given data table. */ synchronized public AttributeColumnInfo[] getDataTableInfo(String connectionName, String password, String dataTableName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); List<AttributeColumnInfo> info = config.getAttributeColumnInfo(dataTableName); return info.toArray(new AttributeColumnInfo[info.size()]); } /** * Returns the results of testing attribute column sql queries. 
*/ synchronized public AttributeColumnInfo[] testAllQueries(String connectionName, String password, String dataTableName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); HashMap<String, String> params = new HashMap<String, String>(); params.put(Metadata.DATATABLE.toString(), dataTableName); List<AttributeColumnInfo> infolist = config.getAttributeColumnInfo(params); for (int i = 0; i < infolist.size(); i ++) { AttributeColumnInfo attributeColumnInfo = infolist.get(i); try { String query = attributeColumnInfo.sqlQuery; System.out.println(query); SQLResult result = SQLConfigUtils.getRowSetFromQuery(config, attributeColumnInfo.connection, query); attributeColumnInfo.metadata.put(AttributeColumnInfo.SQLRESULT, String.format("Returned %s rows", result.rows.length)); } catch (Exception e) { e.printStackTrace(); attributeColumnInfo.metadata.put(AttributeColumnInfo.SQLRESULT, e.getMessage()); } } return infolist.toArray(new AttributeColumnInfo[0]); } @SuppressWarnings("unchecked") synchronized public String saveDataTableInfo(String connectionName, String password, Object[] columnMetadata) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); // first validate the information String dataTableName = null; for (Object object : columnMetadata) { Map<String, Object> metadata = (Map<String, Object>) object; String _dataTableName = (String) metadata.get(Metadata.DATATABLE.toString()); if (dataTableName == null) dataTableName = _dataTableName; else if (dataTableName != _dataTableName) throw new RemoteException("overwriteDataTableEntry(): dataTable property not consistent among column entries."); // String _dataTableConnection = (String) metadata.get(Metadata.CONNECTION.toString()); // if (dataTableConnection == null) // dataTableConnection = _dataTableConnection; // else if (dataTableConnection != _dataTableConnection) // throw new RemoteException("overwriteDataTableEntry(): " + Metadata.CONNECTION.toString() + " property not consistent among column entries."); } if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify DataTable \"%s\".", connectionName, dataTableName)); try { // start a block of code so tempConfig will not stay in memory { // make a new SQLConfig object and add the entry SQLConfigXML tempConfig = new SQLConfigXML(); // add all the columns to the new blank config for (int i = 0; i < columnMetadata.length; i++) { // create metadata map that AttributeColumnInfo wants Map<String, String> metadata = new HashMap<String, String>(); for (Entry<String, Object> entry : ((Map<String, Object>) columnMetadata[i]).entrySet()) { //System.out.println(entry.getKey() + ':' + (String) entry.getValue()); metadata.put(entry.getKey(), (String) entry.getValue()); } // Exclude connection & sqlQuery properties from metadata // object // because they are separate parameters to the constructor. 
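// Illustrative sketch of one incoming column-entry map (keys and values here are
// hypothetical, not taken from a real configuration):
//   { "dataTable": "example_table", "name": "example_column", "dataType": "number",
//     "connection": "example_connection", "sqlQuery": "SELECT the_key, the_value FROM example_schema.example_table" }
// The connection and sqlQuery entries are removed from the map below and passed as
// separate constructor arguments; the remaining pairs stay in the metadata map.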
AttributeColumnInfo columnInfo = new AttributeColumnInfo(metadata.remove(AttributeColumnInfo.CONNECTION), metadata.remove(AttributeColumnInfo.SQLQUERY), metadata); // add the column info to the temp blank config tempConfig.addAttributeColumn(columnInfo); } // backup any existing dataTable entry, then copy over the new // entry createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_DATATABLE, dataTableName); SQLConfigUtils.migrateSQLConfigEntry(tempConfig, config, ISQLConfig.ENTRYTYPE_DATATABLE, dataTableName); } backupAndSaveConfig(config); return String.format("The dataTable entry \"%s\" was saved.", dataTableName); } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } synchronized public void removeAttributeColumnInfo(String connectionName, String password, Object[] columnMetadata) throws RemoteException { } synchronized public String removeDataTableInfo(String connectionName, String password, String dataTableName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to remove DataTable \"%s\".", connectionName, dataTableName)); try { if (ListUtils.findString(dataTableName, config.getDataTableNames(null)) < 0) throw new RemoteException("DataTable \"" + dataTableName + "\" does not exist."); createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_DATATABLE, dataTableName); config.removeDataTable(dataTableName); backupAndSaveConfig(config); return "DataTable \"" + dataTableName + "\" was deleted."; } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } // ///////////////////////////////////////////////////// // functions for managing GeometryCollection entries // ///////////////////////////////////////////////////// synchronized public String[] getGeometryCollectionNames(String connectionName, String password) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); ConnectionInfo cInfo = config.getConnectionInfo(connectionName); String geometryConnection; if (cInfo.is_superuser) geometryConnection = null; // let it get all of the geometries else geometryConnection = connectionName; // get only the ones on this connection return ListUtils.toStringArray(config.getGeometryCollectionNames(geometryConnection)); } /** * Returns metadata about the given geometry collection. 
*/ synchronized public GeometryCollectionInfo getGeometryCollectionInfo(String connectionName, String password, String geometryCollectionName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); return config.getGeometryCollectionInfo(geometryCollectionName); } synchronized public String saveGeometryCollectionInfo(String connectionName, String password, String geomName, String geomConnection, String geomSchema, String geomTablePrefix, String geomKeyType, String geomImportNotes, String geomProjection) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyGeometryCollection(config, connectionName, geomName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify GeometryCollection \"%s\".", connectionName, geomName)); // if this user isn't a superuser, don't allow an overwrite of an existing geometrycollection ConnectionInfo currentConnectionInfo = config.getConnectionInfo(connectionName); if (!currentConnectionInfo.is_superuser) { GeometryCollectionInfo oldGeometry = config.getGeometryCollectionInfo(geomName); if (oldGeometry != null && !oldGeometry.connection.equals(connectionName)) throw new RemoteException("An existing geometry collection with the same name exists on another connection. Unable to overwrite without superuser privileges."); } try { // start a block of code so tempConfig will not stay in memory { // make a new SQLConfig object and add the entry SQLConfigXML tempConfig = new SQLConfigXML(); // add all the columns to the new blank config GeometryCollectionInfo info = new GeometryCollectionInfo(); info.name = geomName; info.connection = geomConnection; info.schema = geomSchema; info.tablePrefix = geomTablePrefix; info.keyType = geomKeyType; info.importNotes = geomImportNotes; info.projection = geomProjection; // add the info to the temp blank config tempConfig.addGeometryCollection(info); // backup any existing dataTable entry, then copy over the new // entry createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_GEOMETRYCOLLECTION, geomName); SQLConfigUtils.migrateSQLConfigEntry(tempConfig, config, ISQLConfig.ENTRYTYPE_GEOMETRYCOLLECTION, geomName); } backupAndSaveConfig(config); return String.format("The geometryCollection entry \"%s\" was saved.", geomName); } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } synchronized public String removeGeometryCollectionInfo(String connectionName, String password, String geometryCollectionName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyGeometryCollection(config, connectionName, geometryCollectionName)) throw new RemoteException(String.format("User \"%s\" does not have permission to remove GeometryCollection \"%s\".", connectionName, geometryCollectionName)); try { if (ListUtils.findString(geometryCollectionName, config.getGeometryCollectionNames(null)) < 0) throw new RemoteException("Geometry Collection \"" + geometryCollectionName + "\" does not exist."); createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_GEOMETRYCOLLECTION, geometryCollectionName); config.removeGeometryCollection(geometryCollectionName); backupAndSaveConfig(config); return "Geometry Collection \"" + geometryCollectionName + "\" was deleted."; } catch (Exception e) { e.printStackTrace(); throw new RemoteException(e.getMessage()); } } // /////////////////////////////////////////// // 
functions for getting SQL info // /////////////////////////////////////////// /** * The following functions get information about the database associated * with a given connection name. */ synchronized public String[] getSchemas(String configConnectionName, String password) throws RemoteException { checkPasswordAndGetConfig(configConnectionName, password); List<String> schemasList = getSchemasList(configConnectionName); return ListUtils.toStringArray(getSortedUniqueValues(schemasList, false)); } synchronized public String[] getTables(String configConnectionName, String password, String schemaName) throws RemoteException { checkPasswordAndGetConfig(configConnectionName, password); List<String> tablesList = getTablesList(configConnectionName, schemaName); return ListUtils.toStringArray(getSortedUniqueValues(tablesList, false)); } synchronized public String[] getColumns(String configConnectionName, String password, String schemaName, String tableName) throws RemoteException { checkPasswordAndGetConfig(configConnectionName, password); return ListUtils.toStringArray(getColumnsList(configConnectionName, schemaName, tableName)); } synchronized private List<String> getSchemasList(String connectionName) throws RemoteException { ISQLConfig config = configManager.getConfig(); List<String> schemas; try { Connection conn = SQLConfigUtils.getStaticReadOnlyConnection(config, connectionName); schemas = SQLUtils.getSchemas(conn); } catch (SQLException e) { // e.printStackTrace(); throw new RemoteException("Unable to get schema list from database.", e); } finally { // SQLUtils.cleanup(conn); } // don't want to list information_schema. ListUtils.removeIgnoreCase("information_schema", schemas); return schemas; } synchronized private List<String> getTablesList(String connectionName, String schemaName) throws RemoteException { ISQLConfig config = configManager.getConfig(); List<String> tables; try { Connection conn = SQLConfigUtils.getStaticReadOnlyConnection(config, connectionName); tables = SQLUtils.getTables(conn, schemaName); } catch (SQLException e) { // e.printStackTrace(); throw new RemoteException("Unable to get schema list from database.", e); } finally { // SQLUtils.cleanup(conn); } return tables; } synchronized private List<String> getColumnsList(String connectionName, String schemaName, String tableName) throws RemoteException { ISQLConfig config = configManager.getConfig(); List<String> columns; try { Connection conn = SQLConfigUtils.getStaticReadOnlyConnection(config, connectionName); columns = SQLUtils.getColumns(conn, schemaName, tableName); } catch (SQLException e) { // e.printStackTrace(); throw new RemoteException("Unable to get column list from database.", e); } finally { // SQLUtils.cleanup(conn); } return columns; } // /////////////////////////////////////////// // functions for getting miscellaneous info // /////////////////////////////////////////// synchronized public String[] getKeyTypes(String connectionName, String password) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); return ListUtils.toStringArray(getSortedUniqueValues(config.getKeyTypes(), true)); } synchronized public UploadedFile[] getUploadedCSVFiles() throws RemoteException { File directory = new File(uploadPath); List<UploadedFile> list = new ArrayList<UploadedFile>(); File[] listOfFiles = null; try { if( directory.isDirectory() ) { listOfFiles = directory.listFiles(new UploadFileFilter("csv")); for( File file : listOfFiles ) { if( file.isFile() ) { UploadedFile 
uploadedFile = new UploadedFile( file.getName(), file.length(), file.lastModified() ); list.add(uploadedFile); } } } } catch(Exception e) { throw new RemoteException(e.getMessage()); } int n = list.size(); return list.toArray(new UploadedFile[n]); } synchronized public UploadedFile[] getUploadedShapeFiles() throws RemoteException { File directory = new File(uploadPath); List<UploadedFile> list = new ArrayList<UploadedFile>(); File[] listOfFiles = null; try { if( directory.isDirectory() ) { listOfFiles = directory.listFiles(new UploadFileFilter("shp")); for( File file : listOfFiles ) { if( file.isFile() ) { UploadedFile uploadedFile = new UploadedFile( file.getName(), file.length(), file.lastModified() ); list.add(uploadedFile); } } } } catch(Exception e) { throw new RemoteException(e.getMessage()); } int n = list.size(); return list.toArray(new UploadedFile[n]); } /** * Read a list of csv files and return common header columns. * * @param A * list of csv file names. * @return A list of common header files or null if none exist encoded using * */ synchronized public String[] getCSVColumnNames(String csvFile) throws RemoteException { String[] headerLine = null; try { String csvData = org.apache.commons.io.FileUtils.readFileToString(new File(uploadPath, csvFile)); // Read first line only (header line). int index = csvData.indexOf("\r"); int index2 = csvData.indexOf("\n"); if (index2 < index && index2 >= 0) index = index2; String header = index < 0 ? csvData : csvData.substring(0, index); csvData = null; // don't need this in memory anymore String[][] rows = CSVParser.defaultParser.parseCSV(header); headerLine = rows[0]; } catch (FileNotFoundException e) { throw new RemoteException(e.getMessage()); } catch (Exception e) { throw new RemoteException(e.getMessage()); } return headerLine; } synchronized public String[] listDBFFileColumns(String dbfFileName) throws RemoteException { try { List<String> names = DBFUtils.getAttributeNames(new File(uploadPath, correctFileNameCase(dbfFileName))); return ListUtils.toStringArray(names); } catch (IOException e) { throw new RemoteException("IOException", e); } } synchronized private String correctFileNameCase(String fileName) { try { File directory = new File(uploadPath); if( directory.isDirectory() ) { for( String file : directory.list() ) { if( file.equalsIgnoreCase(fileName) ) return file; } } } catch( Exception e ) {} return fileName; } /** * getSortedUniqueValues * * @param values * A list of string values which may contain duplicates. * @param moveEmptyStringToEnd * If set to true and "" is at the front of the list, "" is moved * to the end. * @return A sorted list of unique values found in the given list. */ private List<String> getSortedUniqueValues(List<String> values, boolean moveEmptyStringToEnd) { Set<String> uniqueValues = new HashSet<String>(); uniqueValues.addAll(values); Vector<String> result = new Vector<String>(uniqueValues); Collections.sort(result, String.CASE_INSENSITIVE_ORDER); // if empty string is at beginning of sorted list, move it to the end of // the list if (moveEmptyStringToEnd && result.size() > 0 && result.get(0).equals("")) result.add(result.remove(0)); return result; } // /////////////////////////////////////////// // functions for importing data // /////////////////////////////////////////// /** * This function accepts an uploaded file. * @param fileName The name of the file. * @param content The file content. 
*/ public void uploadFile(String fileName, InputStream content) throws RemoteException { // make sure the upload folder exists (new File(uploadPath)).mkdirs(); String filePath = uploadPath + fileName; try { FileUtils.copy(content, new FileOutputStream(filePath)); } catch (Exception e) { throw new RemoteException("File upload failed.", e); } } /** * Return a list of files existing in the csv upload folder on the server. * * @return A list of files existing in the csv upload folder. */ synchronized public List<String> getUploadedFileNames() throws RemoteException { File uploadFolder = new File(uploadPath); File[] files = null; List<String> listOfFiles = new ArrayList<String>(); try { files = uploadFolder.listFiles(); for (File file : files) { if (file.isFile()) { // System.out.println(file.getName()); listOfFiles.add(file.getName().toString()); } } } catch (SecurityException e) { throw new RemoteException("Permission error reading directory."); } return listOfFiles; } private boolean valueIsInt(String value) { boolean retVal = true; try { Integer.parseInt(value); } catch (Exception e) { retVal = false; } return retVal; } private boolean valueIsDouble(String value) { boolean retVal = true; try { Double.parseDouble(value); } catch (Exception e) { retVal = false; } return retVal; } private boolean valueHasLeadingZero(String value) { boolean temp = valueIsInt(value); if (!temp) return false; if (value.length() < 2) return false; if (value.charAt(0) == '0' && value.charAt(1) != '.') return true; return false; } synchronized public String importCSV(String connectionName, String password, String csvFile, String csvKeyColumn, String csvSecondaryKeyColumn, String sqlSchema, String sqlTable, boolean sqlOverwrite, String configDataTableName, boolean configOverwrite, String configGeometryCollectionName, String configKeyType, String[] nullValues, String[] filterColumnNames) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); ConnectionInfo connInfo = config.getConnectionInfo(connectionName); if (sqlOverwrite && !connInfo.is_superuser) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite SQL tables.", connectionName)); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, configDataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite DataTable \"%s\".", connectionName, configDataTableName)); Connection conn = null; Statement stmt = null; try { conn = SQLConfigUtils.getConnection(config, connectionName); sqlTable = sqlTable.toLowerCase(); // fix for MySQL running under Linux String[] columnNames = null; String[] originalColumnNames = null; int fieldLengths[] = null; // Load the CSV file and reformat it String formatted_CSV_path = tempPath + "temp.csv"; int[] types = null; int i = 0; int j = 0; int num = 1; String outputNullValue = SQLUtils.getCSVNullValue(conn); boolean ignoreKeyColumnQueries = false; String csvData = org.apache.commons.io.FileUtils.readFileToString(new File(uploadPath, csvFile)); String[][] rows = CSVParser.defaultParser.parseCSV(csvData); if (rows.length == 0) throw new RemoteException("CSV file is empty: " + csvFile); // if there is no key column, we need to append a unique Row ID column if ("".equals(csvKeyColumn)) { ignoreKeyColumnQueries = true; // get the maximum number of rows in a column int maxNumRows = 0; for (i = 0; i < rows.length; ++i) { String[] column = rows[i]; int numRows = column.length; // this includes the column name in row 
0 if (numRows > maxNumRows) maxNumRows = numRows; } csvKeyColumn = "row_id"; for (i = 0; i < rows.length; ++i) { String[] row = rows[i]; String[] newRow = new String[row.length + 1]; System.arraycopy(row, 0, newRow, 0, row.length); if (i == 0) newRow[newRow.length - 1] = csvKeyColumn; else newRow[newRow.length - 1] = "row" + i; rows[i] = newRow; } } // Read the column names columnNames = rows[0]; originalColumnNames = new String[columnNames.length]; fieldLengths = new int[columnNames.length]; // converge the column name to meet the requirement of mySQL. for (i = 0; i < columnNames.length; i++) { String colName = columnNames[i]; if (colName.length() == 0) colName = "Column " + (i+1); // save original column name originalColumnNames[i] = colName; // if the column name has "/", "\", ".", "<", ">". colName = colName.replace("/", ""); colName = colName.replace("\\", ""); colName = colName.replace(".", ""); colName = colName.replace("<", "less than"); colName = colName.replace(">", "more than"); // if the length of the column name is longer than the 64-character limit int maxColNameLength = 64; int halfMaxColNameLength = 30; boolean isKeyCol = csvKeyColumn.equalsIgnoreCase(colName); if (colName.length() >= maxColNameLength) { colName = colName.replace(" ", ""); if (colName.length() >= maxColNameLength) { colName = colName.substring(0, halfMaxColNameLength) + "_" + colName.substring(colName.length() - halfMaxColNameLength); } } // copy new name if key column changed if (isKeyCol) csvKeyColumn = colName; // if find the column names are repetitive for (j = 0; j < i; j++) { if (colName.equalsIgnoreCase(columnNames[j])) { colName += "_" + num; num++; } } // save the new name columnNames[i] = colName; } // Initialize the types of columns as int (will be changed inside loop if necessary) types = new int[columnNames.length]; for (i = 0; i < columnNames.length; i++) { fieldLengths[i] = 0; types[i] = IntType; } // Read the data and get the column type for (int iRow = 1; iRow < rows.length; iRow++) { String[] nextLine = rows[iRow]; // Format each line for (i = 0; i < columnNames.length && i < nextLine.length; i++) { // keep track of the longest String value found in this column fieldLengths[i] = Math.max(fieldLengths[i], nextLine[i].length()); // Change missing data into NULL, later add more cases to deal with missing data. String[] nullValuesStandard = new String[]{"", ".", "..", " ", "-", "\"NULL\"", "NULL", "NaN"}; for(String[] values : new String[][] {nullValuesStandard, nullValues }) { for (String nullValue : values) { if (nextLine[i].equalsIgnoreCase(nullValue)) { nextLine[i] = outputNullValue; break; } } } if (nextLine[i].equals(outputNullValue)) continue; // 3.3.2 is a string, update the type. 
// 04 is a string (but Integer.parseInt would not throw an exception) try { String value = nextLine[i]; while (value.indexOf(',') > 0) value = value.replace(",", ""); // valid input format // if the value is an int or double with an extraneous leading zero, it's defined to be a string if (valueHasLeadingZero(value)) types[i] = StringType; // if the type was determined to be a string before (or just above), continue if (types[i] == StringType) continue; // if the type is an int if (types[i] == IntType) { // check that it's still an int if (valueIsInt(value)) continue; } // it either wasn't an int or is no longer an int, check for a double if (valueIsDouble(value)) { types[i] = DoubleType; continue; } // if we're down here, it must be a string types[i] = StringType; } catch (Exception e) { // this shouldn't happen, but it's just to be safe types[i] = StringType; } } } // now we need to remove commas from any numeric values because the SQL drivers don't like it for (int iRow = 1; iRow < rows.length; iRow++) { String[] nextLine = rows[iRow]; // Format each line for (i = 0; i < columnNames.length && i < nextLine.length; i++) { String value = nextLine[i]; if (types[i] == IntType || types[i] == DoubleType) { while (value.indexOf(",") >= 0) value = value.replace(",", ""); nextLine[i] = value; } } } // save modified CSV BufferedWriter out = new BufferedWriter(new FileWriter(formatted_CSV_path)); boolean quoteEmptyStrings = outputNullValue.length() > 0; String temp = CSVParser.defaultParser.createCSVFromArrays(rows, quoteEmptyStrings); out.write(temp); out.close(); // Import the CSV file into SQL. // Drop the table if it exists. if (sqlOverwrite) { SQLUtils.dropTableIfExists(conn, sqlSchema, sqlTable); } else { if (ListUtils.findIgnoreCase(sqlTable, getTablesList(connectionName, sqlSchema)) >= 0) throw new RemoteException("CSV not imported.\nSQL table already exists."); } if (!configOverwrite) { if (ListUtils.findIgnoreCase(configDataTableName, config.getDataTableNames(null)) >= 0) throw new RemoteException(String.format( "CSV not imported.\nDataTable \"%s\" already exists in the configuration.", configDataTableName)); } // create a list of the column types List<String> columnTypesList = new Vector<String>(); for (i = 0; i < columnNames.length; i++) { if (types[i] == StringType || csvKeyColumn.equalsIgnoreCase(columnNames[i])) columnTypesList.add(SQLUtils.getVarcharTypeString(conn, fieldLengths[i])); else if (types[i] == IntType) columnTypesList.add(SQLUtils.getIntTypeString(conn)); else if (types[i] == DoubleType) columnTypesList.add(SQLUtils.getDoubleTypeString(conn)); } // create the table SQLUtils.createTable(conn, sqlSchema, sqlTable, Arrays.asList(columnNames), columnTypesList); // import the data SQLUtils.copyCsvToDatabase(conn, formatted_CSV_path, sqlSchema, sqlTable); return addConfigDataTable(config, configOverwrite, configDataTableName, connectionName, configGeometryCollectionName, configKeyType, csvKeyColumn, csvSecondaryKeyColumn, originalColumnNames, columnNames, sqlSchema, sqlTable, ignoreKeyColumnQueries, filterColumnNames); } catch (RemoteException e) // required since RemoteException extends IOException { throw e; } catch (SQLException e) { throw new RemoteException("Import failed.", e); } catch (FileNotFoundException e) { e.printStackTrace(); throw new RemoteException("File not found: " + csvFile); } catch (IOException e) { e.printStackTrace(); throw new RemoteException("Cannot read file: " + csvFile); } finally { // close everything in reverse order SQLUtils.cleanup(stmt); 
SQLUtils.cleanup(conn); } } synchronized public String addConfigDataTableFromDatabase(String connectionName, String password, String schemaName, String tableName, String keyColumnName, String secondaryKeyColumnName, String configDataTableName, boolean configOverwrite, String geometryCollectionName, String keyType, String[] filterColumnNames) throws RemoteException { // use lower case sql table names (fix for mysql linux problems) //tableName = tableName.toLowerCase(); ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); String[] columnNames = getColumnsList(connectionName, schemaName, tableName).toArray(new String[0]); return addConfigDataTable(config, configOverwrite, configDataTableName, connectionName, geometryCollectionName, keyType, keyColumnName, secondaryKeyColumnName, columnNames, columnNames, schemaName, tableName, false, filterColumnNames); } synchronized private String addConfigDataTable(ISQLConfig config, boolean configOverwrite, String configDataTableName, String connectionName, String geometryCollectionName, String keyType, String keyColumnName, String secondarySqlKeyColumn, String[] configColumnNames, String[] sqlColumnNames, String sqlSchema, String sqlTable, boolean ignoreKeyColumnQueries, String[] filterColumnNames) throws RemoteException { ConnectionInfo info = config.getConnectionInfo(connectionName); if (info == null) throw new RemoteException(String.format("Connection named \"%s\" does not exist.", connectionName)); String dbms = info.dbms; if (sqlColumnNames == null) sqlColumnNames = new String[0]; // if key column is actually the name of a column, put quotes around it. // otherwise, don't. int iKey = ListUtils.findIgnoreCase(keyColumnName, sqlColumnNames); int iSecondaryKey = ListUtils.findIgnoreCase(secondarySqlKeyColumn, sqlColumnNames); String sqlKeyColumn; // save the original column name if (iKey >= 0) { sqlKeyColumn = keyColumnName; // before quoting, save the column name keyColumnName = SQLUtils.quoteSymbol(dbms, sqlColumnNames[iKey]); } else { sqlKeyColumn = SQLUtils.unquoteSymbol(dbms, keyColumnName); // get the original columnname } if (iSecondaryKey >= 0) secondarySqlKeyColumn = SQLUtils.quoteSymbol(dbms, sqlColumnNames[iSecondaryKey]); // Write SQL statements into sqlconfig. 
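// Sketch of the query generated for each data column further below (identifiers are
// illustrative): SELECT <quoted key column>,<quoted data column> FROM <quoted schema.table>
// When filter columns are given, one such query is produced per unique combination of
// filter values, with a WHERE clause appended by buildFilteredQuery().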
if (!configOverwrite) { if (ListUtils.findIgnoreCase(configDataTableName, config.getDataTableNames(null)) >= 0) throw new RemoteException(String.format("DataTable \"%s\" already exists in the configuration.", configDataTableName)); } else { if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, configDataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite DataTable \"%s\".", connectionName, configDataTableName)); } // connect to database, generate and test each query before modifying // config file List<String> titles = new LinkedList<String>(); List<String> queries = new Vector<String>(); List<String> dataTypes = new Vector<String>(); String query = null; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, connectionName); SQLResult filteredValues = null; if (filterColumnNames != null && filterColumnNames.length > 0) { // get a list of unique combinations of filter values String columnList = ""; for (int i = 0; i < filterColumnNames.length; i++) { if (i > 0) columnList += ","; columnList += SQLUtils.quoteSymbol(conn, filterColumnNames[i]); } query = String.format( "select distinct %s from %s order by %s", columnList, SQLUtils.quoteSchemaTable(conn, sqlSchema, sqlTable), columnList ); filteredValues = SQLUtils.getRowSetFromQuery(conn, query); // System.out.println(query); // System.out.println(filteredValues); } for (int iCol = 0; iCol < sqlColumnNames.length; iCol++) { String sqlColumn = sqlColumnNames[iCol]; // System.out.println("columnName: " + columnName + "\tkeyColumnName: " + keyColumnName + "\toriginalKeyCol: " + originalKeyColumName); if (ignoreKeyColumnQueries && sqlKeyColumn.equals(sqlColumn)) continue; sqlColumn = SQLUtils.quoteSymbol(dbms, sqlColumn); // hack if (secondarySqlKeyColumn != null && secondarySqlKeyColumn.length() > 0) sqlColumn += "," + secondarySqlKeyColumn; // generate column query query = String.format("SELECT %s,%s FROM %s", keyColumnName, sqlColumn, SQLUtils.quoteSchemaTable(dbms, sqlSchema, sqlTable)); if (filteredValues != null) { // generate one query per unique filter value combination for (int iRow = 0 ; iRow < filteredValues.rows.length ; iRow++ ) { String filteredQuery = buildFilteredQuery(conn, query, filteredValues, iRow); titles.add(buildFilteredColumnTitle(configColumnNames[iCol], filteredValues, iRow)); queries.add(filteredQuery); dataTypes.add(testQueryAndGetDataType(conn, filteredQuery)); } } else { titles.add(configColumnNames[iCol]); queries.add(query); dataTypes.add(testQueryAndGetDataType(conn, query)); } } // done generating queries // generate config DataTable entry createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_DATATABLE, configDataTableName); config.removeDataTable(configDataTableName); Map<String, String> metadata = new HashMap<String, String>(); metadata.put(Metadata.DATATABLE.toString(), configDataTableName); metadata.put(Metadata.KEYTYPE.toString(), keyType); metadata.put(Metadata.GEOMETRYCOLLECTION.toString(), geometryCollectionName); int numberSqlColumns = titles.size(); for (int i = 0; i < numberSqlColumns; i++) { metadata.put(Metadata.NAME.toString(), titles.get(i)); metadata.put(Metadata.DATATYPE.toString(), dataTypes.get(i)); AttributeColumnInfo attrInfo = new AttributeColumnInfo(connectionName, queries.get(i), metadata); config.addAttributeColumn(attrInfo); } backupAndSaveConfig(config); } catch (SQLException e) { throw new RemoteException(String.format("Failed to add DataTable \"%s\" to the configuration.\n", configDataTableName), e); } 
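// RemoteException (thrown by the backup and query-test helpers above) is caught
// separately from SQLException so that both are re-wrapped with the DataTable name
// before reaching the caller.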
catch (RemoteException e) { throw new RemoteException(String.format("Failed to add DataTable \"%s\" to the configuration.\n", configDataTableName), e); } if (sqlColumnNames.length == 0) throw new RemoteException("No columns were found."); return String.format("DataTable \"%s\" was added to the configuration with %s generated attribute column queries.\n", configDataTableName, titles.size()); } private String testQueryAndGetDataType(Connection conn, String query) throws RemoteException { Statement stmt = null; ResultSet rs = null; DataType dataType = null; try { String dbms = conn.getMetaData().getDatabaseProductName(); if (!dbms.equalsIgnoreCase(SQLUtils.SQLSERVER) && !dbms.equalsIgnoreCase(SQLUtils.ORACLE)) query += " LIMIT 1"; stmt = conn.createStatement(); rs = stmt.executeQuery(query); dataType = DataType.fromSQLType(rs.getMetaData().getColumnType(2)); } catch (SQLException e) { throw new RemoteException("Unable to execute generated query:\n" + query, e); } finally { SQLUtils.cleanup(rs); SQLUtils.cleanup(stmt); } return dataType.toString(); } private String buildFilteredColumnTitle(String columnName, SQLResult filteredValues, int filteredValueRow) { String columnTitle = columnName + " ("; for (int j = 0 ; j < filteredValues.rows[filteredValueRow].length ; j++ ) { if (j > 0) columnTitle += " "; boolean isNull = filteredValues.rows[filteredValueRow][j] == null; String value; if (isNull) value = "NULL"; else value = filteredValues.rows[filteredValueRow][j].toString(); columnTitle += isNull ? "NULL" : value; } columnTitle += ")"; return columnTitle; } private String buildFilteredQuery(Connection conn, String unfilteredQuery, SQLResult filteredValues, int filteredValueRow) throws IllegalArgumentException, SQLException { String query = unfilteredQuery + " where "; for (int j = 0 ; j < filteredValues.rows[filteredValueRow].length ; j++ ) { if (j > 0) query += " and "; boolean isNull = filteredValues.rows[filteredValueRow][j] == null; String value; if (isNull) value = "NULL"; else value = filteredValues.rows[filteredValueRow][j].toString(); query += String.format( "%s=%s", SQLUtils.quoteSymbol(conn, filteredValues.columnNames[j]), isNull ? "NULL" : SQLUtils.quoteString(conn, value) ); } return query; } /** * The following functions involve getting shapes into the database and into * the config file. 
*/ synchronized public String convertShapefileToSQLStream(String configConnectionName, String password, String[] fileNameWithoutExtension, String[] keyColumns, String sqlSchema, String sqlTablePrefix, boolean sqlOverwrite, String configGeometryCollectionName, boolean configOverwrite, String configKeyType, String projectionSRS, String[] nullValues) throws RemoteException { // use lower case sql table names (fix for mysql linux problems) sqlTablePrefix = sqlTablePrefix.toLowerCase(); ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); ConnectionInfo connInfo = config.getConnectionInfo(configConnectionName); if (sqlOverwrite && !connInfo.is_superuser) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite SQL tables.", configConnectionName)); if (!SQLConfigUtils.userCanModifyGeometryCollection(config, configConnectionName, configGeometryCollectionName)) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite GeometryCollection \"%s\".", configConnectionName, configGeometryCollectionName)); if (!configOverwrite) { if (ListUtils.findIgnoreCase(configGeometryCollectionName, config.getGeometryCollectionNames(null)) >= 0) throw new RemoteException(String.format( "Shapes not imported. SQLConfig geometryCollection \"%s\" already exists.", configGeometryCollectionName)); } String dbfTableName = sqlTablePrefix + "_dbfdata"; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); // store dbf data to database storeDBFDataToDatabase(configConnectionName, password, fileNameWithoutExtension, sqlSchema, dbfTableName, sqlOverwrite, nullValues); GeometryStreamConverter converter = new GeometryStreamConverter( new SQLGeometryStreamDestination(conn, sqlSchema, sqlTablePrefix, sqlOverwrite) ); for (String file : fileNameWithoutExtension) { // convert shape data to streaming sql format String shpfile = uploadPath + file + ".shp"; SHPGeometryStreamUtils.convertShapefile(converter, shpfile, Arrays.asList(keyColumns)); } converter.flushAndCommitAll(); } catch (Exception e) { e.printStackTrace(); throw new RemoteException("Shapefile import failed", e); } finally { SQLUtils.cleanup(conn); } String fileList = Arrays.asList(fileNameWithoutExtension).toString(); if (fileList.length() > 103) fileList = fileList.substring(0, 50) + "..." 
+ fileList.substring(fileList.length() - 50); String importNotes = String.format("file: %s, keyColumns: %s", fileList, keyColumns); // get key column SQL code String keyColumnsString; if (keyColumns.length == 1) { keyColumnsString = keyColumns[0]; } else { keyColumnsString = "CONCAT("; for (int i = 0; i < keyColumns.length; i++) { if (i > 0) keyColumnsString += ","; keyColumnsString += "CAST(" + keyColumns[i] + " AS CHAR)"; } keyColumnsString += ")"; } // add SQL statements to sqlconfig String[] columnNames = getColumnsList(configConnectionName, sqlSchema, dbfTableName).toArray(new String[0]); String resultAddSQL = addConfigDataTable(config, configOverwrite, configGeometryCollectionName, configConnectionName, configGeometryCollectionName, configKeyType, keyColumnsString, null, columnNames, columnNames, sqlSchema, dbfTableName, false, null); return resultAddSQL + "\n\n" + addConfigGeometryCollection(configOverwrite, configConnectionName, password, configGeometryCollectionName, configKeyType, sqlSchema, sqlTablePrefix, projectionSRS, importNotes); } synchronized public String storeDBFDataToDatabase(String configConnectionName, String password, String[] fileNameWithoutExtension, String sqlSchema, String sqlTableName, boolean sqlOverwrite, String[] nullValues) throws RemoteException { // use lower case sql table names (fix for mysql linux problems) sqlTableName = sqlTableName.toLowerCase(); ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); ConnectionInfo connInfo = config.getConnectionInfo(configConnectionName); if (sqlOverwrite && !connInfo.is_superuser) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite SQL tables.", configConnectionName)); Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); File[] files = new File[fileNameWithoutExtension.length]; for (int i = 0; i < files.length; i++) files[i] = new File(uploadPath + fileNameWithoutExtension[i] + ".dbf"); DBFUtils.storeAttributes(files, conn, sqlSchema, sqlTableName, sqlOverwrite, nullValues); } catch (Exception e) { e.printStackTrace(); throw new RemoteException("DBF import failed", e); } finally { SQLUtils.cleanup(conn); } // String importNotes = String.format("file: %s, keyColumns: %s", // fileNameWithoutExtension, keyColumns); return "DBF Data stored successfully"; } synchronized public String addConfigGeometryCollection(boolean configOverwrite, String configConnectionName, String password, String configGeometryCollectionName, String configKeyType, String sqlSchema, String sqlTablePrefix, String projectionSRS, String importNotes) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(configConnectionName, password); if (!configOverwrite) { if (ListUtils.findIgnoreCase(configGeometryCollectionName, config.getGeometryCollectionNames(null)) >= 0) throw new RemoteException(String.format("GeometryCollection \"%s\" already exists in the configuration.", configGeometryCollectionName)); } else { if (!SQLConfigUtils.userCanModifyGeometryCollection(config, configConnectionName, configGeometryCollectionName)) throw new RemoteException(String.format("User \"%s\" does not have permission to overwrite GeometryCollection \"%s\".", configConnectionName, configGeometryCollectionName)); } // add geometry collection GeometryCollectionInfo info = new GeometryCollectionInfo(); info.name = configGeometryCollectionName; info.connection = configConnectionName; info.schema = sqlSchema; info.tablePrefix = sqlTablePrefix; info.keyType 
= configKeyType; info.importNotes = importNotes; info.projection = projectionSRS; createConfigEntryBackup(config, ISQLConfig.ENTRYTYPE_GEOMETRYCOLLECTION, info.name); config.removeGeometryCollection(info.name); config.addGeometryCollection(info); backupAndSaveConfig(config); return String.format("GeometryCollection \"%s\" was added to the configuration", configGeometryCollectionName); } // ////////////////////////////////////////////// // functions for managing dublin core metadata // ////////////////////////////////////////////// /** * Adds Dublin Core Elements to the metadata store in association with the * given dataset.. * * @param connectionName * the name of the connection to use * @param password * the password for the given connection * @param dataTableName * the name of the dataset to associate the given elements with * @param elements * the key-value pairs defining the new Dublin Core elements to * add. Keys are expected to be like "dc:title" and * "dc:description", values are expected to be Strings. * @throws RemoteException */ synchronized public void addDCElements(String connectionName, String password, String dataTableName, Map<String, Object> elements) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify DataTable \"%s\".", connectionName, dataTableName)); DatabaseConfigInfo configInfo = config.getDatabaseConfigInfo(); String configConnectionName = configInfo.connection; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); } catch (SQLException e) { throw new RemoteException("addDCElements failed", e); } String schema = configInfo.schema; DublinCoreUtils.addDCElements(conn, schema, dataTableName, elements); // System.out.println("in addDCElements"); // int i = 0; // for (Map.Entry<String, Object> e : elements.entrySet()) // System.out.println(" elements[" + (i++) + "] = {" + e.getKey() // + " = " + e.getValue()); } /** * Queries the database for the Dublin Core metadata elements associated * with the data set with the given name and returns the result. The result * is returned as a Map whose keys are Dublin Core property names and whose * values are the values for those properties (for the given data set) * stored in the metadata store. * * If an error occurs, a map is returned with a single key-value pair whose * key is "error". 
*/ synchronized public DublinCoreElement[] listDCElements(String connectionName, String password, String dataTableName) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); DatabaseConfigInfo configInfo = config.getDatabaseConfigInfo(); String configConnectionName = configInfo.connection; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); } catch (SQLException e) { throw new RemoteException("listDCElements failed", e); } String schema = configInfo.schema; List<DublinCoreElement> list = DublinCoreUtils.listDCElements(conn, schema, dataTableName); int n = list.size(); return list.toArray(new DublinCoreElement[n]); // DublinCoreElement[] result = new DublinCoreElement[n]; // for (int i = 0; i < n; i++) // { // result[i] = list.get(i); // System.out.println("list.get(i).element = " + list.get(i).element + // " list.get(i).value = " + list.get(i).value); // } // return result; } /** * Deletes the specified metadata entries. */ synchronized public void deleteDCElements(String connectionName, String password, String dataTableName, List<Map<String, String>> elements) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify DataTable \"%s\".", connectionName, dataTableName)); DatabaseConfigInfo configInfo = config.getDatabaseConfigInfo(); String configConnectionName = configInfo.connection; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); } catch (SQLException e) { throw new RemoteException("deleteDCElements failed", e); } String schema = configInfo.schema; DublinCoreUtils.deleteDCElements(conn, schema, dataTableName, elements); } /** * Saves an edited metadata row to the server. 
*/ synchronized public void updateEditedDCElement(String connectionName, String password, String dataTableName, Map<String, String> object) throws RemoteException { ISQLConfig config = checkPasswordAndGetConfig(connectionName, password); if (!SQLConfigUtils.userCanModifyDataTable(config, connectionName, dataTableName)) throw new RemoteException(String.format("User \"%s\" does not have permission to modify DataTable \"%s\".", connectionName, dataTableName)); DatabaseConfigInfo configInfo = config.getDatabaseConfigInfo(); String configConnectionName = configInfo.connection; Connection conn = null; try { conn = SQLConfigUtils.getConnection(config, configConnectionName); } catch (SQLException e) { throw new RemoteException("updateEditedDCElement failed", e); } String schema = configInfo.schema; DublinCoreUtils.updateEditedDCElement(conn, schema, dataTableName, object); } synchronized public String saveReportDefinitionFile(String fileName, String fileContents) throws RemoteException { File reportDefFile; try { File docrootDir = new File(docrootPath); if (!docrootDir.exists()) throw new RemoteException("Unable to find docroot directory"); File reportsDir = new File(docrootDir, "\\WeaveReports"); if (!reportsDir.exists()) reportsDir.mkdir(); if (!reportsDir.exists()) throw new RemoteException("Unable to access reports directory"); reportDefFile = new File(reportsDir, fileName); BufferedWriter writer = new BufferedWriter(new FileWriter(reportDefFile)); writer.write(fileContents); writer.close(); } catch (Exception e) { throw new RemoteException("Error writing report definition file: " + fileName, e); } return "Successfully wrote the report definition file: " + reportDefFile.getAbsolutePath(); } }
AdminService: when a non-superuser has a docroot folder specified, they can now remove files.
WeaveServices/src/weave/servlets/AdminService.java
AdminService: when a non-superuser has a docroot folder specified, they can now remove files.
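For readers of the AdminService source above: the Dublin Core methods take a connection name, a password, a data table name, and element/value pairs keyed in the "dc:title" / "dc:description" style, and listDCElements() returns DublinCoreElement objects exposing element and value fields. The sketch below is only an illustrative caller under those assumptions; the connection name, password, table name, and the already-initialized AdminService instance are placeholders, not part of the Weave code.

import java.util.HashMap;
import java.util.Map;

public class DublinCoreExample {
    // Illustrative only: 'service' is assumed to be an already-initialized
    // weave.servlets.AdminService from the source above, and DublinCoreElement
    // is the result type returned by its listDCElements() method.
    static void roundTrip(AdminService service) throws Exception {
        // Build the key/value pairs the way the addDCElements() javadoc describes.
        Map<String, Object> elements = new HashMap<String, Object>();
        elements.put("dc:title", "Example data table");
        elements.put("dc:description", "Illustrative metadata only");

        // Store the metadata for a hypothetical table, then read it back.
        service.addDCElements("myConnection", "myPassword", "my_table", elements);
        DublinCoreElement[] stored = service.listDCElements("myConnection", "myPassword", "my_table");
        for (DublinCoreElement e : stored)
            System.out.println(e.element + " = " + e.value);
    }
}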
Java
agpl-3.0
293a336c2fd6eab7b12eb0ff354b91d27f064e2d
0
fviale/programming,fviale/programming,PaulKh/scale-proactive,ow2-proactive/programming,mnip91/proactive-component-monitoring,jrochas/scale-proactive,lpellegr/programming,paraita/programming,lpellegr/programming,acontes/scheduling,paraita/programming,jrochas/scale-proactive,PaulKh/scale-proactive,lpellegr/programming,ow2-proactive/programming,lpellegr/programming,jrochas/scale-proactive,acontes/programming,mnip91/programming-multiactivities,jrochas/scale-proactive,acontes/programming,acontes/scheduling,mnip91/programming-multiactivities,ow2-proactive/programming,acontes/scheduling,mnip91/proactive-component-monitoring,mnip91/proactive-component-monitoring,mnip91/programming-multiactivities,ow2-proactive/programming,mnip91/proactive-component-monitoring,ow2-proactive/programming,acontes/programming,PaulKh/scale-proactive,paraita/programming,lpellegr/programming,mnip91/programming-multiactivities,acontes/programming,PaulKh/scale-proactive,paraita/programming,mnip91/programming-multiactivities,jrochas/scale-proactive,fviale/programming,lpellegr/programming,acontes/programming,acontes/programming,acontes/scheduling,mnip91/proactive-component-monitoring,paraita/programming,acontes/scheduling,fviale/programming,PaulKh/scale-proactive,jrochas/scale-proactive,PaulKh/scale-proactive,ow2-proactive/programming,fviale/programming,mnip91/programming-multiactivities,acontes/programming,mnip91/proactive-component-monitoring,jrochas/scale-proactive,paraita/programming,fviale/programming,acontes/scheduling,acontes/scheduling,PaulKh/scale-proactive
package org.objectweb.proactive.ic2d.gui.jobmonitor; import java.rmi.registry.LocateRegistry; import java.rmi.registry.Registry; import java.util.*; import org.objectweb.proactive.core.body.rmi.RemoteBodyAdapter; import org.objectweb.proactive.core.runtime.ProActiveRuntime; import org.objectweb.proactive.core.runtime.rmi.RemoteProActiveRuntime; import org.objectweb.proactive.core.runtime.rmi.RemoteProActiveRuntimeAdapter; import org.objectweb.proactive.ic2d.gui.jobmonitor.data.BasicMonitoredObject; import org.objectweb.proactive.ic2d.gui.jobmonitor.data.DataAssociation; public class NodeExploration implements JobMonitorConstants { private static final String PA_JVM = "PA_JVM"; private int maxDepth; private DataAssociation asso; private Vector filteredJobs; private Map aos; private Set visitedVM; private Map runtimes; public NodeExploration(DataAssociation asso, Vector filteredJobs) { this.maxDepth = 10; this.asso = asso; this.filteredJobs = filteredJobs; this.aos = new HashMap(); this.runtimes = new HashMap(); } public int getMaxDepth() { return maxDepth; } public void setMaxDepth(int maxDepth) { if (maxDepth > 0) { this.maxDepth = maxDepth; } } /* url : "//host/object" */ private ProActiveRuntime resolveURL(String url) throws Exception { StringTokenizer tokenizer = new StringTokenizer(url, "/"); String host = tokenizer.nextToken(); String name = tokenizer.nextToken(); Registry registry = LocateRegistry.getRegistry(host); RemoteProActiveRuntime r = (RemoteProActiveRuntime) registry.lookup(name); return new RemoteProActiveRuntimeAdapter(r); } private ProActiveRuntime urlToRuntime(String url) throws Exception { ProActiveRuntime rt = (ProActiveRuntime) runtimes.get(url); if (rt != null) { return rt; } rt = resolveURL(url); if (rt != null) { runtimes.put(url, rt); } return rt; } private List getKnownRuntimes(ProActiveRuntime from) throws Exception { ProActiveRuntime[] registered; registered = from.getProActiveRuntimes(); List known = new LinkedList(Arrays.asList(registered)); String[] parents = from.getParents(); for (int i = 0; i < parents.length; i++) { ProActiveRuntime rt = urlToRuntime(parents[i]); if (rt != null) { known.add(urlToRuntime(parents[i])); } } return known; } public void exploreHost(String hostname, int port) throws Exception { try { visitedVM = new TreeSet(); Registry registry = LocateRegistry.getRegistry(hostname, port); String[] list = registry.list(); for (int idx = 0; idx < list.length; ++idx) { String id = list[idx]; if (id.indexOf(PA_JVM) != -1) { RemoteProActiveRuntime r = (RemoteProActiveRuntime) registry.lookup(id); List x = new ArrayList(); ProActiveRuntime part = new RemoteProActiveRuntimeAdapter(r); x.add(part); ProActiveRuntime[] runtimes = r.getProActiveRuntimes(); x.addAll(Arrays.asList(runtimes)); for (int i = 0, size = x.size(); i < size; ++i) handleProActiveRuntime((ProActiveRuntime) x.get(i), 1); } } } catch (Exception e) { throw e; } finally { visitedVM = null; } } private void addChild(int fromKey, String fromName, int toKey, String toName) { asso.addChild(BasicMonitoredObject.create(fromKey, fromName), BasicMonitoredObject.create(toKey, toName)); } private void handleProActiveRuntime(ProActiveRuntime pr, int depth) throws Exception { if (pr instanceof RemoteProActiveRuntime && !(pr instanceof RemoteProActiveRuntimeAdapter)) { pr = new RemoteProActiveRuntimeAdapter((RemoteProActiveRuntime) pr); } String vmName = pr.getVMInformation().getName(); if (isJobFiltered(pr.getJobID()) || visitedVM.contains(vmName)) { return; } visitedVM.add(vmName); String jobId 
= pr.getJobID(); String hostname = pr.getVMInformation().getInetAddress() .getCanonicalHostName(); addChild(HOST, hostname, JVM, vmName); addChild(JOB, pr.getJobID(), JVM, vmName); String[] nodes = pr.getLocalNodeNames(); // System.out.println ("Found " + nodes.length + " nodes on this runtime"); for (int i = 0; i < nodes.length; ++i) { String nodeName = nodes[i]; String vnName = pr.getVNName(nodeName); ArrayList activeObjects = null; activeObjects = pr.getActiveObjects(nodeName); addChild(JVM, vmName, NODE, nodeName); addChild(JOB, pr.getJobID(pr.getURL() + "/" + nodeName), NODE, nodeName); if (vnName != null) { addChild(VN, vnName, NODE, nodeName); // Currently broken in ProActiveRuntimeImpl // asso.addChild(JOB, pr.getVirtualNode(vnName).getJobID(), VN, vnName); } if (activeObjects != null) { handleActiveObjects(nodeName, activeObjects); } } if (depth < maxDepth) { List known = getKnownRuntimes(pr); Iterator iter = known.iterator(); while (iter.hasNext()) handleProActiveRuntime((ProActiveRuntime) iter.next(), depth + 1); } } private void handleActiveObjects(String nodeName, ArrayList activeObjects) { for (int i = 0, size = activeObjects.size(); i < size; ++i) { ArrayList aoWrapper = (ArrayList) activeObjects.get(i); RemoteBodyAdapter rba = (RemoteBodyAdapter) aoWrapper.get(0); // System.out.println ("Active object " + (i + 1) + " / " + size + " class: " + aoWrapper.get (1)); String className = (String) aoWrapper.get(1); if (className.equalsIgnoreCase( "org.objectweb.proactive.ic2d.spy.Spy")) { continue; } className = className.substring(className.lastIndexOf(".") + 1); String aoName = (String) aos.get(rba.getID()); if (aoName == null) { aoName = className + "#" + (aos.size() + 1); aos.put(rba.getID(), aoName); } addChild(NODE, nodeName, AO, aoName); // The Body/Job modifications are not yet committed // addChild(JOB, rba.getJobID(), AO, aoName); } } private boolean isJobFiltered(String jobId) { for (int i = 0, size = filteredJobs.size(); i < size; ++i) { String job = (String) filteredJobs.get(i); if (job.equalsIgnoreCase(jobId)) { return true; } } return false; } }
src/org/objectweb/proactive/ic2d/gui/jobmonitor/NodeExploration.java
package org.objectweb.proactive.ic2d.gui.jobmonitor; import java.rmi.registry.LocateRegistry; import java.rmi.registry.Registry; import java.util.*; import org.objectweb.proactive.core.body.rmi.RemoteBodyAdapter; import org.objectweb.proactive.core.runtime.ProActiveRuntime; import org.objectweb.proactive.core.runtime.rmi.RemoteProActiveRuntime; import org.objectweb.proactive.core.runtime.rmi.RemoteProActiveRuntimeAdapter; import org.objectweb.proactive.ic2d.gui.jobmonitor.data.BasicMonitoredObject; import org.objectweb.proactive.ic2d.gui.jobmonitor.data.DataAssociation; public class NodeExploration implements JobMonitorConstants { private static final String PA_JVM = "PA_JVM"; private int maxDepth; private DataAssociation asso; private Vector filteredJobs; private Map aos; private Set visitedVM; private Map runtimes; public NodeExploration(DataAssociation asso, Vector filteredJobs) { this.maxDepth = 10; this.asso = asso; this.filteredJobs = filteredJobs; this.aos = new HashMap(); this.runtimes = new HashMap(); } public int getMaxDepth() { return maxDepth; } public void setMaxDepth(int maxDepth) { if (maxDepth > 0) { this.maxDepth = maxDepth; } } /* url : "//host/object" */ private ProActiveRuntime resolveURL(String url) throws Exception { StringTokenizer tokenizer = new StringTokenizer(url, "/"); String host = tokenizer.nextToken(); String name = tokenizer.nextToken(); Registry registry = LocateRegistry.getRegistry(host); RemoteProActiveRuntime r = (RemoteProActiveRuntime) registry.lookup(name); return new RemoteProActiveRuntimeAdapter(r); } private ProActiveRuntime urlToRuntime(String url) throws Exception { ProActiveRuntime rt = (ProActiveRuntime) runtimes.get(url); if (rt != null) { return rt; } rt = resolveURL(url); if (rt != null) { runtimes.put(url, rt); } return rt; } private List getKnownRuntimes(ProActiveRuntime from) throws Exception { ProActiveRuntime[] registered; registered = from.getProActiveRuntimes(); List known = new LinkedList(Arrays.asList(registered)); String[] parents = from.getParents(); for (int i = 0; i < parents.length; i++) { ProActiveRuntime rt = urlToRuntime(parents[i]); if (rt != null) { known.add(urlToRuntime(parents[i])); } } return known; } public void exploreHost(String hostname, int port) throws Exception { try { visitedVM = new TreeSet(); Registry registry = LocateRegistry.getRegistry(hostname, port); String[] list = registry.list(); for (int idx = 0; idx < list.length; ++idx) { String id = list[idx]; if (id.indexOf(PA_JVM) != -1) { RemoteProActiveRuntime r = (RemoteProActiveRuntime) registry.lookup(id); List x = new ArrayList(); ProActiveRuntime part = new RemoteProActiveRuntimeAdapter(r); x.add(part); ProActiveRuntime[] runtimes = r.getProActiveRuntimes(); x.addAll(Arrays.asList(runtimes)); for (int i = 0, size = x.size(); i < size; ++i) handleProActiveRuntime((ProActiveRuntime) x.get(i), 1); } } } catch (Exception e) { throw e; } finally { visitedVM = null; } } private void addChild(int fromKey, String fromName, int toKey, String toName) { asso.addChild(BasicMonitoredObject.create(fromKey, fromName), BasicMonitoredObject.create(toKey, toName)); } private void handleProActiveRuntime(ProActiveRuntime pr, int depth) throws Exception { if (pr instanceof RemoteProActiveRuntime && !(pr instanceof RemoteProActiveRuntimeAdapter)) { pr = new RemoteProActiveRuntimeAdapter((RemoteProActiveRuntime) pr); } String vmName = pr.getVMInformation().getName(); if (isJobFiltered(pr.getJobID()) || visitedVM.contains(vmName)) { return; } visitedVM.add(vmName); String jobId 
= pr.getJobID(); String hostname = pr.getVMInformation().getInetAddress() .getCanonicalHostName(); addChild(HOST, hostname, JVM, vmName); addChild(JOB, pr.getJobID(), JVM, vmName); String[] nodes = pr.getLocalNodeNames(); // System.out.println ("Found " + nodes.length + " nodes on this runtime"); for (int i = 0; i < nodes.length; ++i) { String nodeName = nodes[i]; String vnName = pr.getVNName(nodeName); ArrayList activeObjects = null; activeObjects = pr.getActiveObjects(nodeName); addChild(JVM, vmName, NODE, nodeName); addChild(JOB, pr.getJobID(pr.getURL() + "/" + nodeName), NODE, nodeName); if (vnName != null) { addChild(VN, vnName, NODE, nodeName); // Currently broken in ProActiveRuntimeImpl // asso.addChild(JOB, pr.getVirtualNode(vnName).getJobID(), VN, vnName); } if (activeObjects != null) { handleActiveObjects(nodeName, activeObjects); } } if (depth < maxDepth) { List known = getKnownRuntimes(pr); Iterator iter = known.iterator(); while (iter.hasNext()) handleProActiveRuntime((ProActiveRuntime) iter.next(), depth + 1); } } private void handleActiveObjects(String nodeName, ArrayList activeObjects) { for (int i = 0, size = activeObjects.size(); i < size; ++i) { ArrayList aoWrapper = (ArrayList) activeObjects.get(i); RemoteBodyAdapter rba = (RemoteBodyAdapter) aoWrapper.get(0); // System.out.println ("Active object " + (i + 1) + " / " + size + " class: " + aoWrapper.get (1)); String className = (String) aoWrapper.get(1); if (className.equalsIgnoreCase( "org.objectweb.proactive.ic2d.spy.Spy")) { continue; } className = className.substring(className.lastIndexOf(".") + 1); String aoName = (String) aos.get(rba.getID()); if (aoName == null) { aoName = className + "#" + (aos.size() + 1); aos.put(rba.getID(), aoName); } addChild(NODE, nodeName, AO, aoName); addChild(JOB, rba.getJobID(), AO, aoName); } } private boolean isJobFiltered(String jobId) { for (int i = 0, size = filteredJobs.size(); i < size; ++i) { String job = (String) filteredJobs.get(i); if (job.equalsIgnoreCase(jobId)) { return true; } } return false; } }
The Body/Job modifications are not yet committed git-svn-id: 9146c88ff6d39b48099bf954d15d68f687b3fa69@1319 28e8926c-6b08-0410-baaa-805c5e19b8d6
src/org/objectweb/proactive/ic2d/gui/jobmonitor/NodeExploration.java
The Body/Job modifications are not yet committed
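The NodeExploration class in the record above is driven by constructing it with a DataAssociation and a Vector of filtered job IDs, optionally adjusting maxDepth, and then calling exploreHost() against an RMI registry; only registry entries whose name contains "PA_JVM" are followed. A minimal driver sketch under those assumptions is below; the host name, the port, the empty job filter, and the assumption that DataAssociation has a usable no-argument constructor are all illustrative.

import java.util.Vector;

public class NodeExplorationExample {
    public static void main(String[] args) throws Exception {
        // Assumption for illustration: DataAssociation can be created directly here.
        DataAssociation asso = new DataAssociation();
        Vector filteredJobs = new Vector();        // empty filter: no job IDs are skipped

        NodeExploration exploration = new NodeExploration(asso, filteredJobs);
        exploration.setMaxDepth(5);                // setMaxDepth() ignores values <= 0
        // Scan the rmiregistry on the given host and port; 1099 is the default RMI port.
        exploration.exploreHost("localhost", 1099);
    }
}

Since exploreHost() allocates a fresh visitedVM set on each call while results keep accumulating in the shared DataAssociation, repeated calls against different hosts build up one combined picture.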
Java
agpl-3.0
8b73b83ef44280b131c3087426e94553fa0dc528
0
flybird119/voltdb,deerwalk/voltdb,simonzhangsm/voltdb,ingted/voltdb,flybird119/voltdb,simonzhangsm/voltdb,migue/voltdb,wolffcm/voltdb,flybird119/voltdb,ingted/voltdb,ingted/voltdb,kumarrus/voltdb,zuowang/voltdb,wolffcm/voltdb,kobronson/cs-voltdb,zuowang/voltdb,paulmartel/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,ingted/voltdb,VoltDB/voltdb,ingted/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,kobronson/cs-voltdb,paulmartel/voltdb,kobronson/cs-voltdb,migue/voltdb,kobronson/cs-voltdb,simonzhangsm/voltdb,kumarrus/voltdb,deerwalk/voltdb,paulmartel/voltdb,deerwalk/voltdb,flybird119/voltdb,deerwalk/voltdb,kumarrus/voltdb,kobronson/cs-voltdb,migue/voltdb,creative-quant/voltdb,wolffcm/voltdb,deerwalk/voltdb,creative-quant/voltdb,paulmartel/voltdb,creative-quant/voltdb,migue/voltdb,kumarrus/voltdb,wolffcm/voltdb,flybird119/voltdb,VoltDB/voltdb,deerwalk/voltdb,zuowang/voltdb,kumarrus/voltdb,ingted/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,VoltDB/voltdb,zuowang/voltdb,migue/voltdb,kobronson/cs-voltdb,creative-quant/voltdb,ingted/voltdb,kobronson/cs-voltdb,VoltDB/voltdb,creative-quant/voltdb,zuowang/voltdb,kumarrus/voltdb,paulmartel/voltdb,kumarrus/voltdb,kumarrus/voltdb,simonzhangsm/voltdb,creative-quant/voltdb,flybird119/voltdb,VoltDB/voltdb,zuowang/voltdb,migue/voltdb,migue/voltdb,wolffcm/voltdb,deerwalk/voltdb,wolffcm/voltdb,wolffcm/voltdb,flybird119/voltdb,ingted/voltdb,VoltDB/voltdb,creative-quant/voltdb,wolffcm/voltdb,VoltDB/voltdb,paulmartel/voltdb,kobronson/cs-voltdb,flybird119/voltdb,paulmartel/voltdb,creative-quant/voltdb,zuowang/voltdb,migue/voltdb,zuowang/voltdb
/* This file is part of VoltDB. * Copyright (C) 2008-2010 VoltDB L.L.C. * * VoltDB is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * VoltDB is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.compiler; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.LineNumberReader; import java.io.PrintStream; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.hsqldb.HSQLInterface; import org.hsqldb.HSQLInterface.HSQLParseException; import org.voltdb.VoltType; import org.voltdb.catalog.Catalog; import org.voltdb.catalog.Column; import org.voltdb.catalog.ColumnRef; import org.voltdb.catalog.Constraint; import org.voltdb.catalog.ConstraintRef; import org.voltdb.catalog.Database; import org.voltdb.catalog.Index; import org.voltdb.catalog.MaterializedViewInfo; import org.voltdb.catalog.Table; import org.voltdb.compiler.VoltCompiler.VoltCompilerException; import org.voltdb.expressions.AbstractExpression; import org.voltdb.expressions.TupleValueExpression; import org.voltdb.planner.AbstractParsedStmt; import org.voltdb.planner.ParsedSelectStmt; import org.voltdb.types.ConstraintType; import org.voltdb.types.ExpressionType; import org.voltdb.types.IndexType; import org.voltdb.utils.BuildDirectoryUtils; import org.voltdb.utils.CatalogUtil; import org.voltdb.utils.Encoder; import org.voltdb.utils.StringInputStream; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; /** * Compiles schema (SQL DDL) text files and stores the results in a given catalog. 
* */ public class DDLCompiler { static final int MAX_COLUMNS = 1024; static final int MAX_ROW_SIZE = 1024 * 1024 * 2; HSQLInterface m_hsql; VoltCompiler m_compiler; String m_fullDDL = ""; HashMap<String, Column> columnMap = new HashMap<String, Column>(); HashMap<String, Index> indexMap = new HashMap<String, Index>(); HashMap<Table, String> matViewMap = new HashMap<Table, String>(); private class DDLStatement { String statement; int lineNo; } public DDLCompiler(VoltCompiler compiler, HSQLInterface hsql) { assert(hsql != null); this.m_hsql = hsql; this.m_compiler = compiler; } /** * Compile a DDL schema from a file on disk * @param path * @throws VoltCompiler.VoltCompilerException */ public void loadSchema(String path) throws VoltCompiler.VoltCompilerException { File inputFile = new File(path); FileReader fr = null; LineNumberReader reader = null; try { fr = new FileReader(inputFile); reader = new LineNumberReader(fr); } catch (FileNotFoundException e) { throw m_compiler.new VoltCompilerException("Unable to open schema file for reading"); } this.loadSchema(path, reader); } /** * Compile a file from an open input stream * @param path * @param reader * @throws VoltCompiler.VoltCompilerException */ public void loadSchema(String path, LineNumberReader reader) throws VoltCompiler.VoltCompilerException { DDLStatement stmt = getNextStatement(reader, m_compiler); while (stmt != null) { try { m_fullDDL += stmt.statement + " "; m_hsql.runDDLCommand(stmt.statement); stmt = getNextStatement(reader, m_compiler); } catch (HSQLParseException e) { String msg = "DDL Error: \"" + e.getMessage() + "\" in statement ending on lineno: " + stmt.lineNo; throw m_compiler.new VoltCompilerException(msg, stmt.lineNo); } } try { reader.close(); } catch (IOException e) { throw m_compiler.new VoltCompilerException("Error closing schema file"); } } public void compileToCatalog(Catalog catalog, Database db) throws VoltCompilerException { String hexDDL = Encoder.hexEncode(m_fullDDL); catalog.execute("set " + db.getPath() + " schema \"" + hexDDL + "\""); String xmlCatalog; try { xmlCatalog = m_hsql.getXMLFromCatalog(); } catch (HSQLParseException e) { String msg = "DDL Error: " + e.getMessage(); throw m_compiler.new VoltCompilerException(msg); } // output the xml catalog to disk PrintStream ddlXmlOutput = BuildDirectoryUtils.getDebugOutputPrintStream( "schema-xml", "hsql-catalog-output.xml"); ddlXmlOutput.println(xmlCatalog); ddlXmlOutput.close(); // build the local catalog from the xml catalog fillCatalogFromXML(catalog, db, xmlCatalog); } DDLStatement getNextStatement(LineNumberReader reader, VoltCompiler compiler) throws VoltCompiler.VoltCompilerException { DDLStatement retval = new DDLStatement(); try { String stmt = ""; // skip over any empty lines to read first real line while (stmt.equals("") || stmt.startsWith("--")) { stmt = reader.readLine(); if (stmt == null) return null; stmt = stmt.trim(); } // record the line number retval.lineNo = reader.getLineNumber(); // add all lines until one ends with a semicolon while((stmt.endsWith(";") == false) && (stmt.endsWith(";\n") == false)) { String newline = reader.readLine(); if (newline == null) { String msg = "Schema file ended mid statment (no semicolon found)"; throw compiler.new VoltCompilerException(msg, retval.lineNo); } newline = newline.trim(); if (newline.equals("")) continue; if (newline.startsWith("--")) continue; stmt += " " + newline + "\n"; } retval.statement = stmt; } catch (IOException e) { throw compiler.new VoltCompilerException("Unable to read from file"); 
} return retval; } public void fillCatalogFromXML(Catalog catalog, Database db, String xml) throws VoltCompiler.VoltCompilerException { StringInputStream xmlStream = new StringInputStream(xml); Document doc = null; DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setValidating(true); try { DocumentBuilder builder = factory.newDocumentBuilder(); doc = builder.parse(xmlStream); } catch (SAXParseException sxe) { m_compiler.addErr(sxe.getMessage(), sxe.getLineNumber()); } catch (SAXException sxe) { m_compiler.addErr(sxe.getMessage()); } catch (ParserConfigurationException e) { m_compiler.addErr(e.getMessage()); } catch (IOException e) { m_compiler.addErr(e.getMessage()); } if ((doc == null) || m_compiler.hasErrors()) throw m_compiler.new VoltCompilerException("Unable to parse catalog xml file from hsqldb"); Node root = doc.getDocumentElement(); assert root.getNodeName().equals("databaseschema"); NodeList tableNodes = root.getChildNodes(); for (int i = 0; i < tableNodes.getLength(); i++) { Node node = tableNodes.item(i); if (node.getNodeName().equals("table")) addTableToCatalog(catalog, db, node); } processMaterializedViews(db); } void addTableToCatalog(Catalog catalog, Database db, Node node) throws VoltCompilerException { assert node.getNodeName().equals("table"); // clear these maps, as they're table specific columnMap.clear(); indexMap.clear(); NamedNodeMap attrs = node.getAttributes(); String name = attrs.getNamedItem("name").getNodeValue(); Table table = db.getTables().add(name); // handle the case where this is a materialized view Node queryAttr = attrs.getNamedItem("query"); if (queryAttr != null) { String query = queryAttr.getNodeValue(); assert(query.length() > 0); matViewMap.put(table, query); } // all tables start replicated // if a partition is found in the project file later, // then this is reversed table.setIsreplicated(true); NodeList childNodes = node.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node subNode = childNodes.item(i); if (subNode.getNodeName().equals("columns")) { NodeList columnNodes = subNode.getChildNodes(); int colIndex = 0; for (int j = 0; j < columnNodes.getLength(); j++) { Node columnNode = columnNodes.item(j); if (columnNode.getNodeName().equals("column")) addColumnToCatalog(table, columnNode, colIndex++); } // limit the total number of columns in a table if (colIndex > MAX_COLUMNS) { String msg = "Table " + name + " has " + colIndex + " columns (max is " + MAX_COLUMNS + ")"; throw m_compiler.new VoltCompilerException(msg); } } if (subNode.getNodeName().equals("indexes")) { NodeList indexNodes = subNode.getChildNodes(); for (int j = 0; j < indexNodes.getLength(); j++) { Node indexNode = indexNodes.item(j); if (indexNode.getNodeName().equals("index")) addIndexToCatalog(table, indexNode); } } if (subNode.getNodeName().equals("constraints")) { NodeList constraintNodes = subNode.getChildNodes(); for (int j = 0; j < constraintNodes.getLength(); j++) { Node constraintNode = constraintNodes.item(j); if (constraintNode.getNodeName().equals("constraint")) addConstraintToCatalog(table, constraintNode); } } } /* * Validate that the total size */ int maxRowSize = 0; for (Column c : columnMap.values()) { VoltType t = VoltType.get((byte)c.getType()); if (t == VoltType.STRING) { if (c.getSize() > 1024 * 1024) { throw m_compiler.new VoltCompilerException("Table name " + name + " column " + c.getName() + " has a maximum size of " + c.getSize() + " bytes" + " but the maximum supported size is " + 
VoltType.MAX_VALUE_LENGTH_STR); } maxRowSize += 4 + c.getSize(); } else { maxRowSize += t.getLengthInBytesForFixedTypes(); } } if (maxRowSize > MAX_ROW_SIZE) { throw m_compiler.new VoltCompilerException("Table name " + name + " has a maximum row size of " + maxRowSize + " but the maximum supported row size is " + MAX_ROW_SIZE); } } void addColumnToCatalog(Table table, Node node, int index) throws VoltCompilerException { assert node.getNodeName().equals("column"); NamedNodeMap attrs = node.getAttributes(); String name = attrs.getNamedItem("name").getNodeValue(); String typename = attrs.getNamedItem("type").getNodeValue(); String nullable = attrs.getNamedItem("nullable").getNodeValue(); String sizeString = attrs.getNamedItem("size").getNodeValue(); String defaultvalue = null; String defaulttype = null; // throws an exception if string isn't an int (i think) Integer.parseInt(sizeString); // Default Value NodeList children = node.getChildNodes(); for (int i = 0, cnt = children.getLength(); i < cnt; i++) { Node child = children.item(i); if (child.getNodeName().equals("default")) { NodeList inner_children = child.getChildNodes(); for (int j = 0; j < inner_children.getLength(); j++) { Node inner_child = inner_children.item(j); attrs = inner_child.getAttributes(); // Value if (inner_child.getNodeName().equals("value")) { defaultvalue = attrs.getNamedItem("value").getNodeValue(); defaulttype = attrs.getNamedItem("type").getNodeValue(); } // Function /*else if (inner_child.getNodeName().equals("function")) { defaultvalue = attrs.getNamedItem("name").getNodeValue(); defaulttype = VoltType.VOLTFUNCTION.name(); }*/ if (defaultvalue != null) break; } } } if (defaultvalue != null && defaultvalue.equals("NULL")) defaultvalue = null; if (defaulttype != null) defaulttype = Integer.toString(VoltType.typeFromString(defaulttype).getValue()); VoltType type = VoltType.typeFromString(typename); int size = Integer.parseInt(sizeString); // check valid length if varchar if (type == VoltType.STRING) { if ((size == 0) || (size > VoltType.MAX_VALUE_LENGTH)) { String msg = "VARCHAR Column " + name + " in table " + table.getTypeName() + " has unsupported length " + sizeString; throw m_compiler.new VoltCompilerException(msg); } } Column column = table.getColumns().add(name); // need to set other column data here (default, nullable, etc) column.setName(name); column.setIndex(index); column.setType(type.getValue()); column.setNullable(nullable.toLowerCase().startsWith("t") ? 
true : false); column.setSize(size); column.setDefaultvalue(defaultvalue); if (defaulttype != null) column.setDefaulttype(Integer.parseInt(defaulttype)); columnMap.put(name, column); } void addIndexToCatalog(Table table, Node node) throws VoltCompilerException { assert node.getNodeName().equals("index"); NamedNodeMap attrs = node.getAttributes(); String name = attrs.getNamedItem("name").getNodeValue(); // this won't work for multi-column indices String colList = attrs.getNamedItem("columns").getNodeValue(); String[] colNames = colList.split(","); Column[] columns = new Column[colNames.length]; for (int i = 0; i < colNames.length; i++) { columns[i] = columnMap.get(colNames[i]); if (columns[i] == null) { //String msg = "Index " + name + " references column " + colNames[i] + // " which doesn't exist"; //throw compiler.new VoltCompilerException(msg); return; } } Index index = table.getIndexes().add(name); // all indexes default to hash tables // if they are used in a non-equality lookup, the planner // will change this to a binary tree // set the type of the index based on it's name (giant hack) String indexNameNoCase = name.toLowerCase(); if (indexNameNoCase.contains("tree")) index.setType(IndexType.BALANCED_TREE.getValue()); else if (indexNameNoCase.contains("array")) index.setType(IndexType.ARRAY.getValue()); else index.setType(IndexType.HASH_TABLE.getValue()); // need to set other index data here (column, etc) for (int i = 0; i < columns.length; i++) { ColumnRef cref = index.getColumns().add(columns[i].getTypeName()); cref.setColumn(columns[i]); cref.setIndex(i); } indexMap.put(name, index); } /** * Add a constraint on a given table to the catalog * @param table * @param node * @throws VoltCompilerException */ void addConstraintToCatalog(Table table, Node node) throws VoltCompilerException { assert node.getNodeName().equals("constraint"); NamedNodeMap attrs = node.getAttributes(); String name = attrs.getNamedItem("name").getNodeValue(); String typeName = attrs.getNamedItem("type").getNodeValue(); ConstraintType type = ConstraintType.valueOf(typeName); if (type == null) { throw this.m_compiler.new VoltCompilerException("Invalid constraint type '" + typeName + "'"); } // The constraint is backed by an index, therefore we need to create it // TODO: We need to be able to use indexes for foreign keys. I am purposely // leaving those out right now because HSQLDB just makes too many of them. 
Constraint catalog_const = null; if (attrs.getNamedItem("index") != null) { String indexName = attrs.getNamedItem("index") .getNodeValue(); Index catalog_index = indexMap.get(indexName); // if the constraint name contains index type hints, exercise them (giant hack) if (catalog_index != null) { String constraintNameNoCase = name.toLowerCase(); if (constraintNameNoCase.contains("tree")) catalog_index.setType(IndexType.BALANCED_TREE.getValue()); if (constraintNameNoCase.contains("array")) catalog_index.setType(IndexType.ARRAY.getValue()); } catalog_const = table.getConstraints().add(name); if (catalog_index != null) { catalog_const.setIndex(catalog_index); catalog_index.setUnique(type == ConstraintType.UNIQUE || type == ConstraintType.PRIMARY_KEY); } } else { catalog_const = table.getConstraints().add(name); } catalog_const.setType(type.getValue()); // Foreign Keys if (type == ConstraintType.FOREIGN_KEY) { String fkey_table_name = attrs.getNamedItem("foreignkeytable").getNodeValue(); Table catalog_fkey_tbl = ((Database)table.getParent()).getTables().getIgnoreCase(fkey_table_name); if (catalog_fkey_tbl == null) { throw this.m_compiler.new VoltCompilerException("Invalid foreign key table '" + fkey_table_name + "'"); } catalog_const.setForeignkeytable(catalog_fkey_tbl); // Column mappings NodeList children = node.getChildNodes(); for (int i = 0, cnt = children.getLength(); i < cnt; i++) { Node child = children.item(i); if (child.getNodeName().equals("reference")) { attrs = child.getAttributes(); String from_colname = attrs.getNamedItem("from").getNodeValue(); Column from_col = table.getColumns().get(from_colname); String to_colname = attrs.getNamedItem("to").getNodeValue(); Column to_col = catalog_fkey_tbl.getColumns().get(to_colname); // Make a reference in the fromcolumn to their column in the constraint // We store the name of from_olumn as the name of the reference in the catalog ColumnRef cref = catalog_const.getForeignkeycols().add(from_col.getTypeName()); cref.setColumn(to_col); // Add a ConstraintRef for the from_column ConstraintRef const_ref = from_col.getConstraints().add(catalog_const.getTypeName()); const_ref.setConstraint(catalog_const); } } // All other constraints } else { // Nothing for now... } return; } /** * Add materialized view info to the catalog for the tables that are * materialized views. 
*/ void processMaterializedViews(Database db) throws VoltCompiler.VoltCompilerException { for (Entry<Table, String> entry : matViewMap.entrySet()) { Table destTable = entry.getKey(); String query = entry.getValue(); // get the xml for the query String xmlquery = null; try { xmlquery = m_hsql.getXMLCompiledStatement(query); } catch (HSQLParseException e) { e.printStackTrace(); } assert(xmlquery != null); // parse the xml like any other sql statement ParsedSelectStmt stmt = null; try { stmt = (ParsedSelectStmt) AbstractParsedStmt.parse(query, xmlquery, db); } catch (Exception e) { throw m_compiler.new VoltCompilerException(e.getMessage()); } assert(stmt != null); // throw an error if the view isn't withing voltdb's limited worldview checkViewMeetsSpec(destTable.getTypeName(), stmt); // create the materializedviewinfo catalog node for the source table Table srcTable = stmt.tableList.get(0); MaterializedViewInfo matviewinfo = srcTable.getViews().add(destTable.getTypeName()); matviewinfo.setDest(destTable); if (stmt.where == null) matviewinfo.setPredicate(""); else { String hex = Encoder.hexEncode(stmt.where.toJSONString()); matviewinfo.setPredicate(hex); } destTable.setMaterializer(srcTable); List<Column> srcColumnArray = CatalogUtil.getSortedCatalogItems(srcTable.getColumns(), "index"); List<Column> destColumnArray = CatalogUtil.getSortedCatalogItems(destTable.getColumns(), "index"); // add the group by columns from the src table for (int i = 0; i < stmt.groupByColumns.size(); i++) { ParsedSelectStmt.ParsedColInfo gbcol = stmt.groupByColumns.get(i); Column srcCol = srcColumnArray.get(gbcol.index); ColumnRef cref = matviewinfo.getGroupbycols().add(srcCol.getTypeName()); cref.setColumn(srcCol); } ParsedSelectStmt.ParsedColInfo countCol = stmt.displayColumns.get(stmt.groupByColumns.size()); assert(countCol.expression.getExpressionType() == ExpressionType.AGGREGATE_COUNT); assert(countCol.expression.getLeft() == null); processMaterializedViewColumn(matviewinfo, srcTable, destTable, destColumnArray.get(stmt.groupByColumns.size()), ExpressionType.AGGREGATE_COUNT, null); // create an index and constraint for the table Index pkIndex = destTable.getIndexes().add("MATVIEW_PK_INDEX"); pkIndex.setType(IndexType.BALANCED_TREE.getValue()); pkIndex.setUnique(true); // add the group by columns from the src table // assume index 1 throuh #grpByCols + 1 are the cols for (int i = 0; i < stmt.groupByColumns.size(); i++) { ColumnRef c = pkIndex.getColumns().add(String.valueOf(i)); c.setColumn(destColumnArray.get(i)); c.setIndex(i); } Constraint pkConstraint = destTable.getConstraints().add("MATVIEW_PK_CONSTRAINT"); pkConstraint.setType(ConstraintType.PRIMARY_KEY.getValue()); pkConstraint.setIndex(pkIndex); // parse out the group by columns into the dest table for (int i = 0; i < stmt.groupByColumns.size(); i++) { ParsedSelectStmt.ParsedColInfo col = stmt.displayColumns.get(i); Column destColumn = destColumnArray.get(i); processMaterializedViewColumn(matviewinfo, srcTable, destTable, destColumn, ExpressionType.VALUE_TUPLE, (TupleValueExpression)col.expression); } // parse out the aggregation columns into the dest table for (int i = stmt.groupByColumns.size() + 1; i < stmt.displayColumns.size(); i++) { ParsedSelectStmt.ParsedColInfo col = stmt.displayColumns.get(i); Column destColumn = destColumnArray.get(i); AbstractExpression colExpr = col.expression.getLeft(); assert(colExpr.getExpressionType() == ExpressionType.VALUE_TUPLE); processMaterializedViewColumn(matviewinfo, srcTable, destTable, destColumn, 
col.expression.getExpressionType(), (TupleValueExpression)colExpr); // Correctly set the type of the column so that it's consistent. // Otherwise HSQLDB might promote types differently than Volt. destColumn.setType(col.expression.getValueType().getValue()); } } } /** * Verify the materialized view meets our arcane rules about what can and can't * go in a materialized view. Throw hopefully helpful error messages when these * rules are inevitably borked. * * @param viewName The name of the view being checked. * @param stmt The output from the parser describing the select statement that creates the view. * @throws VoltCompilerException */ private void checkViewMeetsSpec(String viewName, ParsedSelectStmt stmt) throws VoltCompilerException { int groupColCount = stmt.groupByColumns.size(); int displayColCount = stmt.displayColumns.size(); String msg = "Materialized view \"" + viewName + "\" "; if (stmt.tableList.size() != 1) { msg += "has " + String.valueOf(stmt.tableList.size()) + " sources. " + "Only one source view or source table is allowed."; throw m_compiler.new VoltCompilerException(msg); } if (displayColCount <= groupColCount) { msg += "has too few columns."; throw m_compiler.new VoltCompilerException(msg); } int i; for (i = 0; i < groupColCount; i++) { ParsedSelectStmt.ParsedColInfo gbcol = stmt.groupByColumns.get(i); ParsedSelectStmt.ParsedColInfo outcol = stmt.displayColumns.get(i); if (outcol.expression.getExpressionType() != ExpressionType.VALUE_TUPLE) { msg += "must have column at index " + String.valueOf(i) + " be " + gbcol.alias; throw m_compiler.new VoltCompilerException(msg); } TupleValueExpression expr = (TupleValueExpression) outcol.expression; if (expr.getColumnIndex() != gbcol.index) { msg += "must have column at index " + String.valueOf(i) + " be " + gbcol.alias; throw m_compiler.new VoltCompilerException(msg); } } AbstractExpression coli = stmt.displayColumns.get(i).expression; if ((coli.getExpressionType() != ExpressionType.AGGREGATE_COUNT) || (coli.getLeft() != null) || (coli.getRight() != null)) { msg += "is missing count(*) as the column after the group by columns, a materialized view requirement."; throw m_compiler.new VoltCompilerException(msg); } for (i++; i < displayColCount; i++) { ParsedSelectStmt.ParsedColInfo outcol = stmt.displayColumns.get(i); if ((outcol.expression.getExpressionType() != ExpressionType.AGGREGATE_COUNT) && (outcol.expression.getExpressionType() != ExpressionType.AGGREGATE_SUM)) { msg += "must have non-group by columns aggregated by sum or count."; throw m_compiler.new VoltCompilerException(msg); } if (outcol.expression.getLeft().getExpressionType() != ExpressionType.VALUE_TUPLE) { msg += "must have non-group by columns use only one level of aggregation."; throw m_compiler.new VoltCompilerException(msg); } } } void processMaterializedViewColumn(MaterializedViewInfo info, Table srcTable, Table destTable, Column destColumn, ExpressionType type, TupleValueExpression colExpr) throws VoltCompiler.VoltCompilerException { if (colExpr != null) { assert(colExpr.getTableName().equalsIgnoreCase(srcTable.getTypeName())); String srcColName = colExpr.getColumnName(); Column srcColumn = srcTable.getColumns().getIgnoreCase(srcColName); destColumn.setMatviewsource(srcColumn); } destColumn.setMatview(info); destColumn.setAggregatetype(type.getValue()); } }
src/frontend/org/voltdb/compiler/DDLCompiler.java
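One detail worth pulling out of the DDLCompiler source above is how getNextStatement() batches the schema text: blank lines and lines starting with "--" are skipped, the line number where a statement begins is recorded, and lines are appended until one ends with a semicolon. The standalone sketch below re-implements just that splitting rule (without the line-number tracking or the mid-statement EOF error) so it can be tried outside the compiler; it is an illustration of the logic, not the VoltDB code itself.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

public class DdlSplitExample {
    // Split DDL text into statements the way getNextStatement() does:
    // skip blank lines and "--" comments, accumulate until a line ends with ';'.
    static List<String> split(String ddl) throws IOException {
        List<String> statements = new ArrayList<String>();
        BufferedReader reader = new BufferedReader(new StringReader(ddl));
        StringBuilder current = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            if (line.equals("") || line.startsWith("--"))
                continue;
            current.append(line).append(' ');
            if (line.endsWith(";")) {
                statements.add(current.toString().trim());
                current.setLength(0);
            }
        }
        return statements;
    }

    public static void main(String[] args) throws IOException {
        String ddl = "-- example schema\n"
                   + "CREATE TABLE t (\n  id INTEGER NOT NULL,\n  name VARCHAR(32)\n);\n"
                   + "\n"
                   + "CREATE INDEX tree_t_id ON t (id);\n";
        for (String statement : split(ddl))
            System.out.println(statement);
    }
}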
/* This file is part of VoltDB. * Copyright (C) 2008-2010 VoltDB L.L.C. * * VoltDB is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * VoltDB is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.compiler; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.LineNumberReader; import java.io.PrintStream; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.hsqldb.HSQLInterface; import org.hsqldb.HSQLInterface.HSQLParseException; import org.voltdb.VoltType; import org.voltdb.catalog.Catalog; import org.voltdb.catalog.Column; import org.voltdb.catalog.ColumnRef; import org.voltdb.catalog.Constraint; import org.voltdb.catalog.ConstraintRef; import org.voltdb.catalog.Database; import org.voltdb.catalog.Index; import org.voltdb.catalog.MaterializedViewInfo; import org.voltdb.catalog.Table; import org.voltdb.compiler.VoltCompiler.VoltCompilerException; import org.voltdb.expressions.AbstractExpression; import org.voltdb.expressions.TupleValueExpression; import org.voltdb.planner.AbstractParsedStmt; import org.voltdb.planner.ParsedSelectStmt; import org.voltdb.types.ConstraintType; import org.voltdb.types.ExpressionType; import org.voltdb.types.IndexType; import org.voltdb.utils.BuildDirectoryUtils; import org.voltdb.utils.CatalogUtil; import org.voltdb.utils.Encoder; import org.voltdb.utils.StringInputStream; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; /** * Compiles schema (SQL DDL) text files and stores the results in a given catalog. 
* */ public class DDLCompiler { static final int MAX_COLUMNS = 1024; static final int MAX_ROW_SIZE = 1024 * 1024 * 2; HSQLInterface m_hsql; VoltCompiler m_compiler; String m_fullDDL = ""; HashMap<String, Column> columnMap = new HashMap<String, Column>(); HashMap<String, Index> indexMap = new HashMap<String, Index>(); HashMap<Table, String> matViewMap = new HashMap<Table, String>(); private class DDLStatement { String statement; int lineNo; } public DDLCompiler(VoltCompiler compiler, HSQLInterface hsql) { assert(hsql != null); this.m_hsql = hsql; this.m_compiler = compiler; } /** * Compile a DDL schema from a file on disk * @param path * @throws VoltCompiler.VoltCompilerException */ public void loadSchema(String path) throws VoltCompiler.VoltCompilerException { File inputFile = new File(path); FileReader fr = null; LineNumberReader reader = null; try { fr = new FileReader(inputFile); reader = new LineNumberReader(fr); } catch (FileNotFoundException e) { throw m_compiler.new VoltCompilerException("Unable to open schema file for reading"); } this.loadSchema(path, reader); } /** * Compile a file from an open input stream * @param path * @param reader * @throws VoltCompiler.VoltCompilerException */ public void loadSchema(String path, LineNumberReader reader) throws VoltCompiler.VoltCompilerException { DDLStatement stmt = getNextStatement(reader, m_compiler); while (stmt != null) { try { m_fullDDL += stmt.statement + " "; m_hsql.runDDLCommand(stmt.statement); stmt = getNextStatement(reader, m_compiler); } catch (HSQLParseException e) { String msg = "DDL Error: \"" + e.getMessage() + "\" in statement ending on lineno: " + stmt.lineNo; throw m_compiler.new VoltCompilerException(msg, stmt.lineNo); } } try { reader.close(); } catch (IOException e) { throw m_compiler.new VoltCompilerException("Error closing schema file"); } } public void compileToCatalog(Catalog catalog, Database db) throws VoltCompilerException { String hexDDL = Encoder.hexEncode(m_fullDDL); catalog.execute("set " + db.getPath() + " schema \"" + hexDDL + "\""); String xmlCatalog; try { xmlCatalog = m_hsql.getXMLFromCatalog(); } catch (HSQLParseException e) { String msg = "DDL Error: " + e.getMessage(); throw m_compiler.new VoltCompilerException(msg); } // output the xml catalog to disk PrintStream ddlXmlOutput = BuildDirectoryUtils.getDebugOutputPrintStream( "schema-xml", "hsql-catalog-output.xml"); ddlXmlOutput.println(xmlCatalog); ddlXmlOutput.close(); // build the local catalog from the xml catalog fillCatalogFromXML(catalog, db, xmlCatalog); } DDLStatement getNextStatement(LineNumberReader reader, VoltCompiler compiler) throws VoltCompiler.VoltCompilerException { DDLStatement retval = new DDLStatement(); try { String stmt = ""; // skip over any empty lines to read first real line while (stmt.equals("") || stmt.startsWith("--")) { stmt = reader.readLine(); if (stmt == null) return null; stmt = stmt.trim(); } // record the line number retval.lineNo = reader.getLineNumber(); // add all lines until one ends with a semicolon while((stmt.endsWith(";") == false) && (stmt.endsWith(";\n") == false)) { String newline = reader.readLine(); if (newline == null) { String msg = "Schema file ended mid statment (no semicolon found)"; throw compiler.new VoltCompilerException(msg, retval.lineNo); } newline = newline.trim(); if (newline.equals("")) continue; if (newline.startsWith("--")) continue; stmt += " " + newline + "\n"; } retval.statement = stmt; } catch (IOException e) { throw compiler.new VoltCompilerException("Unable to read from file"); 
} return retval; } public void fillCatalogFromXML(Catalog catalog, Database db, String xml) throws VoltCompiler.VoltCompilerException { StringInputStream xmlStream = new StringInputStream(xml); Document doc = null; DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setValidating(true); try { DocumentBuilder builder = factory.newDocumentBuilder(); doc = builder.parse(xmlStream); } catch (SAXParseException sxe) { m_compiler.addErr(sxe.getMessage(), sxe.getLineNumber()); } catch (SAXException sxe) { m_compiler.addErr(sxe.getMessage()); } catch (ParserConfigurationException e) { m_compiler.addErr(e.getMessage()); } catch (IOException e) { m_compiler.addErr(e.getMessage()); } if ((doc == null) || m_compiler.hasErrors()) throw m_compiler.new VoltCompilerException("Unable to parse catalog xml file from hsqldb"); Node root = doc.getDocumentElement(); assert root.getNodeName().equals("databaseschema"); NodeList tableNodes = root.getChildNodes(); for (int i = 0; i < tableNodes.getLength(); i++) { Node node = tableNodes.item(i); if (node.getNodeName().equals("table")) addTableToCatalog(catalog, db, node); } processMaterializedViews(db); } void addTableToCatalog(Catalog catalog, Database db, Node node) throws VoltCompilerException { assert node.getNodeName().equals("table"); // clear these maps, as they're table specific columnMap.clear(); indexMap.clear(); NamedNodeMap attrs = node.getAttributes(); String name = attrs.getNamedItem("name").getNodeValue(); Table table = db.getTables().add(name); // handle the case where this is a materialized view Node queryAttr = attrs.getNamedItem("query"); if (queryAttr != null) { String query = queryAttr.getNodeValue(); assert(query.length() > 0); matViewMap.put(table, query); } // all tables start replicated // if a partition is found in the project file later, // then this is reversed table.setIsreplicated(true); NodeList childNodes = node.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node subNode = childNodes.item(i); if (subNode.getNodeName().equals("columns")) { NodeList columnNodes = subNode.getChildNodes(); int colIndex = 0; for (int j = 0; j < columnNodes.getLength(); j++) { Node columnNode = columnNodes.item(j); if (columnNode.getNodeName().equals("column")) addColumnToCatalog(table, columnNode, colIndex++); } // limit the total number of columns in a table if (colIndex > MAX_COLUMNS) { String msg = "Table " + name + " has " + colIndex + " columns (max is " + MAX_COLUMNS + ")"; throw m_compiler.new VoltCompilerException(msg); } } if (subNode.getNodeName().equals("indexes")) { NodeList indexNodes = subNode.getChildNodes(); for (int j = 0; j < indexNodes.getLength(); j++) { Node indexNode = indexNodes.item(j); if (indexNode.getNodeName().equals("index")) addIndexToCatalog(table, indexNode); } } if (subNode.getNodeName().equals("constraints")) { NodeList constraintNodes = subNode.getChildNodes(); for (int j = 0; j < constraintNodes.getLength(); j++) { Node constraintNode = constraintNodes.item(j); if (constraintNode.getNodeName().equals("constraint")) addConstraintToCatalog(table, constraintNode); } } } /* * Validate that the total size */ int maxRowSize = 0; for (Column c : columnMap.values()) { VoltType t = VoltType.get((byte)c.getType()); if (t == VoltType.STRING) { if (c.getSize() > 1024 * 1024) { throw m_compiler.new VoltCompilerException("Table name " + name + " column " + c.getName() + " has a maximum size of " + c.getSize() + " bytes" + " but the maximum supported size is " + 
VoltType.MAX_VALUE_LENGTH_STR); } maxRowSize += 4 + c.getSize(); } else { maxRowSize += t.getLengthInBytesForFixedTypes(); } } if (maxRowSize > MAX_ROW_SIZE) { throw m_compiler.new VoltCompilerException("Table name " + name + " has a maximum row size of " + maxRowSize + " but the maximum supported row size is " + MAX_ROW_SIZE); } } void addColumnToCatalog(Table table, Node node, int index) throws VoltCompilerException { assert node.getNodeName().equals("column"); NamedNodeMap attrs = node.getAttributes(); String name = attrs.getNamedItem("name").getNodeValue(); String typename = attrs.getNamedItem("type").getNodeValue(); String nullable = attrs.getNamedItem("nullable").getNodeValue(); String sizeString = attrs.getNamedItem("size").getNodeValue(); String defaultvalue = null; String defaulttype = null; // throws an exception if string isn't an int (i think) Integer.parseInt(sizeString); // Default Value NodeList children = node.getChildNodes(); for (int i = 0, cnt = children.getLength(); i < cnt; i++) { Node child = children.item(i); if (child.getNodeName().equals("default")) { NodeList inner_children = child.getChildNodes(); for (int j = 0; j < inner_children.getLength(); j++) { Node inner_child = inner_children.item(j); attrs = inner_child.getAttributes(); // Value if (inner_child.getNodeName().equals("value")) { defaultvalue = attrs.getNamedItem("value").getNodeValue(); defaulttype = attrs.getNamedItem("type").getNodeValue(); } // Function /*else if (inner_child.getNodeName().equals("function")) { defaultvalue = attrs.getNamedItem("name").getNodeValue(); defaulttype = VoltType.VOLTFUNCTION.name(); }*/ if (defaultvalue != null) break; } } } if (defaultvalue != null && defaultvalue.equals("NULL")) defaultvalue = null; if (defaulttype != null) defaulttype = Integer.toString(VoltType.typeFromString(defaulttype).getValue()); VoltType type = VoltType.typeFromString(typename); int size = Integer.parseInt(sizeString); // check valid length if varchar if (type == VoltType.STRING) { if ((size == 0) || (size > VoltType.MAX_VALUE_LENGTH)) { String msg = "VARCHAR Column " + name + " in table " + table.getTypeName() + " has unsupported length " + sizeString; throw m_compiler.new VoltCompilerException(msg); } } Column column = table.getColumns().add(name); // need to set other column data here (default, nullable, etc) column.setName(name); column.setIndex(index); column.setType(type.getValue()); column.setNullable(nullable.toLowerCase().startsWith("t") ? 
true : false); column.setSize(size); column.setDefaultvalue(defaultvalue); if (defaulttype != null) column.setDefaulttype(Integer.parseInt(defaulttype)); columnMap.put(name, column); } void addIndexToCatalog(Table table, Node node) throws VoltCompilerException { assert node.getNodeName().equals("index"); NamedNodeMap attrs = node.getAttributes(); String name = attrs.getNamedItem("name").getNodeValue(); // this won't work for multi-column indices String colList = attrs.getNamedItem("columns").getNodeValue(); String[] colNames = colList.split(","); Column[] columns = new Column[colNames.length]; for (int i = 0; i < colNames.length; i++) { columns[i] = columnMap.get(colNames[i]); if (columns[i] == null) { //String msg = "Index " + name + " references column " + colNames[i] + // " which doesn't exist"; //throw compiler.new VoltCompilerException(msg); return; } } Index index = table.getIndexes().add(name); // all indexes default to hash tables // if they are used in a non-equality lookup, the planner // will change this to a binary tree // set the type of the index based on it's name (giant hack) String indexNameNoCase = name.toLowerCase(); if (indexNameNoCase.contains("tree")) index.setType(IndexType.BALANCED_TREE.getValue()); else if (indexNameNoCase.contains("array")) index.setType(IndexType.ARRAY.getValue()); else index.setType(IndexType.HASH_TABLE.getValue()); // need to set other index data here (column, etc) for (int i = 0; i < columns.length; i++) { ColumnRef cref = index.getColumns().add(columns[i].getTypeName()); cref.setColumn(columns[i]); cref.setIndex(i); } indexMap.put(name, index); } /** * Add a constraint on a given table to the catalog * @param table * @param node * @throws VoltCompilerException */ void addConstraintToCatalog(Table table, Node node) throws VoltCompilerException { assert node.getNodeName().equals("constraint"); NamedNodeMap attrs = node.getAttributes(); String name = attrs.getNamedItem("name").getNodeValue(); String typeName = attrs.getNamedItem("type").getNodeValue(); ConstraintType type = ConstraintType.valueOf(typeName); if (type == null) { throw this.m_compiler.new VoltCompilerException("Invalid constraint type '" + typeName + "'"); } // The constraint is backed by an index, therefore we need to create it // TODO: We need to be able to use indexes for foreign keys. I am purposely // leaving those out right now because HSQLDB just makes too many of them. 
Constraint catalog_const = null; if (attrs.getNamedItem("index") != null) { String indexName = attrs.getNamedItem("index") .getNodeValue(); Index catalog_index = indexMap.get(indexName); // if the constraint name contains index type hints, exercise them (giant hack) if (catalog_index != null) { String constraintNameNoCase = name.toLowerCase(); if (constraintNameNoCase.contains("tree")) catalog_index.setType(IndexType.BALANCED_TREE.getValue()); if (constraintNameNoCase.contains("array")) catalog_index.setType(IndexType.ARRAY.getValue()); } catalog_const = table.getConstraints().add(name); if (catalog_index != null) { catalog_const.setIndex(catalog_index); catalog_index.setUnique(type == ConstraintType.UNIQUE || type == ConstraintType.PRIMARY_KEY); } } else { catalog_const = table.getConstraints().add(name); } catalog_const.setType(type.getValue()); // Foreign Keys if (type == ConstraintType.FOREIGN_KEY) { String fkey_table_name = attrs.getNamedItem("foreignkeytable").getNodeValue(); Table catalog_fkey_tbl = ((Database)table.getParent()).getTables().getIgnoreCase(fkey_table_name); if (catalog_fkey_tbl == null) { throw this.m_compiler.new VoltCompilerException("Invalid foreign key table '" + fkey_table_name + "'"); } catalog_const.setForeignkeytable(catalog_fkey_tbl); // Column mappings NodeList children = node.getChildNodes(); for (int i = 0, cnt = children.getLength(); i < cnt; i++) { Node child = children.item(i); if (child.getNodeName().equals("reference")) { attrs = child.getAttributes(); String from_colname = attrs.getNamedItem("from").getNodeValue(); Column from_col = table.getColumns().get(from_colname); String to_colname = attrs.getNamedItem("to").getNodeValue(); Column to_col = catalog_fkey_tbl.getColumns().get(to_colname); // Make a reference in the fromcolumn to their column in the constraint // We store the name of from_olumn as the name of the reference in the catalog ColumnRef cref = catalog_const.getForeignkeycols().add(from_col.getTypeName()); cref.setColumn(to_col); // Add a ConstraintRef for the from_column ConstraintRef const_ref = from_col.getConstraints().add(catalog_const.getTypeName()); const_ref.setConstraint(catalog_const); } } // All other constraints } else { // Nothing for now... } return; } /** * Add materialized view info to the catalog for the tables that are * materialized views. 
*/ void processMaterializedViews(Database db) throws VoltCompiler.VoltCompilerException { for (Entry<Table, String> entry : matViewMap.entrySet()) { Table destTable = entry.getKey(); String query = entry.getValue(); // get the xml for the query String xmlquery = null; try { xmlquery = m_hsql.getXMLCompiledStatement(query); } catch (HSQLParseException e) { e.printStackTrace(); } assert(xmlquery != null); // parse the xml like any other sql statement ParsedSelectStmt stmt = null; try { stmt = (ParsedSelectStmt) AbstractParsedStmt.parse(query, xmlquery, db); } catch (Exception e) { throw m_compiler.new VoltCompilerException(e.getMessage()); } assert(stmt != null); // throw an error if the view isn't withing voltdb's limited worldview checkViewMeetsSpec(destTable.getTypeName(), stmt); // create the materializedviewinfo catalog node for the source table Table srcTable = stmt.tableList.get(0); MaterializedViewInfo matviewinfo = srcTable.getViews().add(destTable.getTypeName()); matviewinfo.setDest(destTable); if (stmt.where == null) matviewinfo.setPredicate(""); else { String hex = Encoder.hexEncode(stmt.where.toJSONString()); matviewinfo.setPredicate(hex); } destTable.setMaterializer(srcTable); List<Column> srcColumnArray = CatalogUtil.getSortedCatalogItems(srcTable.getColumns(), "index"); List<Column> destColumnArray = CatalogUtil.getSortedCatalogItems(destTable.getColumns(), "index"); // add the group by columns from the src table for (int i = 0; i < stmt.groupByColumns.size(); i++) { ParsedSelectStmt.ParsedColInfo gbcol = stmt.groupByColumns.get(i); Column srcCol = srcColumnArray.get(gbcol.index); ColumnRef cref = matviewinfo.getGroupbycols().add(srcCol.getTypeName()); cref.setColumn(srcCol); } ParsedSelectStmt.ParsedColInfo countCol = stmt.displayColumns.get(stmt.groupByColumns.size()); assert(countCol.expression.getExpressionType() == ExpressionType.AGGREGATE_COUNT); assert(countCol.expression.getLeft() == null); processMaterializedViewColumn(matviewinfo, srcTable, destTable, destColumnArray.get(stmt.groupByColumns.size()), ExpressionType.AGGREGATE_COUNT, null); // create an index and constraint for the table Index pkIndex = destTable.getIndexes().add("MATVIEW_PK_INDEX"); pkIndex.setType(IndexType.BALANCED_TREE.getValue()); pkIndex.setUnique(true); // add the group by columns from the src table // assume index 1 throuh #grpByCols + 1 are the cols for (int i = 0; i < stmt.groupByColumns.size(); i++) { ColumnRef c = pkIndex.getColumns().add(String.valueOf(i)); c.setColumn(destColumnArray.get(i)); c.setIndex(i); } Constraint pkConstraint = destTable.getConstraints().add("MATVIEW_PK_CONSTRAINT"); pkConstraint.setType(ConstraintType.PRIMARY_KEY.getValue()); pkConstraint.setIndex(pkIndex); // parse out the group by columns into the dest table for (int i = 0; i < stmt.groupByColumns.size(); i++) { ParsedSelectStmt.ParsedColInfo col = stmt.displayColumns.get(i); Column destColumn = destColumnArray.get(i); processMaterializedViewColumn(matviewinfo, srcTable, destTable, destColumn, ExpressionType.VALUE_TUPLE, (TupleValueExpression)col.expression); } // parse out the aggregation columns into the dest table for (int i = stmt.groupByColumns.size() + 1; i < stmt.displayColumns.size(); i++) { ParsedSelectStmt.ParsedColInfo col = stmt.displayColumns.get(i); Column destColumn = destColumnArray.get(i); AbstractExpression colExpr = col.expression.getLeft(); assert(colExpr.getExpressionType() == ExpressionType.VALUE_TUPLE); processMaterializedViewColumn(matviewinfo, srcTable, destTable, destColumn, 
col.expression.getExpressionType(), (TupleValueExpression)colExpr); // Correctly set the type of the column so that it's consistent. // Otherwise HSQLDB might promote types differently than Volt. destColumn.setType(col.expression.getValueType().getValue()); } } } /** * Verify the materialized view meets our arcane rules about what can and can't * go in a materialized view. Throw hopefully helpful error messages when these * rules are inevitably borked. * * @param viewName The name of the view being checked. * @param stmt The output from the parser describing the select statement that creates the view. * @throws VoltCompilerException */ private void checkViewMeetsSpec(String viewName, ParsedSelectStmt stmt) throws VoltCompilerException { int groupColCount = stmt.groupByColumns.size(); int displayColCount = stmt.displayColumns.size(); String msg = "Materialized view \"" + viewName + "\" "; if (stmt.tableList.size() != 1) { msg += "has " + String.valueOf(stmt.tableList.size()) + " when only 1 is allowed."; throw m_compiler.new VoltCompilerException(msg); } if (displayColCount <= groupColCount) { msg += "has too few columns."; throw m_compiler.new VoltCompilerException(msg); } int i; for (i = 0; i < groupColCount; i++) { ParsedSelectStmt.ParsedColInfo gbcol = stmt.groupByColumns.get(i); ParsedSelectStmt.ParsedColInfo outcol = stmt.displayColumns.get(i); if (outcol.expression.getExpressionType() != ExpressionType.VALUE_TUPLE) { msg += "must have column at index " + String.valueOf(i) + " be " + gbcol.alias; throw m_compiler.new VoltCompilerException(msg); } TupleValueExpression expr = (TupleValueExpression) outcol.expression; if (expr.getColumnIndex() != gbcol.index) { msg += "must have column at index " + String.valueOf(i) + " be " + gbcol.alias; throw m_compiler.new VoltCompilerException(msg); } } AbstractExpression coli = stmt.displayColumns.get(i).expression; if ((coli.getExpressionType() != ExpressionType.AGGREGATE_COUNT) || (coli.getLeft() != null) || (coli.getRight() != null)) { msg += "is missing count(*) as the column after the group by columns, a materialized view requirement."; throw m_compiler.new VoltCompilerException(msg); } for (i++; i < displayColCount; i++) { ParsedSelectStmt.ParsedColInfo outcol = stmt.displayColumns.get(i); if ((outcol.expression.getExpressionType() != ExpressionType.AGGREGATE_COUNT) && (outcol.expression.getExpressionType() != ExpressionType.AGGREGATE_SUM)) { msg += "must have non-group by columns aggregated by sum or count."; throw m_compiler.new VoltCompilerException(msg); } if (outcol.expression.getLeft().getExpressionType() != ExpressionType.VALUE_TUPLE) { msg += "must have non-group by columns use only one level of aggregation."; throw m_compiler.new VoltCompilerException(msg); } } } void processMaterializedViewColumn(MaterializedViewInfo info, Table srcTable, Table destTable, Column destColumn, ExpressionType type, TupleValueExpression colExpr) throws VoltCompiler.VoltCompilerException { if (colExpr != null) { assert(colExpr.getTableName().equalsIgnoreCase(srcTable.getTypeName())); String srcColName = colExpr.getColumnName(); Column srcColumn = srcTable.getColumns().getIgnoreCase(srcColName); destColumn.setMatviewsource(srcColumn); } destColumn.setMatview(info); destColumn.setAggregatetype(type.getValue()); } }
ENG-570 Cleanup view creation error message.
src/frontend/org/voltdb/compiler/DDLCompiler.java
ENG-570 Cleanup view creation error message.
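The checkViewMeetsSpec() method in the DDLCompiler source above enforces a narrow shape for materialized views: exactly one source table, the group-by columns listed first and in order, an immediate COUNT(*), and only SUM/COUNT aggregates of plain columns afterwards. The following minimal sketch collects DDL strings that illustrate those checks; the view, table, and column names are hypothetical and the strings are illustrations of the rules, not verified VoltDB DDL.

public class MatViewRuleExamples {

    // Accepted: one source table, the group-by column first,
    // COUNT(*) immediately after it, then a SUM of a plain column.
    static final String VALID =
        "CREATE VIEW order_totals (cust_id, order_count, total_amount) AS "
      + "SELECT cust_id, COUNT(*), SUM(amount) FROM orders GROUP BY cust_id;";

    // Rejected: no COUNT(*) directly after the group-by columns.
    static final String MISSING_COUNT_STAR =
        "CREATE VIEW bad_no_count (cust_id, total_amount) AS "
      + "SELECT cust_id, SUM(amount) FROM orders GROUP BY cust_id;";

    // Rejected: AVG is neither SUM nor COUNT.
    static final String BAD_AGGREGATE =
        "CREATE VIEW bad_avg (cust_id, order_count, avg_amount) AS "
      + "SELECT cust_id, COUNT(*), AVG(amount) FROM orders GROUP BY cust_id;";

    // Rejected: more than one table in the FROM clause.
    static final String TWO_TABLES =
        "CREATE VIEW bad_join (cust_id, order_count) AS "
      + "SELECT o.cust_id, COUNT(*) FROM orders o, customers c "
      + "WHERE o.cust_id = c.id GROUP BY o.cust_id;";
}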
Java
agpl-3.0
cd71492b631de10cc109bb504173750bff506b92
0
britzke/rennspur,britzke/rennspur,britzke/rennspur,britzke/rennspur
/** * */ package de.rennspur.test.beans; import static org.junit.Assert.fail; import static org.junit.Assert.assertNotNull; import static org.mockito.Mockito.when; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Query; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import de.rennspur.beans.ClubBean; /** * Tests the ClubBean unit. * * @author britzke */ @RunWith(MockitoJUnitRunner.class) public class ClubBeanTest { @InjectMocks ClubBean proband; @Mock EntityManagerFactory emf; @Mock EntityManager em; @Mock Query q; /** * @throws java.lang.Exception */ @Before public void setUp() throws Exception { // proband = new ClubBean(); // emf = mock(EntityManagerFactory.class); // em = mock(EntityManager.class); // q = mock(Query.class); // Class<? extends ClubBean> probandClass = proband.getClass(); // Field emfField = probandClass.getDeclaredField("emf"); // emfField.setAccessible(true); // emfField.set(proband, emf); when(emf.createEntityManager()).thenReturn(em); when(em.createNamedQuery("Club.findAll")).thenReturn(q); } /** * Tests if the clubs list field is initialized from the database. Test * method for {@link de.rennspur.beans.ClubBean#init()}. */ @Test public void testIfInitInitializesFieldClubs() { proband.init(); assertNotNull(proband.getClubs()); } /** * Test method for {@link de.rennspur.beans.ClubBean#insertNewClub()}. */ @Test public void testInsertNewClub() { fail("Not yet implemented"); } /** * Test method for * {@link de.rennspur.beans.ClubBean#onRowSelect(org.primefaces.event.SelectEvent)}. */ @Test public void testOnRowSelect() { fail("Not yet implemented"); } }
src/test/java/de/rennspur/test/beans/ClubBeanTest.java
/** * */ package de.rennspur.test.beans; import static org.junit.Assert.assertNotNull; import static org.mockito.Mockito.when; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Query; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import de.rennspur.beans.ClubBean; /** * Tests the ClubBean unit. * * @author britzke */ @RunWith(MockitoJUnitRunner.class) public class ClubBeanTest { @InjectMocks ClubBean proband; @Mock EntityManagerFactory emf; @Mock EntityManager em; @Mock Query q; /** * @throws java.lang.Exception */ @Before public void setUp() throws Exception { // proband = new ClubBean(); // emf = mock(EntityManagerFactory.class); // em = mock(EntityManager.class); // q = mock(Query.class); // Class<? extends ClubBean> probandClass = proband.getClass(); // Field emfField = probandClass.getDeclaredField("emf"); // emfField.setAccessible(true); // emfField.set(proband, emf); when(emf.createEntityManager()).thenReturn(em); when(em.createNamedQuery("Club.findAll")).thenReturn(q); } /** * Tests if the clubs list field is initialized from the database. Test * method for {@link de.rennspur.beans.ClubBean#init()}. */ @Test public void testIfInitInitializesFieldClubs() { proband.init(); assertNotNull(proband.getClubs()); } /** * Test method for {@link de.rennspur.beans.ClubBean#insertNewClub()}. */ @Test public void testInsertNewClub() { // fail("Not yet implemented"); } /** * Test method for * {@link de.rennspur.beans.ClubBean#onRowSelect(org.primefaces.event.SelectEvent)}. */ @Test public void testOnRowSelect() { // fail("Not yet implemented"); } }
#40 other tests still fail because the implementation is missing
src/test/java/de/rennspur/test/beans/ClubBeanTest.java
#40 other tests still fail because the implementation is missing
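The ClubBeanTest above wires a mocked EntityManagerFactory, EntityManager, and Query together so that ClubBean never touches a real database. Below is a self-contained sketch of that stubbing chain with the result list also stubbed; the class name, the stubbed string results, and the extra assertion are placeholders for illustration, not part of the rennspur sources.

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Arrays;
import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Query;

import org.junit.Test;

public class QueryStubbingSketch {

    @Test
    public void namedQueryReturnsStubbedResult() {
        EntityManagerFactory emf = mock(EntityManagerFactory.class);
        EntityManager em = mock(EntityManager.class);
        Query q = mock(Query.class);

        // Same chain as in ClubBeanTest: factory -> manager -> named query.
        when(emf.createEntityManager()).thenReturn(em);
        when(em.createNamedQuery("Club.findAll")).thenReturn(q);
        // Completing the chain with a result list gives the code under test
        // real (stubbed) data to iterate instead of a live database.
        when(q.getResultList()).thenReturn(Arrays.asList("club1", "club2"));

        List<?> clubs = emf.createEntityManager()
                           .createNamedQuery("Club.findAll")
                           .getResultList();
        assertEquals(2, clubs.size());
    }
}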
Java
lgpl-2.1
e9f983ad785536c68c395f6767685edd94e9eed0
0
cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl
package org.cytoscape.ding; import static org.cytoscape.work.ServiceProperties.*; import java.net.URL; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Properties; import java.util.Set; import org.cytoscape.application.NetworkViewRenderer; import org.cytoscape.application.events.SetCurrentNetworkViewListener; import org.cytoscape.application.swing.CyAction; import org.cytoscape.application.swing.CyEdgeViewContextMenuFactory; import org.cytoscape.application.swing.CyNetworkViewContextMenuFactory; import org.cytoscape.application.swing.CyNodeViewContextMenuFactory; import org.cytoscape.application.swing.CytoPanelComponent; import org.cytoscape.ding.action.GraphicsDetailAction; import org.cytoscape.ding.customgraphics.CustomGraphicsManager; import org.cytoscape.ding.customgraphics.CustomGraphicsTranslator; import org.cytoscape.ding.customgraphics.CyCustomGraphics2Manager; import org.cytoscape.ding.customgraphics.CyCustomGraphics2ManagerImpl; import org.cytoscape.ding.customgraphics.bitmap.URLImageCustomGraphicsFactory; import org.cytoscape.ding.customgraphics.vector.GradientOvalFactory; import org.cytoscape.ding.customgraphics.vector.GradientRoundRectangleFactory; import org.cytoscape.ding.customgraphicsmgr.internal.CustomGraphicsManagerImpl; import org.cytoscape.ding.customgraphicsmgr.internal.action.CustomGraphicsManagerAction; import org.cytoscape.ding.customgraphicsmgr.internal.ui.CustomGraphicsBrowser; import org.cytoscape.ding.debug.DingDebugPanel; import org.cytoscape.ding.dependency.CustomGraphicsSizeDependencyFactory; import org.cytoscape.ding.dependency.EdgeColorDependencyFactory; import org.cytoscape.ding.dependency.NodeSizeDependencyFactory; import org.cytoscape.ding.impl.AddEdgeNodeViewTaskFactoryImpl; import org.cytoscape.ding.impl.BendFactoryImpl; import org.cytoscape.ding.impl.DingGraphLOD; import org.cytoscape.ding.impl.DingNetworkViewFactory; import org.cytoscape.ding.impl.DingRenderer; import org.cytoscape.ding.impl.HandleFactoryImpl; import org.cytoscape.ding.impl.NVLTFActionSupport; import org.cytoscape.ding.impl.ViewTaskFactoryListener; // Annotation creation import org.cytoscape.ding.impl.cyannotator.AnnotationFactoryManager; import org.cytoscape.ding.impl.cyannotator.AnnotationManagerImpl; import org.cytoscape.ding.impl.cyannotator.AnnotationTree.Shift; import org.cytoscape.ding.impl.cyannotator.create.ArrowAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.BoundedTextAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.GroupAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.ImageAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.ShapeAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.TextAnnotationFactory; // Annotation edits and changes import org.cytoscape.ding.impl.cyannotator.tasks.AddAnnotationTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.AddArrowTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.EditAnnotationTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.GroupAnnotationsTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.RemoveAnnotationTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.ReorderSelectedAnnotationsTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.UngroupAnnotationsTaskFactory; import org.cytoscape.ding.impl.cyannotator.ui.AnnotationMediator; import org.cytoscape.ding.impl.editor.CustomGraphicsVisualPropertyEditor; import 
org.cytoscape.ding.impl.editor.CyCustomGraphicsValueEditor; import org.cytoscape.ding.impl.editor.EdgeBendEditor; import org.cytoscape.ding.impl.editor.EdgeBendValueEditor; import org.cytoscape.ding.impl.editor.ObjectPositionEditor; import org.cytoscape.ding.internal.charts.bar.BarChartFactory; import org.cytoscape.ding.internal.charts.box.BoxChartFactory; import org.cytoscape.ding.internal.charts.heatmap.HeatMapChartFactory; import org.cytoscape.ding.internal.charts.line.LineChartFactory; import org.cytoscape.ding.internal.charts.pie.PieChartFactory; import org.cytoscape.ding.internal.charts.ring.RingChartFactory; import org.cytoscape.ding.internal.gradients.linear.LinearGradientFactory; import org.cytoscape.ding.internal.gradients.radial.RadialGradientFactory; import org.cytoscape.model.CyNetwork; import org.cytoscape.property.PropertyUpdatedListener; import org.cytoscape.service.util.AbstractCyActivator; import org.cytoscape.service.util.CyServiceRegistrar; import org.cytoscape.task.EdgeViewTaskFactory; import org.cytoscape.task.NetworkViewLocationTaskFactory; import org.cytoscape.task.NetworkViewTaskFactory; import org.cytoscape.task.NodeViewTaskFactory; import org.cytoscape.view.model.CyNetworkViewConfig; import org.cytoscape.view.model.CyNetworkViewFactory; import org.cytoscape.view.model.CyNetworkViewFactoryFactory; import org.cytoscape.view.model.VisualLexicon; import org.cytoscape.view.model.events.NetworkViewAboutToBeDestroyedListener; import org.cytoscape.view.presentation.RenderingEngineFactory; import org.cytoscape.view.presentation.annotations.Annotation; import org.cytoscape.view.presentation.annotations.AnnotationFactory; import org.cytoscape.view.presentation.annotations.AnnotationManager; import org.cytoscape.view.presentation.customgraphics.CyCustomGraphics; import org.cytoscape.view.presentation.customgraphics.CyCustomGraphics2Factory; import org.cytoscape.view.presentation.customgraphics.CyCustomGraphicsFactory; import org.cytoscape.view.presentation.property.values.BendFactory; import org.cytoscape.view.presentation.property.values.HandleFactory; import org.cytoscape.view.vizmap.VisualPropertyDependencyFactory; import org.cytoscape.view.vizmap.gui.editor.ContinuousMappingCellRendererFactory; import org.cytoscape.view.vizmap.gui.editor.ValueEditor; import org.cytoscape.view.vizmap.gui.editor.VisualPropertyEditor; import org.cytoscape.view.vizmap.mappings.ValueTranslator; import org.osgi.framework.BundleContext; /* * #%L * Cytoscape Ding View/Presentation Impl (ding-presentation-impl) * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2006 - 2019 The Cytoscape Consortium * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation, either version 2.1 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Lesser Public License for more details. * * You should have received a copy of the GNU General Lesser Public * License along with this program. If not, see * <http://www.gnu.org/licenses/lgpl-2.1.html>. 
* #L% */ public class CyActivator extends AbstractCyActivator { // Set to true to enable the Ding Debug panel public static final boolean DEBUG = false; private CustomGraphicsManager cgManager; private CyCustomGraphics2Manager cg2Manager; private CustomGraphicsBrowser cgBrowser; @Override public void start(BundleContext bc) { CyServiceRegistrar serviceRegistrar = getService(bc, CyServiceRegistrar.class); startCustomGraphicsMgr(bc, serviceRegistrar); startCharts(bc, serviceRegistrar); startGradients(bc, serviceRegistrar); startPresentationImpl(bc, serviceRegistrar); if(DEBUG) { DingDebugPanel debugPanel = new DingDebugPanel(serviceRegistrar); registerService(bc, debugPanel, CytoPanelComponent.class); registerService(bc, debugPanel, SetCurrentNetworkViewListener.class); } } private void startPresentationImpl(final BundleContext bc, final CyServiceRegistrar serviceRegistrar) { DVisualLexicon dVisualLexicon = new DVisualLexicon(cgManager); NVLTFActionSupport nvltfActionSupport = new NVLTFActionSupport(serviceRegistrar); ViewTaskFactoryListener vtfListener = new ViewTaskFactoryListener(nvltfActionSupport); registerService(bc, vtfListener, ViewTaskFactoryListener.class); AnnotationFactoryManager annotationFactoryManager = new AnnotationFactoryManager(); AnnotationManager annotationManager = new AnnotationManagerImpl(serviceRegistrar); DingGraphLOD dingGraphLOD = new DingGraphLOD(serviceRegistrar); registerService(bc, dingGraphLOD, PropertyUpdatedListener.class); HandleFactory handleFactory = new HandleFactoryImpl(); registerService(bc, handleFactory, HandleFactory.class); AddEdgeNodeViewTaskFactoryImpl addEdgeNodeViewTaskFactory = new AddEdgeNodeViewTaskFactoryImpl(serviceRegistrar); ContinuousMappingCellRendererFactory continuousMappingCellRendererFactory = getService(bc, ContinuousMappingCellRendererFactory.class); // Object Position Editor ObjectPositionValueEditor objectPositionValueEditor = new ObjectPositionValueEditor(); ObjectPositionEditor objectPositionEditor = new ObjectPositionEditor(objectPositionValueEditor, continuousMappingCellRendererFactory, serviceRegistrar); CyNetworkViewFactoryFactory netViewFactoryFactory = getService(bc, CyNetworkViewFactoryFactory.class); CyNetworkViewConfig viewFactoryConfig = DingNetworkViewFactory.getNetworkViewConfig(netViewFactoryFactory, dVisualLexicon); CyNetworkViewFactory netViewFactory = netViewFactoryFactory.createNetworkViewFactory(dVisualLexicon, DingRenderer.ID, viewFactoryConfig); DingNetworkViewFactory dingNetViewFactory = new DingNetworkViewFactory(netViewFactory, dVisualLexicon, annotationFactoryManager, dingGraphLOD, handleFactory, serviceRegistrar); registerService(bc, dingNetViewFactory, NetworkViewAboutToBeDestroyedListener.class); DingRenderer renderer = new DingRenderer(dingNetViewFactory, dVisualLexicon, serviceRegistrar); registerService(bc, renderer, NetworkViewRenderer.class); registerService(bc, renderer, DingRenderer.class); RenderingEngineFactory<CyNetwork> dingRenderingEngineFactory = renderer.getRenderingEngineFactory(DingRenderer.DEFAULT_CONTEXT); // Edge Bend editor EdgeBendValueEditor edgeBendValueEditor = new EdgeBendValueEditor(dingNetViewFactory, dingRenderingEngineFactory, serviceRegistrar); EdgeBendEditor edgeBendEditor = new EdgeBendEditor(edgeBendValueEditor, continuousMappingCellRendererFactory, serviceRegistrar); Properties dingRenderingEngineFactoryProps = new Properties(); dingRenderingEngineFactoryProps.setProperty(ID, "ding"); registerAllServices(bc, dingRenderingEngineFactory, 
dingRenderingEngineFactoryProps); // Properties dingNavigationRenderingEngineFactoryProps = new Properties(); // dingNavigationRenderingEngineFactoryProps.setProperty(ID, "dingNavigation"); // registerAllServices(bc, dingNavigationRenderingEngineFactory, dingNavigationRenderingEngineFactoryProps); Properties addEdgeNodeViewTaskFactoryProps = new Properties(); addEdgeNodeViewTaskFactoryProps.setProperty(PREFERRED_ACTION, "Edge"); addEdgeNodeViewTaskFactoryProps.setProperty(PREFERRED_MENU, NODE_ADD_MENU); addEdgeNodeViewTaskFactoryProps.setProperty(TITLE, "Edge"); addEdgeNodeViewTaskFactoryProps.setProperty(MENU_GRAVITY, "0.1"); registerService(bc, addEdgeNodeViewTaskFactory, NodeViewTaskFactory.class, addEdgeNodeViewTaskFactoryProps); Properties dVisualLexiconProps = new Properties(); dVisualLexiconProps.setProperty(ID, "ding"); registerService(bc, dVisualLexicon, VisualLexicon.class, dVisualLexiconProps); final Properties positionEditorProp = new Properties(); positionEditorProp.setProperty(ID, "objectPositionValueEditor"); registerService(bc, objectPositionValueEditor, ValueEditor.class, positionEditorProp); final Properties objectPositionEditorProp = new Properties(); objectPositionEditorProp.setProperty(ID, "objectPositionEditor"); registerService(bc, objectPositionEditor, VisualPropertyEditor.class, objectPositionEditorProp); registerAllServices(bc, edgeBendValueEditor); registerService(bc, edgeBendEditor, VisualPropertyEditor.class); // Annotation Manager registerServiceListener(bc, annotationFactoryManager::addAnnotationFactory, annotationFactoryManager::removeAnnotationFactory, AnnotationFactory.class); registerService(bc, annotationManager, AnnotationManager.class); // Annotations UI AnnotationMediator annotationMediator = new AnnotationMediator(serviceRegistrar); registerServiceListener(bc, annotationMediator::addAnnotationFactory, annotationMediator::removeAnnotationFactory, AnnotationFactory.class); registerAllServices(bc, annotationMediator); // Annotation Factories (the order they are registered is the order they appear in the UI) AnnotationFactory<?> textAnnotationFactory = new TextAnnotationFactory(serviceRegistrar); Properties textFactory = new Properties(); textFactory.setProperty("type","TextAnnotation.class"); registerService(bc, textAnnotationFactory, AnnotationFactory.class, textFactory); AnnotationFactory<?> boundedAnnotationFactory = new BoundedTextAnnotationFactory(serviceRegistrar); Properties boundedFactory = new Properties(); boundedFactory.setProperty("type","BoundedTextAnnotation.class"); registerService(bc, boundedAnnotationFactory, AnnotationFactory.class, boundedFactory); AnnotationFactory<?> shapeAnnotationFactory = new ShapeAnnotationFactory(serviceRegistrar); Properties shapeFactory = new Properties(); shapeFactory.setProperty("type","ShapeAnnotation.class"); registerService(bc, shapeAnnotationFactory, AnnotationFactory.class, shapeFactory); AnnotationFactory<?> imageAnnotationFactory = new ImageAnnotationFactory(serviceRegistrar); Properties imageFactory = new Properties(); imageFactory.setProperty("type","ImageAnnotation.class"); registerService(bc, imageAnnotationFactory, AnnotationFactory.class, imageFactory); AnnotationFactory<?> arrowAnnotationFactory = new ArrowAnnotationFactory(serviceRegistrar); Properties arrowFactory = new Properties(); arrowFactory.setProperty("type","ArrowAnnotation.class"); registerService(bc, arrowAnnotationFactory, AnnotationFactory.class, arrowFactory); AnnotationFactory<?> groupAnnotationFactory = new 
GroupAnnotationFactory(serviceRegistrar); Properties groupFactory = new Properties(); groupFactory.setProperty("type","GroupAnnotation.class"); registerService(bc, groupAnnotationFactory, AnnotationFactory.class, groupFactory); // Annotation Task Factories AddArrowTaskFactory addArrowTaskFactory = new AddArrowTaskFactory(arrowAnnotationFactory, renderer); Properties addArrowTaskFactoryProps = new Properties(); addArrowTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addArrowTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addArrowTaskFactoryProps.setProperty(MENU_GRAVITY, "1.2"); addArrowTaskFactoryProps.setProperty(TITLE, "Arrow Annotation..."); registerService(bc, addArrowTaskFactory, NetworkViewLocationTaskFactory.class, addArrowTaskFactoryProps); AddAnnotationTaskFactory addImageTaskFactory = new AddAnnotationTaskFactory(imageAnnotationFactory, renderer); Properties addImageTaskFactoryProps = new Properties(); addImageTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addImageTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addImageTaskFactoryProps.setProperty(MENU_GRAVITY, "1.3"); addImageTaskFactoryProps.setProperty(TITLE, "Image Annotation..."); registerService(bc, addImageTaskFactory, NetworkViewLocationTaskFactory.class, addImageTaskFactoryProps); AddAnnotationTaskFactory addShapeTaskFactory = new AddAnnotationTaskFactory(shapeAnnotationFactory, renderer); Properties addShapeTaskFactoryProps = new Properties(); addShapeTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addShapeTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addShapeTaskFactoryProps.setProperty(MENU_GRAVITY, "1.4"); addShapeTaskFactoryProps.setProperty(TITLE, "Shape Annotation..."); registerService(bc, addShapeTaskFactory, NetworkViewLocationTaskFactory.class, addShapeTaskFactoryProps); AddAnnotationTaskFactory addTextTaskFactory = new AddAnnotationTaskFactory(textAnnotationFactory, renderer); Properties addTextTaskFactoryProps = new Properties(); addTextTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addTextTaskFactoryProps.setProperty(MENU_GRAVITY, "1.5"); addTextTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addTextTaskFactoryProps.setProperty(TITLE, "Text Annotation..."); registerService(bc, addTextTaskFactory, NetworkViewLocationTaskFactory.class, addTextTaskFactoryProps); AddAnnotationTaskFactory addBoundedTextTaskFactory = new AddAnnotationTaskFactory(boundedAnnotationFactory, renderer); Properties addBoundedTextTaskFactoryProps = new Properties(); addBoundedTextTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addBoundedTextTaskFactoryProps.setProperty(MENU_GRAVITY, "1.6"); addBoundedTextTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addBoundedTextTaskFactoryProps.setProperty(TITLE, "Bounded Text Annotation..."); registerService(bc, addBoundedTextTaskFactory, NetworkViewLocationTaskFactory.class, addBoundedTextTaskFactoryProps); // Annotation edit EditAnnotationTaskFactory editAnnotationTaskFactory = new EditAnnotationTaskFactory(renderer); Properties editAnnotationTaskFactoryProps = new Properties(); editAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); editAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "2.0"); editAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); editAnnotationTaskFactoryProps.setProperty(TITLE, "Modify Annotation..."); editAnnotationTaskFactoryProps.setProperty(INSERT_SEPARATOR_BEFORE, "true"); registerService(bc, 
editAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, editAnnotationTaskFactoryProps); /* MoveAnnotationTaskFactory moveAnnotationTaskFactory = new MoveAnnotationTaskFactory(); Properties moveAnnotationTaskFactoryProps = new Properties(); moveAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); moveAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "2.1"); moveAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); moveAnnotationTaskFactoryProps.setProperty(TITLE, "Move Annotation"); registerService(bc, moveAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, moveAnnotationTaskFactoryProps); */ // Reorder Selected Annotations - Edit Menu { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Shift.TO_FRONT); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Bring Annotations to Front"); props.setProperty(ACCELERATOR, "shift cmd CLOSE_BRACKET"); props.setProperty(MENU_GRAVITY, "6.1"); props.setProperty(INSERT_SEPARATOR_BEFORE, "true"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Shift.UP_ONE); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Bring Annotations Forward"); props.setProperty(ACCELERATOR, "cmd CLOSE_BRACKET"); props.setProperty(MENU_GRAVITY, "6.2"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Shift.DOWN_ONE); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Send Annotations Backward"); props.setProperty(ACCELERATOR, "cmd OPEN_BRACKET"); props.setProperty(MENU_GRAVITY, "6.3"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Shift.TO_BACK); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Send Annotations to Back"); props.setProperty(ACCELERATOR, "shift cmd OPEN_BRACKET"); props.setProperty(MENU_GRAVITY, "6.4"); props.setProperty(INSERT_SEPARATOR_AFTER, "true"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Annotation.FOREGROUND); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(MENU_GRAVITY, "6.5"); props.setProperty(TITLE, "Pull Annotations to Foreground Layer"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Annotation.BACKGROUND); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Push Annotations to Background Layer"); props.setProperty(MENU_GRAVITY, "6.6"); props.setProperty(INSERT_SEPARATOR_AFTER, "true"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } /* ResizeAnnotationTaskFactory resizeAnnotationTaskFactory = new ResizeAnnotationTaskFactory(); Properties resizeAnnotationTaskFactoryProps = new Properties(); 
resizeAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); resizeAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "2.3"); resizeAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); resizeAnnotationTaskFactoryProps.setProperty(TITLE, "Resize Annotation"); registerService(bc, resizeAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, resizeAnnotationTaskFactoryProps); */ // Annotation delete RemoveAnnotationTaskFactory removeAnnotationTaskFactory = new RemoveAnnotationTaskFactory(renderer); Properties removeAnnotationTaskFactoryProps = new Properties(); removeAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); removeAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "1.1"); removeAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_DELETE_MENU); removeAnnotationTaskFactoryProps.setProperty(TITLE, "Annotation"); registerService(bc, removeAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, removeAnnotationTaskFactoryProps); /* // Annotation select SelectAnnotationTaskFactory selectAnnotationTaskFactory = new SelectAnnotationTaskFactory(); Properties selectAnnotationTaskFactoryProps = new Properties(); selectAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); selectAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "1.1"); selectAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_SELECT_MENU); selectAnnotationTaskFactoryProps.setProperty(TITLE, "Select/Unselect Annotation"); registerService(bc, selectAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, selectAnnotationTaskFactoryProps); */ // Annotation group GroupAnnotationsTaskFactory groupAnnotationTaskFactory = new GroupAnnotationsTaskFactory(renderer); Properties groupAnnotationTaskFactoryProps = new Properties(); groupAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); groupAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "100"); groupAnnotationTaskFactoryProps.setProperty(INSERT_SEPARATOR_BEFORE, "true"); groupAnnotationTaskFactoryProps.setProperty(IN_MENU_BAR, "false"); groupAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_GROUP_MENU); groupAnnotationTaskFactoryProps.setProperty(TITLE, "Group Annotations"); registerService(bc, groupAnnotationTaskFactory, NetworkViewTaskFactory.class, groupAnnotationTaskFactoryProps); // Annotation ungroup UngroupAnnotationsTaskFactory ungroupAnnotationTaskFactory = new UngroupAnnotationsTaskFactory(renderer); Properties ungroupAnnotationTaskFactoryProps = new Properties(); ungroupAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); ungroupAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "100"); ungroupAnnotationTaskFactoryProps.setProperty(INSERT_SEPARATOR_BEFORE, "true"); ungroupAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_GROUP_MENU); ungroupAnnotationTaskFactoryProps.setProperty(TITLE, "Ungroup Annotations"); registerService(bc, ungroupAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, ungroupAnnotationTaskFactoryProps); // Set mouse drag selection modes SelectModeAction selectNodesOnlyAction = new SelectModeAction(SelectModeAction.NODES, 0.5f, serviceRegistrar); registerAllServices(bc, selectNodesOnlyAction); SelectModeAction selectEdgesOnlyAction = new SelectModeAction(SelectModeAction.EDGES, 0.6f, serviceRegistrar); registerAllServices(bc, selectEdgesOnlyAction); SelectModeAction selectAnnotationsOnlyAction = new SelectModeAction(SelectModeAction.ANNOTATIONS, 0.7f, serviceRegistrar); registerAllServices(bc, 
selectAnnotationsOnlyAction); SelectModeAction selectNodesEdgesAction = new SelectModeAction(SelectModeAction.NODES_EDGES, 0.8f, serviceRegistrar); registerAllServices(bc, selectNodesEdgesAction); SelectModeAction selectAllAction = new SelectModeAction(SelectModeAction.ALL, 0.9f, serviceRegistrar); registerAllServices(bc, selectAllAction); { // Toggle Graphics Details ShowGraphicsDetailsTaskFactory factory = new ShowGraphicsDetailsTaskFactory(); Properties props = new Properties(); props.setProperty(ID, "showGraphicsDetailsTaskFactory"); registerService(bc, factory, NetworkViewTaskFactory.class, props); // Used at least by cyREST // Main menu GraphicsDetailAction mainMenuAction = new GraphicsDetailAction(5.0f, "View", factory, serviceRegistrar); registerAllServices(bc, mainMenuAction); } final String vtfFilter = String.format("(| (!(%s=*)) (%s=true))", IN_CONTEXT_MENU, IN_CONTEXT_MENU); // if IN_CONTEXT_MENU is not specified, default to true registerServiceListener(bc, vtfListener::addNodeViewTaskFactory, vtfListener::removeNodeViewTaskFactory, NodeViewTaskFactory.class, vtfFilter); registerServiceListener(bc, vtfListener::addEdgeViewTaskFactory, vtfListener::removeEdgeViewTaskFactory, EdgeViewTaskFactory.class, vtfFilter); registerServiceListener(bc, vtfListener::addNetworkViewTaskFactory, vtfListener::removeNetworkViewTaskFactory, NetworkViewTaskFactory.class, vtfFilter); registerServiceListener(bc, vtfListener::addNetworkViewLocationTaskFactory, vtfListener::removeNetworkViewLocationTaskFactory, NetworkViewLocationTaskFactory.class); registerServiceListener(bc, vtfListener::addCyEdgeViewContextMenuFactory, vtfListener::removeCyEdgeViewContextMenuFactory, CyEdgeViewContextMenuFactory.class); registerServiceListener(bc, vtfListener::addCyNodeViewContextMenuFactory, vtfListener::removeCyNodeViewContextMenuFactory, CyNodeViewContextMenuFactory.class); registerServiceListener(bc, vtfListener::addCyNetworkViewContextMenuFactory, vtfListener::removeCyNetworkViewContextMenuFactory, CyNetworkViewContextMenuFactory.class); registerServiceListener(bc, annotationFactoryManager::addAnnotationFactory, annotationFactoryManager::removeAnnotationFactory, AnnotationFactory.class); BendFactory bendFactory = new BendFactoryImpl(); registerService(bc, bendFactory, BendFactory.class); // Register the factory dVisualLexicon.addBendFactory(bendFactory, new HashMap<Object, Object>()); // Translators for Passthrough final CustomGraphicsTranslator cgTranslator = new CustomGraphicsTranslator(cgManager, cg2Manager); registerService(bc, cgTranslator, ValueTranslator.class); // Factories for Visual Property Dependency final NodeSizeDependencyFactory nodeSizeDependencyFactory = new NodeSizeDependencyFactory(dVisualLexicon); registerService(bc, nodeSizeDependencyFactory, VisualPropertyDependencyFactory.class); final EdgeColorDependencyFactory edgeColorDependencyFactory = new EdgeColorDependencyFactory(dVisualLexicon); registerService(bc, edgeColorDependencyFactory, VisualPropertyDependencyFactory.class); final CustomGraphicsSizeDependencyFactory cgSizeDependencyFactory = new CustomGraphicsSizeDependencyFactory( dVisualLexicon); registerService(bc, cgSizeDependencyFactory, VisualPropertyDependencyFactory.class); // Custom Graphics Editors final CyCustomGraphicsValueEditor cgValueEditor = new CyCustomGraphicsValueEditor(cgManager, cg2Manager, cgBrowser, serviceRegistrar); registerAllServices(bc, cgValueEditor); final CustomGraphicsVisualPropertyEditor cgVisualPropertyEditor = new CustomGraphicsVisualPropertyEditor( 
CyCustomGraphics.class, cgValueEditor, continuousMappingCellRendererFactory, serviceRegistrar); registerService(bc, cgVisualPropertyEditor, VisualPropertyEditor.class); } private void startCustomGraphicsMgr(final BundleContext bc, final CyServiceRegistrar serviceRegistrar) { cgManager = new CustomGraphicsManagerImpl(getdefaultImageURLs(bc), serviceRegistrar); registerAllServices(bc, cgManager); cgBrowser = new CustomGraphicsBrowser(cgManager); registerAllServices(bc, cgBrowser); CustomGraphicsManagerAction cgManagerAction = new CustomGraphicsManagerAction(cgManager, cgBrowser, serviceRegistrar); registerService(bc, cgManagerAction, CyAction.class); // Create and register our built-in factories. // TODO: When the CustomGraphicsFactory service stuff is set up, just // register these as services URLImageCustomGraphicsFactory imageFactory = new URLImageCustomGraphicsFactory(cgManager); cgManager.addCustomGraphicsFactory(imageFactory, new Properties()); GradientOvalFactory ovalFactory = new GradientOvalFactory(cgManager); cgManager.addCustomGraphicsFactory(ovalFactory, new Properties()); GradientRoundRectangleFactory rectangleFactory = new GradientRoundRectangleFactory(cgManager); cgManager.addCustomGraphicsFactory(rectangleFactory, new Properties()); // Register this service listener so that app writers can provide their own CustomGraphics factories registerServiceListener(bc, cgManager::addCustomGraphicsFactory, cgManager::removeCustomGraphicsFactory, CyCustomGraphicsFactory.class); // Register this service listener so that app writers can provide their own CyCustomGraphics2 factories cg2Manager = CyCustomGraphics2ManagerImpl.getInstance(); registerAllServices(bc, cg2Manager); registerServiceListener(bc, ((CyCustomGraphics2ManagerImpl)cg2Manager)::addFactory, ((CyCustomGraphics2ManagerImpl)cg2Manager)::removeFactory, CyCustomGraphics2Factory.class); } private void startCharts(final BundleContext bc, final CyServiceRegistrar serviceRegistrar) { // Register Chart Factories final Properties factoryProps = new Properties(); factoryProps.setProperty(CyCustomGraphics2Factory.GROUP, CyCustomGraphics2Manager.GROUP_CHARTS); { final BarChartFactory factory = new BarChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final BoxChartFactory factory = new BoxChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final PieChartFactory factory = new PieChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final RingChartFactory factory = new RingChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final LineChartFactory factory = new LineChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final HeatMapChartFactory factory = new HeatMapChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } } private void startGradients(final BundleContext bc, final CyServiceRegistrar serviceRegistrar) { // Register Gradient Factories final Properties factoryProps = new Properties(); factoryProps.setProperty(CyCustomGraphics2Factory.GROUP, CyCustomGraphics2Manager.GROUP_GRADIENTS); { final LinearGradientFactory factory = new LinearGradientFactory(); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final RadialGradientFactory factory = new 
RadialGradientFactory(); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } } /** * Get list of default images from resource. */ private Set<URL> getdefaultImageURLs(final BundleContext bc) { Enumeration<URL> e = bc.getBundle().findEntries("images/sampleCustomGraphics", "*.png", true); final Set<URL> defaultImageUrls = new HashSet<>(); while (e.hasMoreElements()) defaultImageUrls.add(e.nextElement()); return defaultImageUrls; } }
ding-impl/ding-presentation-impl/src/main/java/org/cytoscape/ding/CyActivator.java
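The vtfFilter built in startPresentationImpl() above restricts the registered service listeners to task factories that either omit the IN_CONTEXT_MENU property or set it to "true". The standalone sketch below shows how that OSGi filter evaluates against service properties; it reuses the same format string as CyActivator, while the class name and the Hashtable contents are hypothetical.

import static org.cytoscape.work.ServiceProperties.IN_CONTEXT_MENU;

import java.util.Hashtable;

import org.osgi.framework.Filter;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.InvalidSyntaxException;

public class ContextMenuFilterSketch {

    public static void main(String[] args) throws InvalidSyntaxException {
        // Same format string as in CyActivator.startPresentationImpl().
        String vtfFilter = String.format("(| (!(%s=*)) (%s=true))", IN_CONTEXT_MENU, IN_CONTEXT_MENU);
        Filter filter = FrameworkUtil.createFilter(vtfFilter);

        Hashtable<String, Object> absent = new Hashtable<>();    // property not set
        Hashtable<String, Object> enabled = new Hashtable<>();
        enabled.put(IN_CONTEXT_MENU, "true");
        Hashtable<String, Object> disabled = new Hashtable<>();
        disabled.put(IN_CONTEXT_MENU, "false");

        System.out.println(filter.match(absent));    // true  -> factory is picked up
        System.out.println(filter.match(enabled));   // true  -> factory is picked up
        System.out.println(filter.match(disabled));  // false -> factory is skipped
    }
}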
package org.cytoscape.ding; import static org.cytoscape.work.ServiceProperties.*; import java.net.URL; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Properties; import java.util.Set; import org.cytoscape.application.NetworkViewRenderer; import org.cytoscape.application.events.SetCurrentNetworkViewListener; import org.cytoscape.application.swing.CyAction; import org.cytoscape.application.swing.CyEdgeViewContextMenuFactory; import org.cytoscape.application.swing.CyNetworkViewContextMenuFactory; import org.cytoscape.application.swing.CyNodeViewContextMenuFactory; import org.cytoscape.application.swing.CytoPanelComponent; import org.cytoscape.ding.action.GraphicsDetailAction; import org.cytoscape.ding.customgraphics.CustomGraphicsManager; import org.cytoscape.ding.customgraphics.CustomGraphicsTranslator; import org.cytoscape.ding.customgraphics.CyCustomGraphics2Manager; import org.cytoscape.ding.customgraphics.CyCustomGraphics2ManagerImpl; import org.cytoscape.ding.customgraphics.bitmap.URLImageCustomGraphicsFactory; import org.cytoscape.ding.customgraphics.vector.GradientOvalFactory; import org.cytoscape.ding.customgraphics.vector.GradientRoundRectangleFactory; import org.cytoscape.ding.customgraphicsmgr.internal.CustomGraphicsManagerImpl; import org.cytoscape.ding.customgraphicsmgr.internal.action.CustomGraphicsManagerAction; import org.cytoscape.ding.customgraphicsmgr.internal.ui.CustomGraphicsBrowser; import org.cytoscape.ding.debug.DingDebugPanel; import org.cytoscape.ding.dependency.CustomGraphicsSizeDependencyFactory; import org.cytoscape.ding.dependency.EdgeColorDependencyFactory; import org.cytoscape.ding.dependency.NodeSizeDependencyFactory; import org.cytoscape.ding.impl.AddEdgeNodeViewTaskFactoryImpl; import org.cytoscape.ding.impl.BendFactoryImpl; import org.cytoscape.ding.impl.DingGraphLOD; import org.cytoscape.ding.impl.DingNetworkViewFactory; import org.cytoscape.ding.impl.DingRenderer; import org.cytoscape.ding.impl.HandleFactoryImpl; import org.cytoscape.ding.impl.NVLTFActionSupport; import org.cytoscape.ding.impl.ViewTaskFactoryListener; // Annotation creation import org.cytoscape.ding.impl.cyannotator.AnnotationFactoryManager; import org.cytoscape.ding.impl.cyannotator.AnnotationManagerImpl; import org.cytoscape.ding.impl.cyannotator.AnnotationTree.Shift; import org.cytoscape.ding.impl.cyannotator.create.ArrowAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.BoundedTextAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.GroupAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.ImageAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.ShapeAnnotationFactory; import org.cytoscape.ding.impl.cyannotator.create.TextAnnotationFactory; // Annotation edits and changes import org.cytoscape.ding.impl.cyannotator.tasks.AddAnnotationTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.AddArrowTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.EditAnnotationTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.GroupAnnotationsTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.RemoveAnnotationTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.ReorderSelectedAnnotationsTaskFactory; import org.cytoscape.ding.impl.cyannotator.tasks.UngroupAnnotationsTaskFactory; import org.cytoscape.ding.impl.cyannotator.ui.AnnotationMediator; import org.cytoscape.ding.impl.editor.CustomGraphicsVisualPropertyEditor; import 
org.cytoscape.ding.impl.editor.CyCustomGraphicsValueEditor; import org.cytoscape.ding.impl.editor.EdgeBendEditor; import org.cytoscape.ding.impl.editor.EdgeBendValueEditor; import org.cytoscape.ding.impl.editor.ObjectPositionEditor; import org.cytoscape.ding.internal.charts.bar.BarChartFactory; import org.cytoscape.ding.internal.charts.box.BoxChartFactory; import org.cytoscape.ding.internal.charts.heatmap.HeatMapChartFactory; import org.cytoscape.ding.internal.charts.line.LineChartFactory; import org.cytoscape.ding.internal.charts.pie.PieChartFactory; import org.cytoscape.ding.internal.charts.ring.RingChartFactory; import org.cytoscape.ding.internal.gradients.linear.LinearGradientFactory; import org.cytoscape.ding.internal.gradients.radial.RadialGradientFactory; import org.cytoscape.model.CyNetwork; import org.cytoscape.property.PropertyUpdatedListener; import org.cytoscape.service.util.AbstractCyActivator; import org.cytoscape.service.util.CyServiceRegistrar; import org.cytoscape.task.EdgeViewTaskFactory; import org.cytoscape.task.NetworkViewLocationTaskFactory; import org.cytoscape.task.NetworkViewTaskFactory; import org.cytoscape.task.NodeViewTaskFactory; import org.cytoscape.view.model.CyNetworkViewConfig; import org.cytoscape.view.model.CyNetworkViewFactory; import org.cytoscape.view.model.CyNetworkViewFactoryFactory; import org.cytoscape.view.model.VisualLexicon; import org.cytoscape.view.model.events.NetworkViewAboutToBeDestroyedListener; import org.cytoscape.view.presentation.RenderingEngineFactory; import org.cytoscape.view.presentation.annotations.Annotation; import org.cytoscape.view.presentation.annotations.AnnotationFactory; import org.cytoscape.view.presentation.annotations.AnnotationManager; import org.cytoscape.view.presentation.customgraphics.CyCustomGraphics; import org.cytoscape.view.presentation.customgraphics.CyCustomGraphics2Factory; import org.cytoscape.view.presentation.customgraphics.CyCustomGraphicsFactory; import org.cytoscape.view.presentation.property.values.BendFactory; import org.cytoscape.view.presentation.property.values.HandleFactory; import org.cytoscape.view.vizmap.VisualPropertyDependencyFactory; import org.cytoscape.view.vizmap.gui.editor.ContinuousMappingCellRendererFactory; import org.cytoscape.view.vizmap.gui.editor.ValueEditor; import org.cytoscape.view.vizmap.gui.editor.VisualPropertyEditor; import org.cytoscape.view.vizmap.mappings.ValueTranslator; import org.osgi.framework.BundleContext; /* * #%L * Cytoscape Ding View/Presentation Impl (ding-presentation-impl) * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2006 - 2019 The Cytoscape Consortium * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation, either version 2.1 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Lesser Public License for more details. * * You should have received a copy of the GNU General Lesser Public * License along with this program. If not, see * <http://www.gnu.org/licenses/lgpl-2.1.html>. 
* #L% */ public class CyActivator extends AbstractCyActivator { // Set to true to enable the Ding Debug panel public static final boolean DEBUG = true; private CustomGraphicsManager cgManager; private CyCustomGraphics2Manager cg2Manager; private CustomGraphicsBrowser cgBrowser; @Override public void start(BundleContext bc) { CyServiceRegistrar serviceRegistrar = getService(bc, CyServiceRegistrar.class); startCustomGraphicsMgr(bc, serviceRegistrar); startCharts(bc, serviceRegistrar); startGradients(bc, serviceRegistrar); startPresentationImpl(bc, serviceRegistrar); if(DEBUG) { DingDebugPanel debugPanel = new DingDebugPanel(serviceRegistrar); registerService(bc, debugPanel, CytoPanelComponent.class); registerService(bc, debugPanel, SetCurrentNetworkViewListener.class); } } private void startPresentationImpl(final BundleContext bc, final CyServiceRegistrar serviceRegistrar) { DVisualLexicon dVisualLexicon = new DVisualLexicon(cgManager); NVLTFActionSupport nvltfActionSupport = new NVLTFActionSupport(serviceRegistrar); ViewTaskFactoryListener vtfListener = new ViewTaskFactoryListener(nvltfActionSupport); registerService(bc, vtfListener, ViewTaskFactoryListener.class); AnnotationFactoryManager annotationFactoryManager = new AnnotationFactoryManager(); AnnotationManager annotationManager = new AnnotationManagerImpl(serviceRegistrar); DingGraphLOD dingGraphLOD = new DingGraphLOD(serviceRegistrar); registerService(bc, dingGraphLOD, PropertyUpdatedListener.class); HandleFactory handleFactory = new HandleFactoryImpl(); registerService(bc, handleFactory, HandleFactory.class); AddEdgeNodeViewTaskFactoryImpl addEdgeNodeViewTaskFactory = new AddEdgeNodeViewTaskFactoryImpl(serviceRegistrar); ContinuousMappingCellRendererFactory continuousMappingCellRendererFactory = getService(bc, ContinuousMappingCellRendererFactory.class); // Object Position Editor ObjectPositionValueEditor objectPositionValueEditor = new ObjectPositionValueEditor(); ObjectPositionEditor objectPositionEditor = new ObjectPositionEditor(objectPositionValueEditor, continuousMappingCellRendererFactory, serviceRegistrar); CyNetworkViewFactoryFactory netViewFactoryFactory = getService(bc, CyNetworkViewFactoryFactory.class); CyNetworkViewConfig viewFactoryConfig = DingNetworkViewFactory.getNetworkViewConfig(netViewFactoryFactory, dVisualLexicon); CyNetworkViewFactory netViewFactory = netViewFactoryFactory.createNetworkViewFactory(dVisualLexicon, DingRenderer.ID, viewFactoryConfig); DingNetworkViewFactory dingNetViewFactory = new DingNetworkViewFactory(netViewFactory, dVisualLexicon, annotationFactoryManager, dingGraphLOD, handleFactory, serviceRegistrar); registerService(bc, dingNetViewFactory, NetworkViewAboutToBeDestroyedListener.class); DingRenderer renderer = new DingRenderer(dingNetViewFactory, dVisualLexicon, serviceRegistrar); registerService(bc, renderer, NetworkViewRenderer.class); registerService(bc, renderer, DingRenderer.class); RenderingEngineFactory<CyNetwork> dingRenderingEngineFactory = renderer.getRenderingEngineFactory(DingRenderer.DEFAULT_CONTEXT); // Edge Bend editor EdgeBendValueEditor edgeBendValueEditor = new EdgeBendValueEditor(dingNetViewFactory, dingRenderingEngineFactory, serviceRegistrar); EdgeBendEditor edgeBendEditor = new EdgeBendEditor(edgeBendValueEditor, continuousMappingCellRendererFactory, serviceRegistrar); Properties dingRenderingEngineFactoryProps = new Properties(); dingRenderingEngineFactoryProps.setProperty(ID, "ding"); registerAllServices(bc, dingRenderingEngineFactory, 
dingRenderingEngineFactoryProps); // Properties dingNavigationRenderingEngineFactoryProps = new Properties(); // dingNavigationRenderingEngineFactoryProps.setProperty(ID, "dingNavigation"); // registerAllServices(bc, dingNavigationRenderingEngineFactory, dingNavigationRenderingEngineFactoryProps); Properties addEdgeNodeViewTaskFactoryProps = new Properties(); addEdgeNodeViewTaskFactoryProps.setProperty(PREFERRED_ACTION, "Edge"); addEdgeNodeViewTaskFactoryProps.setProperty(PREFERRED_MENU, NODE_ADD_MENU); addEdgeNodeViewTaskFactoryProps.setProperty(TITLE, "Edge"); addEdgeNodeViewTaskFactoryProps.setProperty(MENU_GRAVITY, "0.1"); registerService(bc, addEdgeNodeViewTaskFactory, NodeViewTaskFactory.class, addEdgeNodeViewTaskFactoryProps); Properties dVisualLexiconProps = new Properties(); dVisualLexiconProps.setProperty(ID, "ding"); registerService(bc, dVisualLexicon, VisualLexicon.class, dVisualLexiconProps); final Properties positionEditorProp = new Properties(); positionEditorProp.setProperty(ID, "objectPositionValueEditor"); registerService(bc, objectPositionValueEditor, ValueEditor.class, positionEditorProp); final Properties objectPositionEditorProp = new Properties(); objectPositionEditorProp.setProperty(ID, "objectPositionEditor"); registerService(bc, objectPositionEditor, VisualPropertyEditor.class, objectPositionEditorProp); registerAllServices(bc, edgeBendValueEditor); registerService(bc, edgeBendEditor, VisualPropertyEditor.class); // Annotation Manager registerServiceListener(bc, annotationFactoryManager::addAnnotationFactory, annotationFactoryManager::removeAnnotationFactory, AnnotationFactory.class); registerService(bc, annotationManager, AnnotationManager.class); // Annotations UI AnnotationMediator annotationMediator = new AnnotationMediator(serviceRegistrar); registerServiceListener(bc, annotationMediator::addAnnotationFactory, annotationMediator::removeAnnotationFactory, AnnotationFactory.class); registerAllServices(bc, annotationMediator); // Annotation Factories (the order they are registered is the order they appear in the UI) AnnotationFactory<?> textAnnotationFactory = new TextAnnotationFactory(serviceRegistrar); Properties textFactory = new Properties(); textFactory.setProperty("type","TextAnnotation.class"); registerService(bc, textAnnotationFactory, AnnotationFactory.class, textFactory); AnnotationFactory<?> boundedAnnotationFactory = new BoundedTextAnnotationFactory(serviceRegistrar); Properties boundedFactory = new Properties(); boundedFactory.setProperty("type","BoundedTextAnnotation.class"); registerService(bc, boundedAnnotationFactory, AnnotationFactory.class, boundedFactory); AnnotationFactory<?> shapeAnnotationFactory = new ShapeAnnotationFactory(serviceRegistrar); Properties shapeFactory = new Properties(); shapeFactory.setProperty("type","ShapeAnnotation.class"); registerService(bc, shapeAnnotationFactory, AnnotationFactory.class, shapeFactory); AnnotationFactory<?> imageAnnotationFactory = new ImageAnnotationFactory(serviceRegistrar); Properties imageFactory = new Properties(); imageFactory.setProperty("type","ImageAnnotation.class"); registerService(bc, imageAnnotationFactory, AnnotationFactory.class, imageFactory); AnnotationFactory<?> arrowAnnotationFactory = new ArrowAnnotationFactory(serviceRegistrar); Properties arrowFactory = new Properties(); arrowFactory.setProperty("type","ArrowAnnotation.class"); registerService(bc, arrowAnnotationFactory, AnnotationFactory.class, arrowFactory); AnnotationFactory<?> groupAnnotationFactory = new 
GroupAnnotationFactory(serviceRegistrar); Properties groupFactory = new Properties(); groupFactory.setProperty("type","GroupAnnotation.class"); registerService(bc, groupAnnotationFactory, AnnotationFactory.class, groupFactory); // Annotation Task Factories AddArrowTaskFactory addArrowTaskFactory = new AddArrowTaskFactory(arrowAnnotationFactory, renderer); Properties addArrowTaskFactoryProps = new Properties(); addArrowTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addArrowTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addArrowTaskFactoryProps.setProperty(MENU_GRAVITY, "1.2"); addArrowTaskFactoryProps.setProperty(TITLE, "Arrow Annotation..."); registerService(bc, addArrowTaskFactory, NetworkViewLocationTaskFactory.class, addArrowTaskFactoryProps); AddAnnotationTaskFactory addImageTaskFactory = new AddAnnotationTaskFactory(imageAnnotationFactory, renderer); Properties addImageTaskFactoryProps = new Properties(); addImageTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addImageTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addImageTaskFactoryProps.setProperty(MENU_GRAVITY, "1.3"); addImageTaskFactoryProps.setProperty(TITLE, "Image Annotation..."); registerService(bc, addImageTaskFactory, NetworkViewLocationTaskFactory.class, addImageTaskFactoryProps); AddAnnotationTaskFactory addShapeTaskFactory = new AddAnnotationTaskFactory(shapeAnnotationFactory, renderer); Properties addShapeTaskFactoryProps = new Properties(); addShapeTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addShapeTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addShapeTaskFactoryProps.setProperty(MENU_GRAVITY, "1.4"); addShapeTaskFactoryProps.setProperty(TITLE, "Shape Annotation..."); registerService(bc, addShapeTaskFactory, NetworkViewLocationTaskFactory.class, addShapeTaskFactoryProps); AddAnnotationTaskFactory addTextTaskFactory = new AddAnnotationTaskFactory(textAnnotationFactory, renderer); Properties addTextTaskFactoryProps = new Properties(); addTextTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addTextTaskFactoryProps.setProperty(MENU_GRAVITY, "1.5"); addTextTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addTextTaskFactoryProps.setProperty(TITLE, "Text Annotation..."); registerService(bc, addTextTaskFactory, NetworkViewLocationTaskFactory.class, addTextTaskFactoryProps); AddAnnotationTaskFactory addBoundedTextTaskFactory = new AddAnnotationTaskFactory(boundedAnnotationFactory, renderer); Properties addBoundedTextTaskFactoryProps = new Properties(); addBoundedTextTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); addBoundedTextTaskFactoryProps.setProperty(MENU_GRAVITY, "1.6"); addBoundedTextTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_ADD_MENU); addBoundedTextTaskFactoryProps.setProperty(TITLE, "Bounded Text Annotation..."); registerService(bc, addBoundedTextTaskFactory, NetworkViewLocationTaskFactory.class, addBoundedTextTaskFactoryProps); // Annotation edit EditAnnotationTaskFactory editAnnotationTaskFactory = new EditAnnotationTaskFactory(renderer); Properties editAnnotationTaskFactoryProps = new Properties(); editAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); editAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "2.0"); editAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); editAnnotationTaskFactoryProps.setProperty(TITLE, "Modify Annotation..."); editAnnotationTaskFactoryProps.setProperty(INSERT_SEPARATOR_BEFORE, "true"); registerService(bc, 
editAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, editAnnotationTaskFactoryProps); /* MoveAnnotationTaskFactory moveAnnotationTaskFactory = new MoveAnnotationTaskFactory(); Properties moveAnnotationTaskFactoryProps = new Properties(); moveAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); moveAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "2.1"); moveAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); moveAnnotationTaskFactoryProps.setProperty(TITLE, "Move Annotation"); registerService(bc, moveAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, moveAnnotationTaskFactoryProps); */ // Reorder Selected Annotations - Edit Menu { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Shift.TO_FRONT); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Bring Annotations to Front"); props.setProperty(ACCELERATOR, "shift cmd CLOSE_BRACKET"); props.setProperty(MENU_GRAVITY, "6.1"); props.setProperty(INSERT_SEPARATOR_BEFORE, "true"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Shift.UP_ONE); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Bring Annotations Forward"); props.setProperty(ACCELERATOR, "cmd CLOSE_BRACKET"); props.setProperty(MENU_GRAVITY, "6.2"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Shift.DOWN_ONE); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Send Annotations Backward"); props.setProperty(ACCELERATOR, "cmd OPEN_BRACKET"); props.setProperty(MENU_GRAVITY, "6.3"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Shift.TO_BACK); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Send Annotations to Back"); props.setProperty(ACCELERATOR, "shift cmd OPEN_BRACKET"); props.setProperty(MENU_GRAVITY, "6.4"); props.setProperty(INSERT_SEPARATOR_AFTER, "true"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Annotation.FOREGROUND); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(MENU_GRAVITY, "6.5"); props.setProperty(TITLE, "Pull Annotations to Foreground Layer"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } { ReorderSelectedAnnotationsTaskFactory factory = new ReorderSelectedAnnotationsTaskFactory(renderer, Annotation.BACKGROUND); Properties props = new Properties(); props.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); props.setProperty(TITLE, "Push Annotations to Background Layer"); props.setProperty(MENU_GRAVITY, "6.6"); props.setProperty(INSERT_SEPARATOR_AFTER, "true"); registerService(bc, factory, NetworkViewTaskFactory.class, props); } /* ResizeAnnotationTaskFactory resizeAnnotationTaskFactory = new ResizeAnnotationTaskFactory(); Properties resizeAnnotationTaskFactoryProps = new Properties(); 
resizeAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); resizeAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "2.3"); resizeAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_EDIT_MENU); resizeAnnotationTaskFactoryProps.setProperty(TITLE, "Resize Annotation"); registerService(bc, resizeAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, resizeAnnotationTaskFactoryProps); */ // Annotation delete RemoveAnnotationTaskFactory removeAnnotationTaskFactory = new RemoveAnnotationTaskFactory(renderer); Properties removeAnnotationTaskFactoryProps = new Properties(); removeAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); removeAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "1.1"); removeAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_DELETE_MENU); removeAnnotationTaskFactoryProps.setProperty(TITLE, "Annotation"); registerService(bc, removeAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, removeAnnotationTaskFactoryProps); /* // Annotation select SelectAnnotationTaskFactory selectAnnotationTaskFactory = new SelectAnnotationTaskFactory(); Properties selectAnnotationTaskFactoryProps = new Properties(); selectAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); selectAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "1.1"); selectAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_SELECT_MENU); selectAnnotationTaskFactoryProps.setProperty(TITLE, "Select/Unselect Annotation"); registerService(bc, selectAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, selectAnnotationTaskFactoryProps); */ // Annotation group GroupAnnotationsTaskFactory groupAnnotationTaskFactory = new GroupAnnotationsTaskFactory(renderer); Properties groupAnnotationTaskFactoryProps = new Properties(); groupAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); groupAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "100"); groupAnnotationTaskFactoryProps.setProperty(INSERT_SEPARATOR_BEFORE, "true"); groupAnnotationTaskFactoryProps.setProperty(IN_MENU_BAR, "false"); groupAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_GROUP_MENU); groupAnnotationTaskFactoryProps.setProperty(TITLE, "Group Annotations"); registerService(bc, groupAnnotationTaskFactory, NetworkViewTaskFactory.class, groupAnnotationTaskFactoryProps); // Annotation ungroup UngroupAnnotationsTaskFactory ungroupAnnotationTaskFactory = new UngroupAnnotationsTaskFactory(renderer); Properties ungroupAnnotationTaskFactoryProps = new Properties(); ungroupAnnotationTaskFactoryProps.setProperty(PREFERRED_ACTION, "NEW"); ungroupAnnotationTaskFactoryProps.setProperty(MENU_GRAVITY, "100"); ungroupAnnotationTaskFactoryProps.setProperty(INSERT_SEPARATOR_BEFORE, "true"); ungroupAnnotationTaskFactoryProps.setProperty(PREFERRED_MENU, NETWORK_GROUP_MENU); ungroupAnnotationTaskFactoryProps.setProperty(TITLE, "Ungroup Annotations"); registerService(bc, ungroupAnnotationTaskFactory, NetworkViewLocationTaskFactory.class, ungroupAnnotationTaskFactoryProps); // Set mouse drag selection modes SelectModeAction selectNodesOnlyAction = new SelectModeAction(SelectModeAction.NODES, 0.5f, serviceRegistrar); registerAllServices(bc, selectNodesOnlyAction); SelectModeAction selectEdgesOnlyAction = new SelectModeAction(SelectModeAction.EDGES, 0.6f, serviceRegistrar); registerAllServices(bc, selectEdgesOnlyAction); SelectModeAction selectAnnotationsOnlyAction = new SelectModeAction(SelectModeAction.ANNOTATIONS, 0.7f, serviceRegistrar); registerAllServices(bc, 
selectAnnotationsOnlyAction); SelectModeAction selectNodesEdgesAction = new SelectModeAction(SelectModeAction.NODES_EDGES, 0.8f, serviceRegistrar); registerAllServices(bc, selectNodesEdgesAction); SelectModeAction selectAllAction = new SelectModeAction(SelectModeAction.ALL, 0.9f, serviceRegistrar); registerAllServices(bc, selectAllAction); { // Toggle Graphics Details ShowGraphicsDetailsTaskFactory factory = new ShowGraphicsDetailsTaskFactory(); Properties props = new Properties(); props.setProperty(ID, "showGraphicsDetailsTaskFactory"); registerService(bc, factory, NetworkViewTaskFactory.class, props); // Used at least by cyREST // Main menu GraphicsDetailAction mainMenuAction = new GraphicsDetailAction(5.0f, "View", factory, serviceRegistrar); registerAllServices(bc, mainMenuAction); } final String vtfFilter = String.format("(| (!(%s=*)) (%s=true))", IN_CONTEXT_MENU, IN_CONTEXT_MENU); // if IN_CONTEXT_MENU is not specified, default to true registerServiceListener(bc, vtfListener::addNodeViewTaskFactory, vtfListener::removeNodeViewTaskFactory, NodeViewTaskFactory.class, vtfFilter); registerServiceListener(bc, vtfListener::addEdgeViewTaskFactory, vtfListener::removeEdgeViewTaskFactory, EdgeViewTaskFactory.class, vtfFilter); registerServiceListener(bc, vtfListener::addNetworkViewTaskFactory, vtfListener::removeNetworkViewTaskFactory, NetworkViewTaskFactory.class, vtfFilter); registerServiceListener(bc, vtfListener::addNetworkViewLocationTaskFactory, vtfListener::removeNetworkViewLocationTaskFactory, NetworkViewLocationTaskFactory.class); registerServiceListener(bc, vtfListener::addCyEdgeViewContextMenuFactory, vtfListener::removeCyEdgeViewContextMenuFactory, CyEdgeViewContextMenuFactory.class); registerServiceListener(bc, vtfListener::addCyNodeViewContextMenuFactory, vtfListener::removeCyNodeViewContextMenuFactory, CyNodeViewContextMenuFactory.class); registerServiceListener(bc, vtfListener::addCyNetworkViewContextMenuFactory, vtfListener::removeCyNetworkViewContextMenuFactory, CyNetworkViewContextMenuFactory.class); registerServiceListener(bc, annotationFactoryManager::addAnnotationFactory, annotationFactoryManager::removeAnnotationFactory, AnnotationFactory.class); BendFactory bendFactory = new BendFactoryImpl(); registerService(bc, bendFactory, BendFactory.class); // Register the factory dVisualLexicon.addBendFactory(bendFactory, new HashMap<Object, Object>()); // Translators for Passthrough final CustomGraphicsTranslator cgTranslator = new CustomGraphicsTranslator(cgManager, cg2Manager); registerService(bc, cgTranslator, ValueTranslator.class); // Factories for Visual Property Dependency final NodeSizeDependencyFactory nodeSizeDependencyFactory = new NodeSizeDependencyFactory(dVisualLexicon); registerService(bc, nodeSizeDependencyFactory, VisualPropertyDependencyFactory.class); final EdgeColorDependencyFactory edgeColorDependencyFactory = new EdgeColorDependencyFactory(dVisualLexicon); registerService(bc, edgeColorDependencyFactory, VisualPropertyDependencyFactory.class); final CustomGraphicsSizeDependencyFactory cgSizeDependencyFactory = new CustomGraphicsSizeDependencyFactory( dVisualLexicon); registerService(bc, cgSizeDependencyFactory, VisualPropertyDependencyFactory.class); // Custom Graphics Editors final CyCustomGraphicsValueEditor cgValueEditor = new CyCustomGraphicsValueEditor(cgManager, cg2Manager, cgBrowser, serviceRegistrar); registerAllServices(bc, cgValueEditor); final CustomGraphicsVisualPropertyEditor cgVisualPropertyEditor = new CustomGraphicsVisualPropertyEditor( 
CyCustomGraphics.class, cgValueEditor, continuousMappingCellRendererFactory, serviceRegistrar); registerService(bc, cgVisualPropertyEditor, VisualPropertyEditor.class); } private void startCustomGraphicsMgr(final BundleContext bc, final CyServiceRegistrar serviceRegistrar) { cgManager = new CustomGraphicsManagerImpl(getdefaultImageURLs(bc), serviceRegistrar); registerAllServices(bc, cgManager); cgBrowser = new CustomGraphicsBrowser(cgManager); registerAllServices(bc, cgBrowser); CustomGraphicsManagerAction cgManagerAction = new CustomGraphicsManagerAction(cgManager, cgBrowser, serviceRegistrar); registerService(bc, cgManagerAction, CyAction.class); // Create and register our built-in factories. // TODO: When the CustomGraphicsFactory service stuff is set up, just // register these as services URLImageCustomGraphicsFactory imageFactory = new URLImageCustomGraphicsFactory(cgManager); cgManager.addCustomGraphicsFactory(imageFactory, new Properties()); GradientOvalFactory ovalFactory = new GradientOvalFactory(cgManager); cgManager.addCustomGraphicsFactory(ovalFactory, new Properties()); GradientRoundRectangleFactory rectangleFactory = new GradientRoundRectangleFactory(cgManager); cgManager.addCustomGraphicsFactory(rectangleFactory, new Properties()); // Register this service listener so that app writers can provide their own CustomGraphics factories registerServiceListener(bc, cgManager::addCustomGraphicsFactory, cgManager::removeCustomGraphicsFactory, CyCustomGraphicsFactory.class); // Register this service listener so that app writers can provide their own CyCustomGraphics2 factories cg2Manager = CyCustomGraphics2ManagerImpl.getInstance(); registerAllServices(bc, cg2Manager); registerServiceListener(bc, ((CyCustomGraphics2ManagerImpl)cg2Manager)::addFactory, ((CyCustomGraphics2ManagerImpl)cg2Manager)::removeFactory, CyCustomGraphics2Factory.class); } private void startCharts(final BundleContext bc, final CyServiceRegistrar serviceRegistrar) { // Register Chart Factories final Properties factoryProps = new Properties(); factoryProps.setProperty(CyCustomGraphics2Factory.GROUP, CyCustomGraphics2Manager.GROUP_CHARTS); { final BarChartFactory factory = new BarChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final BoxChartFactory factory = new BoxChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final PieChartFactory factory = new PieChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final RingChartFactory factory = new RingChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final LineChartFactory factory = new LineChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final HeatMapChartFactory factory = new HeatMapChartFactory(serviceRegistrar); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } } private void startGradients(final BundleContext bc, final CyServiceRegistrar serviceRegistrar) { // Register Gradient Factories final Properties factoryProps = new Properties(); factoryProps.setProperty(CyCustomGraphics2Factory.GROUP, CyCustomGraphics2Manager.GROUP_GRADIENTS); { final LinearGradientFactory factory = new LinearGradientFactory(); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } { final RadialGradientFactory factory = new 
RadialGradientFactory(); registerService(bc, factory, CyCustomGraphics2Factory.class, factoryProps); } } /** * Get list of default images from resource. */ private Set<URL> getdefaultImageURLs(final BundleContext bc) { Enumeration<URL> e = bc.getBundle().findEntries("images/sampleCustomGraphics", "*.png", true); final Set<URL> defaultImageUrls = new HashSet<>(); while (e.hasMoreElements()) defaultImageUrls.add(e.nextElement()); return defaultImageUrls; } }
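The activator above registers every factory, editor and task with the same register-with-properties idiom: build the object, fill a java.util.Properties block with ServiceProperties keys such as PREFERRED_MENU, TITLE and MENU_GRAVITY, then hand both to registerService() so the Swing application can discover the service through OSGi and mount it in the right menu. A condensed, hypothetical illustration of that idiom only: the menu path, title and anonymous task below are invented for this sketch and are not part of the Ding code, and the static imports assume the constants live in org.cytoscape.work.ServiceProperties.

import static org.cytoscape.work.ServiceProperties.MENU_GRAVITY;
import static org.cytoscape.work.ServiceProperties.PREFERRED_MENU;
import static org.cytoscape.work.ServiceProperties.TITLE;

import java.util.Properties;

import org.cytoscape.service.util.AbstractCyActivator;
import org.cytoscape.task.NetworkViewTaskFactory;
import org.cytoscape.view.model.CyNetworkView;
import org.cytoscape.work.AbstractTask;
import org.cytoscape.work.TaskIterator;
import org.cytoscape.work.TaskMonitor;
import org.osgi.framework.BundleContext;

public class ExampleActivator extends AbstractCyActivator {

    @Override
    public void start(BundleContext bc) {
        // The factory is published to the OSGi registry; the Properties tell the
        // menu system where to show it and how to order it (MENU_GRAVITY).
        NetworkViewTaskFactory factory = new NetworkViewTaskFactory() {
            @Override
            public TaskIterator createTaskIterator(CyNetworkView view) {
                return new TaskIterator(new AbstractTask() {
                    @Override
                    public void run(TaskMonitor tm) {
                        tm.setStatusMessage("Example task ran"); // placeholder work
                    }
                });
            }
            @Override
            public boolean isReady(CyNetworkView view) {
                return view != null;
            }
        };

        Properties props = new Properties();
        props.setProperty(PREFERRED_MENU, "Apps.Example"); // hypothetical menu location
        props.setProperty(TITLE, "Run Example Task");
        props.setProperty(MENU_GRAVITY, "1.0");
        registerService(bc, factory, NetworkViewTaskFactory.class, props);
    }
}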
turn off ding debug panel
ding-impl/ding-presentation-impl/src/main/java/org/cytoscape/ding/CyActivator.java
turn off ding debug panel
Java
lgpl-2.1
f62100640354d49b066255e1ef93f80b4603c7d1
0
SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package net.sf.jaer.util.avioutput; import ch.unizh.ini.jaer.projects.davis.frames.*; import com.jogamp.common.nio.Buffers; import eu.seebetter.ini.chips.ApsDvsChip; import java.awt.Desktop; import java.awt.image.BufferedImage; import java.awt.image.DataBufferInt; import java.awt.image.WritableRaster; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Date; import java.util.logging.Level; import java.util.logging.Logger; import javax.media.opengl.GL; import javax.media.opengl.GL2; import javax.media.opengl.GLAutoDrawable; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import javax.swing.filechooser.FileFilter; import net.sf.jaer.Description; import net.sf.jaer.DevelopmentStatus; import net.sf.jaer.chip.AEChip; import net.sf.jaer.event.EventPacket; import net.sf.jaer.eventio.AEInputStream; import static net.sf.jaer.eventprocessing.EventFilter.log; import net.sf.jaer.eventprocessing.EventFilter2D; import net.sf.jaer.eventprocessing.FilterChain; import net.sf.jaer.graphics.FrameAnnotater; /** * Writes AVI file from displayed AEViewer frames, The AVI file is in RAW * format. * * @author Tobi */ @Description("Writes AVI file AEViewer displayed OpenGL graphics") @DevelopmentStatus(DevelopmentStatus.Status.Experimental) public class JaerAviWriter extends EventFilter2D implements FrameAnnotater { AVIOutputStream aviOutputStream = null; private static final String DEFAULT_FILENAME = "jAER-AEViewer.avi"; private String lastFileName = getString("lastFileName", DEFAULT_FILENAME); private int framesWritten = 0; private final int logEveryThisManyFrames = 30; private boolean writeTimecodeFile = getBoolean("writeTimecodeFile", true); private static final String TIMECODE_SUFFIX = "-timecode.txt"; private File timecodeFile = null; private FileWriter timecodeWriter = null; private boolean closeOnRewind = getBoolean("closeOnRewind", true); private boolean propertyChangeListenerAdded = false; private AVIOutputStream.VideoFormat format = AVIOutputStream.VideoFormat.valueOf(getString("format", AVIOutputStream.VideoFormat.RAW.toString())); private int maxFrames = getInt("maxFrames", 0); private float compressionQuality = getFloat("compressionQuality", 0.9f); public JaerAviWriter(AEChip chip) { super(chip); setPropertyTooltip("saveAVIFileAs", "Opens the output file. 
The AVI file is in RAW format with pixel values 0-255 coming from ApsFrameExtractor displayed frames, which are offset and scaled by it."); setPropertyTooltip("closeFile", "Closes the output file if it is open."); setPropertyTooltip("writeTimecodeFile", "writes a file alongside AVI file (with suffix " + TIMECODE_SUFFIX + ") that maps from AVI frame to AER timestamp for that frame (the frame end timestamp)"); setPropertyTooltip("closeOnRewind", "closes recording on rewind event, to allow unattended operation"); setPropertyTooltip("format", "video file is writtent to this output format (note that RLE will throw exception because OpenGL frames are not 4 or 8 bit images)"); setPropertyTooltip("maxFrames", "file is automatically closed after this many frames have been written; set to 0 to disable"); setPropertyTooltip("framesWritten", "READONLY, shows number of frames written"); setPropertyTooltip("compressionQuality", "In PNG or JPG format, sets compression quality; 0 is lowest quality and 1 is highest, 0.9 is default value"); setPropertyTooltip("showFolderInDesktop", "Opens the folder containging the last-written AVI file"); } @Override synchronized public EventPacket<?> filterPacket(EventPacket<?> in) { if (chip instanceof ApsDvsChip && !propertyChangeListenerAdded) { if (chip.getAeViewer() != null) { propertyChangeListenerAdded = true; } } return in; } @Override public void resetFilter() { } @Override public void initFilter() { } public void doShowFolderInDesktop() { if (!Desktop.isDesktopSupported()) { log.warning("Sorry, desktop operations are not supported"); return; } try { Desktop desktop = Desktop.getDesktop(); File f = new File(lastFileName); if (f.exists()) { desktop.open(f.getParentFile()); } } catch (Exception e) { log.warning(e.toString()); } } synchronized public void doSaveAVIFileAs() { if (aviOutputStream != null) { JOptionPane.showMessageDialog(null, "AVI output stream is already opened"); return; } JFileChooser c = new JFileChooser(lastFileName); c.setFileFilter(new FileFilter() { @Override public boolean accept(File f) { return f.isDirectory() || f.getName().toLowerCase().endsWith(".avi"); } @Override public String getDescription() { return "AVI (Audio Video Interleave) Microsoft video file"; } }); c.setSelectedFile(new File(lastFileName)); int ret = c.showSaveDialog(null); if (ret != JFileChooser.APPROVE_OPTION) { return; } if (!c.getSelectedFile().getName().toLowerCase().endsWith(".avi")) { String newName = c.getSelectedFile().toString() + ".avi"; c.setSelectedFile(new File(newName)); } lastFileName = c.getSelectedFile().toString(); if (c.getSelectedFile().exists()) { int r = JOptionPane.showConfirmDialog(null, "File " + c.getSelectedFile().toString() + " already exists, overwrite it?"); if (r != JOptionPane.OK_OPTION) { return; } } openAVIOutputStream(c.getSelectedFile()); } synchronized public void doCloseFile() { if (aviOutputStream != null) { try { aviOutputStream.close(); aviOutputStream = null; if (timecodeWriter != null) { timecodeWriter.close(); log.info("Closed timecode file " + timecodeFile.toString()); timecodeWriter = null; } log.info("Closed " + lastFileName + " in format " + format + " with " + framesWritten + " frames"); } catch (Exception ex) { log.warning(ex.toString()); ex.printStackTrace(); aviOutputStream = null; } } } private void openAVIOutputStream(File f) { try { aviOutputStream = new AVIOutputStream(f, format); aviOutputStream.setFrameRate(chip.getAeViewer().getFrameRate()); aviOutputStream.setVideoCompressionQuality(compressionQuality); // 
aviOutputStream.setVideoDimension(chip.getSizeX(), chip.getSizeY()); lastFileName = f.toString(); putString("lastFileName", lastFileName); if (writeTimecodeFile) { String s = f.toString().subSequence(0, f.toString().lastIndexOf(".")).toString() + TIMECODE_SUFFIX; timecodeFile = new File(s); timecodeWriter = new FileWriter(timecodeFile); timecodeWriter.write(String.format("# timecode file relating frames of AVI file to AER timestamps\n")); timecodeWriter.write(String.format("# written %s\n", new Date().toString())); timecodeWriter.write(String.format("# frameNumber timestamp\n")); log.info("Opened timecode file " + timecodeFile.toString()); } log.info("Opened AVI output file " + f.toString() + " with format " + format); framesWritten = 0; getSupport().firePropertyChange("framesWritten", null, framesWritten); } catch (IOException ex) { JOptionPane.showMessageDialog(null, ex.toString(), "Couldn't create output file stream", JOptionPane.WARNING_MESSAGE, null); return; } } /** * @return the writeTimecodeFile */ public boolean isWriteTimecodeFile() { return writeTimecodeFile; } /** * @param writeTimecodeFile the writeTimecodeFile to set */ public void setWriteTimecodeFile(boolean writeTimecodeFile) { this.writeTimecodeFile = writeTimecodeFile; putBoolean("writeTimecodeFile", writeTimecodeFile); } /** * @return the closeOnRewind */ public boolean isCloseOnRewind() { return closeOnRewind; } /** * @param closeOnRewind the closeOnRewind to set */ public void setCloseOnRewind(boolean closeOnRewind) { this.closeOnRewind = closeOnRewind; putBoolean("closeOnRewind", closeOnRewind); } @Override public void annotate(GLAutoDrawable drawable) { if (aviOutputStream == null) { return; } GL2 gl = drawable.getGL().getGL2(); BufferedImage bi = toImage(gl, drawable.getNativeSurface().getSurfaceWidth(), drawable.getNativeSurface().getSurfaceHeight()); try { aviOutputStream.writeFrame(bi); if (timecodeWriter != null) { int timestamp = chip.getAeViewer().getAePlayer().getTime(); timecodeWriter.write(String.format("%d %d\n", framesWritten, timestamp)); } if (++framesWritten % logEveryThisManyFrames == 0) { log.info(String.format("wrote %d frames", framesWritten)); } getSupport().firePropertyChange("framesWritten", null, framesWritten); if (maxFrames > 0 && framesWritten >= maxFrames) { log.info("wrote maxFrames=" + maxFrames + " frames; closing AVI file"); doCloseFile(); } } catch (Exception e) { log.warning("While writing AVI frame, caught exception, closing file: " + e.toString()); doCloseFile(); } } public BufferedImage toImage(GL2 gl, int w, int h) { gl.glReadBuffer(GL.GL_FRONT); // or GL.GL_BACK ByteBuffer glBB = Buffers.newDirectByteBuffer(4 * w * h); gl.glReadPixels(0, 0, w, h, GL2.GL_BGRA, GL.GL_BYTE, glBB); BufferedImage bi = new BufferedImage(w, h, BufferedImage.TYPE_INT_BGR); int[] bd = ((DataBufferInt) bi.getRaster().getDataBuffer()).getData(); for (int y = 0; y < h; y++) { for (int x = 0; x < w; x++) { int b = 2 * glBB.get(); int g = 2 * glBB.get(); int r = 2 * glBB.get(); int a = glBB.get(); // not using bd[(h - y - 1) * w + x] = (b << 16) | (g << 8) | r | 0xFF000000; } } return bi; } /** * @return the format */ public AVIOutputStream.VideoFormat getFormat() { return format; } /** * @param format the format to set */ public void setFormat(AVIOutputStream.VideoFormat format) { this.format = format; putString("format", format.toString()); } /** * @return the maxFrames */ public int getMaxFrames() { return maxFrames; } /** * @param maxFrames the maxFrames to set */ public void setMaxFrames(int maxFrames) { 
this.maxFrames = maxFrames; putInt("maxFrames", maxFrames); } /** * @return the framesWritten */ public int getFramesWritten() { return framesWritten; } /** * @param framesWritten the framesWritten to set */ public void setFramesWritten(int framesWritten) { // do nothing, only here to expose in GUI } /** * @return the compressionQuality */ public float getCompressionQuality() { return compressionQuality; } /** * @param compressionQuality the compressionQuality to set */ public void setCompressionQuality(float compressionQuality) { if (compressionQuality < 0) { compressionQuality = 0; } else if (compressionQuality > 1) { compressionQuality = 1; } this.compressionQuality = compressionQuality; putFloat("compressionQuality", compressionQuality); } }
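When writeTimecodeFile is enabled, the writer above produces a sidecar file ending in -timecode.txt whose '#'-prefixed header lines are followed by one "frameNumber timestamp" pair per AVI frame. A minimal sketch of reading that mapping back, assuming only that two-column format; the reader class itself is hypothetical and not part of jAER.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.Map;

public final class TimecodeFileReader {

    /** Parses "frameNumber timestamp" pairs, skipping blank lines and '#' comments. */
    public static Map<Integer, Integer> read(Path timecodeFile) throws IOException {
        Map<Integer, Integer> frameToTimestamp = new LinkedHashMap<>();
        for (String line : Files.readAllLines(timecodeFile)) {
            line = line.trim();
            if (line.isEmpty() || line.startsWith("#")) continue; // header comments
            String[] parts = line.split("\\s+");
            frameToTimestamp.put(Integer.valueOf(parts[0]), Integer.valueOf(parts[1]));
        }
        return frameToTimestamp;
    }
}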
src/net/sf/jaer/util/avioutput/JaerAviWriter.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package net.sf.jaer.util.avioutput; import ch.unizh.ini.jaer.projects.davis.frames.*; import com.jogamp.common.nio.Buffers; import eu.seebetter.ini.chips.ApsDvsChip; import java.awt.image.BufferedImage; import java.awt.image.DataBufferInt; import java.awt.image.WritableRaster; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Date; import java.util.logging.Level; import java.util.logging.Logger; import javax.media.opengl.GL; import javax.media.opengl.GL2; import javax.media.opengl.GLAutoDrawable; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import javax.swing.filechooser.FileFilter; import net.sf.jaer.Description; import net.sf.jaer.DevelopmentStatus; import net.sf.jaer.chip.AEChip; import net.sf.jaer.event.EventPacket; import net.sf.jaer.eventio.AEInputStream; import static net.sf.jaer.eventprocessing.EventFilter.log; import net.sf.jaer.eventprocessing.EventFilter2D; import net.sf.jaer.eventprocessing.FilterChain; import net.sf.jaer.graphics.FrameAnnotater; /** * Writes AVI file from displayed AEViewer frames, The AVI file is in RAW * format. * * @author Tobi */ @Description("Writes AVI file AEViewer displayed OpenGL graphics") @DevelopmentStatus(DevelopmentStatus.Status.Experimental) public class JaerAviWriter extends EventFilter2D implements FrameAnnotater { AVIOutputStream aviOutputStream = null; private static final String DEFAULT_FILENAME = "jAER-AEViewer.avi"; private String lastFileName = getString("lastFileName", DEFAULT_FILENAME); private int framesWritten = 0; private final int logEveryThisManyFrames = 30; private boolean writeTimecodeFile = getBoolean("writeTimecodeFile", true); private static final String TIMECODE_SUFFIX = "-timecode.txt"; private File timecodeFile = null; private FileWriter timecodeWriter = null; private boolean closeOnRewind = getBoolean("closeOnRewind", true); private boolean propertyChangeListenerAdded = false; private AVIOutputStream.VideoFormat format=AVIOutputStream.VideoFormat.valueOf(getString("format", AVIOutputStream.VideoFormat.RAW.toString())); private int maxFrames=getInt("maxFrames",0); private float compressionQuality=getFloat("compressionQuality",0.9f); public JaerAviWriter(AEChip chip) { super(chip); setPropertyTooltip("saveAVIFileAs", "Opens the output file. 
The AVI file is in RAW format with pixel values 0-255 coming from ApsFrameExtractor displayed frames, which are offset and scaled by it."); setPropertyTooltip("closeFile", "Closes the output file if it is open."); setPropertyTooltip("writeTimecodeFile", "writes a file alongside AVI file (with suffix " + TIMECODE_SUFFIX + ") that maps from AVI frame to AER timestamp for that frame (the frame end timestamp)"); setPropertyTooltip("closeOnRewind", "closes recording on rewind event, to allow unattended operation"); setPropertyTooltip("format", "video file is writtent to this output format (note that RLE will throw exception because OpenGL frames are not 4 or 8 bit images)"); setPropertyTooltip("maxFrames", "file is automatically closed after this many frames have been written; set to 0 to disable"); setPropertyTooltip("framesWritten", "READONLY, shows number of frames written"); setPropertyTooltip("compressionQuality", "In PNG or JPG format, sets compression quality; 0 is lowest quality and 1 is highest, 0.9 is default value"); } @Override synchronized public EventPacket<?> filterPacket(EventPacket<?> in) { if (chip instanceof ApsDvsChip && !propertyChangeListenerAdded) { if (chip.getAeViewer() != null) { propertyChangeListenerAdded = true; } } return in; } @Override public void resetFilter() { } @Override public void initFilter() { } synchronized public void doSaveAVIFileAs() { if (aviOutputStream != null) { JOptionPane.showMessageDialog(null, "AVI output stream is already opened"); return; } JFileChooser c = new JFileChooser(lastFileName); c.setFileFilter(new FileFilter() { @Override public boolean accept(File f) { return f.isDirectory() || f.getName().toLowerCase().endsWith(".avi"); } @Override public String getDescription() { return "AVI (Audio Video Interleave) Microsoft video file"; } }); c.setSelectedFile(new File(lastFileName)); int ret = c.showSaveDialog(null); if (ret != JFileChooser.APPROVE_OPTION) { return; } if (!c.getSelectedFile().getName().toLowerCase().endsWith(".avi")) { String newName = c.getSelectedFile().toString() + ".avi"; c.setSelectedFile(new File(newName)); } lastFileName = c.getSelectedFile().toString(); if (c.getSelectedFile().exists()) { int r = JOptionPane.showConfirmDialog(null, "File " + c.getSelectedFile().toString() + " already exists, overwrite it?"); if (r != JOptionPane.OK_OPTION) { return; } } openAVIOutputStream(c.getSelectedFile()); } synchronized public void doCloseFile() { if (aviOutputStream != null) { try { aviOutputStream.close(); aviOutputStream = null; if (timecodeWriter != null) { timecodeWriter.close(); log.info("Closed timecode file "+timecodeFile.toString()); timecodeWriter = null; } log.info("Closed " + lastFileName + " in format "+format+" with " + framesWritten + " frames"); } catch (Exception ex) { log.warning(ex.toString()); ex.printStackTrace(); aviOutputStream=null; } } } private void openAVIOutputStream(File f) { try { aviOutputStream = new AVIOutputStream(f, format); aviOutputStream.setFrameRate(chip.getAeViewer().getFrameRate()); aviOutputStream.setVideoCompressionQuality(compressionQuality); // aviOutputStream.setVideoDimension(chip.getSizeX(), chip.getSizeY()); lastFileName = f.toString(); putString("lastFileName", lastFileName); if (writeTimecodeFile) { String s = f.toString().subSequence(0, f.toString().lastIndexOf(".")).toString() + TIMECODE_SUFFIX; timecodeFile = new File(s); timecodeWriter = new FileWriter(timecodeFile); timecodeWriter.write(String.format("# timecode file relating frames of AVI file to AER timestamps\n")); 
timecodeWriter.write(String.format("# written %s\n", new Date().toString())); timecodeWriter.write(String.format("# frameNumber timestamp\n")); log.info("Opened timecode file "+timecodeFile.toString()); } log.info("Opened AVI output file " + f.toString()+" with format "+format); framesWritten = 0; getSupport().firePropertyChange("framesWritten", null, framesWritten); } catch (IOException ex) { JOptionPane.showMessageDialog(null, ex.toString(), "Couldn't create output file stream", JOptionPane.WARNING_MESSAGE, null); return; } } /** * @return the writeTimecodeFile */ public boolean isWriteTimecodeFile() { return writeTimecodeFile; } /** * @param writeTimecodeFile the writeTimecodeFile to set */ public void setWriteTimecodeFile(boolean writeTimecodeFile) { this.writeTimecodeFile = writeTimecodeFile; putBoolean("writeTimecodeFile", writeTimecodeFile); } /** * @return the closeOnRewind */ public boolean isCloseOnRewind() { return closeOnRewind; } /** * @param closeOnRewind the closeOnRewind to set */ public void setCloseOnRewind(boolean closeOnRewind) { this.closeOnRewind = closeOnRewind; putBoolean("closeOnRewind", closeOnRewind); } @Override public void annotate(GLAutoDrawable drawable) { if(aviOutputStream==null) return; GL2 gl=drawable.getGL().getGL2(); BufferedImage bi=toImage(gl, drawable.getNativeSurface().getSurfaceWidth(), drawable.getNativeSurface().getSurfaceHeight()); try { aviOutputStream.writeFrame(bi); if (timecodeWriter != null) { int timestamp = chip.getAeViewer().getAePlayer().getTime(); timecodeWriter.write(String.format("%d %d\n", framesWritten, timestamp)); } if (++framesWritten % logEveryThisManyFrames == 0) { log.info(String.format("wrote %d frames", framesWritten)); } getSupport().firePropertyChange("framesWritten", null, framesWritten); if(maxFrames>0 && framesWritten>=maxFrames){ log.info("wrote maxFrames="+maxFrames+" frames; closing AVI file"); doCloseFile(); } } catch (Exception e) { log.warning("While writing AVI frame, caught exception, closing file: "+e.toString()); doCloseFile(); } } public BufferedImage toImage(GL2 gl, int w, int h) { gl.glReadBuffer(GL.GL_FRONT); // or GL.GL_BACK ByteBuffer glBB = Buffers.newDirectByteBuffer(4 * w * h); gl.glReadPixels(0, 0, w, h, GL2.GL_BGRA, GL.GL_BYTE, glBB); BufferedImage bi = new BufferedImage(w, h, BufferedImage.TYPE_INT_BGR); int[] bd = ((DataBufferInt) bi.getRaster().getDataBuffer()).getData(); for (int y = 0; y < h; y++) { for (int x = 0; x < w; x++) { int b = 2 * glBB.get(); int g = 2 * glBB.get(); int r = 2 * glBB.get(); int a=glBB.get(); // not using bd[(h - y - 1) * w + x] = (b << 16) | (g << 8) | r | 0xFF000000; } } return bi; } /** * @return the format */ public AVIOutputStream.VideoFormat getFormat() { return format; } /** * @param format the format to set */ public void setFormat(AVIOutputStream.VideoFormat format) { this.format = format; putString("format", format.toString()); } /** * @return the maxFrames */ public int getMaxFrames() { return maxFrames; } /** * @param maxFrames the maxFrames to set */ public void setMaxFrames(int maxFrames) { this.maxFrames = maxFrames; putInt("maxFrames",maxFrames); } /** * @return the framesWritten */ public int getFramesWritten() { return framesWritten; } /** * @param framesWritten the framesWritten to set */ public void setFramesWritten(int framesWritten) { // do nothing, only here to expose in GUI } /** * @return the compressionQuality */ public float getCompressionQuality() { return compressionQuality; } /** * @param compressionQuality the compressionQuality to set */ 
public void setCompressionQuality(float compressionQuality) { if(compressionQuality<0) compressionQuality=0; else if(compressionQuality>1)compressionQuality=1; this.compressionQuality = compressionQuality; putFloat("compressionQuality",compressionQuality); } }
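The commit message that follows records the only functional difference between the two versions above: a doShowFolderInDesktop action that opens the folder containing the last written AVI file. Stripped of the jAER filter plumbing, the underlying java.awt.Desktop idiom is roughly the sketch below; the class name and error reporting are illustrative, not part of the project.

import java.awt.Desktop;
import java.io.File;

public final class ShowContainingFolder {

    /** Opens the parent directory of the given file in the platform file browser, if supported. */
    public static void open(String fileName) {
        if (!Desktop.isDesktopSupported()) {
            System.err.println("Desktop operations are not supported on this platform");
            return;
        }
        try {
            File f = new File(fileName);
            if (f.exists()) {
                Desktop.getDesktop().open(f.getParentFile());
            }
        } catch (Exception e) {
            System.err.println("Could not open folder: " + e);
        }
    }
}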
added button to open folder where file was written git-svn-id: fe6b3b33f0410f5f719dcd9e0c58b92353e7a5d3@6037 b7f4320f-462c-0410-a916-d9f35bb82d52
src/net/sf/jaer/util/avioutput/JaerAviWriter.java
added button to open folder where file was written
Java
apache-2.0
9f012c06602b198d83aa3dedff286a80953538d8
0
budgefeeney/twitter-tools,budgefeeney/twitter-tools,budgefeeney/twitter-tools
package cc.twittertools.spider; import static cc.twittertools.download.AsyncEmbeddedJsonStatusBlockCrawler.CONNECTION_TIMEOUT; import java.io.BufferedWriter; import java.io.IOException; import java.nio.file.Files; import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.Callable; import org.apache.commons.httpclient.Header; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.HttpException; import org.apache.commons.httpclient.methods.GetMethod; import org.apache.commons.httpclient.params.HttpClientParams; import org.apache.commons.lang.StringUtils; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.joda.time.DateTime; import cc.twittertools.post.SavedTweetReader; import cc.twittertools.post.Tweet; import com.google.common.base.Charsets; import com.j256.simplejmx.common.BaseJmxSelfNaming; import com.j256.simplejmx.common.JmxAttributeField; import com.j256.simplejmx.common.JmxOperation; import com.j256.simplejmx.common.JmxResource; import com.j256.simplejmx.common.JmxSelfNaming; /** * Takes a list of users, and one by one downloads their tweets, using * a synchronous HTTP client, with a 2 second delay between requests. * <p> * The individual part refers to the fact that we're only downloading * tweets for one user at a time, and downloading them serially. * <p> * See UserTweetsSpider for the code to set all this in motion. * <p> * We continue downloading tweets until we've accumulated a minimum * number of tweets in total from a minimum number of users (i.e. one * of these minima will likely be exceeded to meet the other). 
* <p> * Tweets written out by this should be read in using * {@link SavedTweetReader} */ @JmxResource(description = "Category tweet download", domainName = "cc.twittertools.spider", folderNames={ "spiders" }) public class IndividualUserTweetsSpider extends BaseJmxSelfNaming implements JmxSelfNaming, Callable<Integer> { private static final int HTTP_200_OK = 200; private final static Logger LOG = Logger.getLogger(IndividualUserTweetsSpider.class); private final static int MIN_USERS_SPIDERED = 200; private final static int MIN_TWEETS_PER_USER = 1000; private final static int MIN_TWEETS_SPIDERED = MIN_USERS_SPIDERED * MIN_TWEETS_PER_USER; private final static int TIME_LIMIT_MONTHS = 6; private final static int DAYS_PER_MONTH = 31; private final static int AVG_TWEETS_PER_DAY = 50; private final static int ESTIMATED_TWEET_COUNT = AVG_TWEETS_PER_DAY * DAYS_PER_MONTH * TIME_LIMIT_MONTHS; private final static long DOWNLOAD_ALL_AVAILABLE_TWEETS = -1; private final List<String> users; private final HttpClient httpClient; private final TweetsHtmlParser htmlParser; private final TweetsJsonParser jsonParser; private final DateTime oldestTweet; @JmxAttributeField(description = "Actively spidering users", isWritable = false) private boolean running = false; @JmxAttributeField(description = "Finished spidering users", isWritable = false) private boolean completed = false; @JmxAttributeField(description = "Users Processed", isWritable = false) private final Path outputDirectory; @JmxAttributeField(description = "Users Processed", isWritable = false) private final String category; @JmxAttributeField(description = "Paused", isWritable = false) private boolean paused = false; @JmxAttributeField(description = "Users Processed", isWritable = false) private int spideredUsers = 0; @JmxAttributeField(description = "Tweets Downloaded", isWritable = false) private int tweetsDownloaded = 0; @JmxAttributeField(description = "Users in Category", isWritable = false) private int userCount = 0; private final Throttle throttle; private final ProgressMonitor progress; public IndividualUserTweetsSpider(Throttle throttle, ProgressMonitor progress, String category, List<String> users, Path outputDirectory) { super(); this.category = category; this.outputDirectory = outputDirectory; this.users = users; this.httpClient = createHttpClient(); this.htmlParser = new TweetsHtmlParser(); this.jsonParser = new TweetsJsonParser(htmlParser); this.oldestTweet = new DateTime().minusMonths(TIME_LIMIT_MONTHS); this.userCount = users.size(); this.throttle = throttle; this.progress = progress; this.progress.markPending(category); } public synchronized Integer call() { running = true; progress.markActive(category); List<Tweet> aggregateTweets = new ArrayList<>(ESTIMATED_TWEET_COUNT); int page; String responseBody; for (String user : users) { page = 1; try { long lastTweetId = readLastTweetId(user); if (shouldDownloadUsersTweets(user)) break; // We may be paused during working hours to avoid saturating the // network while (paused) wait(); final String pageUrl = "https://twitter.com/" + user; responseBody = makeHttpRequest(pageUrl); List<Tweet> tweets = htmlParser.parse (responseBody); Tweet lastTweet = removeLastAuthoredTweet(user, tweets); // continue reading until we've gone far enough back in time or we've // run out of tweets from the current user. while (! 
tweets.isEmpty() && lastTweet != null && lastTweet.getLocalTime().isAfter(oldestTweet)) { throttle.pause(); ++page; aggregateTweets.addAll(tweets); LOG.debug("Have accumulated " + aggregateTweets.size() + " tweets for user " + user + " after processing page " + page); responseBody = makeHttpRequest (jsonTweetsUrl(user, lastTweet.getId()), pageUrl); tweets = jsonParser.parse(responseBody); tweets = removeUndesireableTweets(tweets, lastTweetId); if (tweets.size() != UserRanker.STD_TWEETS_PER_PAGE) { LOG.warn ("Only got " + tweets.size() + " tweets for the most recent request for user " + user + " on page " + page + " with ID " + lastTweet.getId()); //System.err.println (resp.get().getResponseBody()); } lastTweet = removeLastAuthoredTweet(user, tweets); if (page % 10 == 0) writeTweets (user, aggregateTweets); } ++page; aggregateTweets.addAll(tweets); writeTweets (user, aggregateTweets); LOG.info("Finished fetching tweets for user " + user); //System.err.println ("Final response body was " + responseBody); } catch (Exception e) { e.printStackTrace(); LOG.error("Error downloading tweets on page " + page + " for user " + user + " : " + e.getMessage(), e); try { writeTweets (user, aggregateTweets); } catch (Exception eio) { LOG.error("Error writing tweets for user " + user + " while recovering from previous error : " + eio.getMessage(), eio); } } finally { ++spideredUsers; tweetsDownloaded += aggregateTweets.size(); aggregateTweets.clear(); } } completed = true; progress.markCompleted(category, tweetsDownloaded); return spideredUsers; } /** * Removes tweets we don't want. In this implementation, this does nothing. * @return */ protected List<Tweet> removeUndesireableTweets(List<Tweet> tweets, long lastTweetId) { return tweets; } /** * Should we download user's tweets or not. In the case of this method this will only return * true if we've failed either to download tweets from the minimum number of users, or failed * to download the minimum number of tweets thus far. */ protected boolean shouldDownloadUsersTweets(String user) { return spideredUsers < MIN_USERS_SPIDERED || tweetsDownloaded < MIN_TWEETS_SPIDERED; } private String makeHttpRequest(String url) throws IOException, HttpException { return makeHttpRequest(url, null); } private String makeHttpRequest(String url, String refUrl) throws IOException, HttpException { String responseBody; GetMethod req = new GetMethod(url); req.addRequestHeader(new Header("Accept-Charset", "utf-8")); req.addRequestHeader(new Header("Accept-Language", "en-US,en;q=0.8")); req.addRequestHeader(new Header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")); req.addRequestHeader(new Header("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/536.30.1 (KHTML, like Gecko) Version/6.0.5 Safari/536.30.1")); if (! StringUtils.isBlank(refUrl)) req.addRequestHeader(new Header("Referer", refUrl)); req.setFollowRedirects(true); int respStatusCode = httpClient.executeMethod(req); if (respStatusCode != HTTP_200_OK) throw new IOException ("Failed to download page, received HTTP response code " + respStatusCode); responseBody = req.getResponseBodyAsString(); return responseBody; } /** * Working from the final tweet, removes all tweets not authored * by the given user. Then removes one tweet, at the end, authored * by the given user, and returns it. This is to facilitate the * creation of URLs to fetch more tweets. * @param author the author whose tweet we search for at the end * of the given list. 
* @param tweets the list of tweets that is <strong>TRUNCATED</strong> * by this method * @return the last tweet originally (but no longer) in the list of * tweets to be authored by the given author */ private Tweet removeLastAuthoredTweet(String author, List<Tweet> tweets) { Tweet lastTweet = null; while (! tweets.isEmpty()) { lastTweet = tweets.remove (tweets.size() - 1); if (lastTweet.getAuthor().equals (author)) return lastTweet; } return lastTweet; } /** * Write all the tweets to a file. * @param user the user we're currently considering, determines the * filename * @param tweets the user's tweets. * @throws IOException */ private void writeTweets(String user, List<Tweet> tweets) throws IOException { Path catOutputDir = outputDirectory.resolve(category); if (! Files.exists(catOutputDir)) Files.createDirectories(catOutputDir); Path userOutputPath = catOutputDir.resolve(user); try ( BufferedWriter wtr = Files.newBufferedWriter(userOutputPath, Charsets.UTF_8); ) { for (Tweet tweet : tweets) { wtr.write(tweet.toShortTabDelimString()); } } } /** * If it exists, read the most recent user file, and find the ID of their most recent * tweet. * @throws IOException */ private long readLastTweetId(String user) throws IOException { Path path = newestTweetsFile (user, StandardOpenOption.READ); if (! Files.exists (path)) return DOWNLOAD_ALL_AVAILABLE_TWEETS; try ( SavedTweetReader rdr = new SavedTweetReader (path); ) { if (! rdr.hasNext()) return DOWNLOAD_ALL_AVAILABLE_TWEETS; else return rdr.next().getId(); } } /** * Returns the path from which we can either read the user's most recent * tweets, or to which we can write the tweets we've just downloaded, * the action specified by the third {@link OpenOption} parameter which * can be READ, APPEND, or CREATE. For CREATE we create a new file, based * on the old file name, with a period and then a number appended to the * end of the name. * @param user the user whose tweets are being read or written. * @param outDir where the tweets are stored * @param openOption whether the file should alreadsy exists (READ or * APPEND) or whether we should create a new file (CREATE). New files are * named with number suffixes so they never overwrite exting files. * @return the path to a file containing a users tweets. * @throws IOException */ protected Path newestTweetsFile(String user, StandardOpenOption openOption) throws IOException { Path catOutputDir = outputDirectory.resolve(category); if (! Files.exists(catOutputDir)) Files.createDirectories(catOutputDir); // Iterate until we've found the most recent pre-existing file, // and a suitable path for the next new file to create // In the special case of a first-time write existingUserPath // won't actually exist. Path newUserPath = catOutputDir.resolve(user); Path existingUserPath = null; int i = 0; do { existingUserPath = newUserPath; newUserPath = catOutputDir.resolve (user + '.' + (++i)); } while (Files.exists(newUserPath)); // Return the appropriate file based on the open criteria switch (openOption) { case READ: case APPEND: { return existingUserPath; } case CREATE: { return newUserPath; } default: throw new IllegalArgumentException ("The only open options allowed are READ, APPEND and CREATE as defined in StandardOpenOption. 
You specified " + openOption); } } private HttpClient createHttpClient() { HttpClientParams params = new HttpClientParams(); params.setConnectionManagerTimeout(CONNECTION_TIMEOUT); params.setSoTimeout(CONNECTION_TIMEOUT); return new HttpClient (params); } @Override public String getJmxNameOfObject() { StringBuilder sb = new StringBuilder (category.length()); for (int i = 0; i < category.length(); i++) { char c = category.charAt(i); if (Character.isJavaIdentifierPart(c)) sb.append(c); } return this.getClass().getSimpleName() + '-' + sb.toString(); } /** * Creates the URL from which the next batch of tweets can be * fetched. The returned results is in JSON. * @param user * @param id * @return */ private final static String jsonTweetsUrl (String user, long id) { final String FMT = "https://twitter.com/i/profiles/show/%1$s/timeline/with_replies?include_available_features=1&include_entities=1&max_id=%2$d"; return String.format(FMT, user, id); } @JmxOperation(description = "Pause this downloader or vice versa") public synchronized void togglePaused() { paused = ! paused; notifyAll(); } public synchronized boolean isPaused() { return paused; } public synchronized void setPaused(boolean paused) { this.paused = paused; notifyAll(); } public synchronized boolean isCompleted() { return spideredUsers == users.size(); } public static void main (String[] args) { BasicConfigurator.configure(); Logger.getRootLogger().setLevel(Level.DEBUG); Path outputDir = Paths.get("/home/bfeeney/Desktop"); IndividualUserTweetsSpider tweetsSpider = new IndividualUserTweetsSpider ( new Throttle(), new ProgressMonitor(), "misc", Collections.singletonList("rtraister"), outputDir ); tweetsSpider.call(); } }
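The spider above calls throttle.pause() between timeline requests to honour the delay described in the class javadoc (roughly two seconds per request); the project's actual Throttle class is not shown in this excerpt. A minimal fixed-delay sketch with the same contract, written under the assumption that a simple elapsed-time check is all that is required:

public final class FixedDelayThrottle {

    private final long delayMillis;
    private long lastCallMillis;

    public FixedDelayThrottle(long delayMillis) {
        this.delayMillis = delayMillis;
    }

    /** Blocks until at least delayMillis have elapsed since the previous call. */
    public synchronized void pause() {
        long wait = lastCallMillis + delayMillis - System.currentTimeMillis();
        if (wait > 0) {
            try {
                Thread.sleep(wait);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve the interrupt status
            }
        }
        lastCallMillis = System.currentTimeMillis();
    }
}

// usage (hypothetical): new FixedDelayThrottle(2000).pause() before each HTTP request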
src/main/java/cc/twittertools/spider/IndividualUserTweetsSpider.java
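The newestTweetsFile() javadoc above describes the per-user output naming scheme: the base file name, then .1, .2, ... suffixes, with CREATE always choosing a name that does not yet exist so earlier downloads are never overwritten. The core of that scheme, isolated from the spider; the helper class and method names are illustrative only.

import java.nio.file.Files;
import java.nio.file.Path;

public final class VersionedPaths {

    /** Returns the first of dir/base, dir/base.1, dir/base.2, ... that does not yet exist. */
    public static Path nextUnused(Path dir, String base) {
        Path candidate = dir.resolve(base);
        int i = 0;
        while (Files.exists(candidate)) {
            candidate = dir.resolve(base + '.' + (++i));
        }
        return candidate;
    }
}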
package cc.twittertools.spider; import static cc.twittertools.download.AsyncEmbeddedJsonStatusBlockCrawler.CONNECTION_TIMEOUT; import java.io.BufferedWriter; import java.io.IOException; import java.nio.file.Files; import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.Callable; import org.apache.commons.httpclient.Header; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.HttpException; import org.apache.commons.httpclient.methods.GetMethod; import org.apache.commons.httpclient.params.HttpClientParams; import org.apache.commons.lang.StringUtils; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.joda.time.DateTime; import cc.twittertools.post.SavedTweetReader; import cc.twittertools.post.Tweet; import com.google.common.base.Charsets; import com.j256.simplejmx.common.BaseJmxSelfNaming; import com.j256.simplejmx.common.JmxAttributeField; import com.j256.simplejmx.common.JmxOperation; import com.j256.simplejmx.common.JmxResource; import com.j256.simplejmx.common.JmxSelfNaming; /** * Takes a list of users, and one by one downloads their tweets, using * a synchronous HTTP client, with a 2 second delay between requests. * <p> * The individual part refers to the fact that we're only downloading * tweets for one user at a time, and downloading them serially. * <p> * See UserTweetsSpider for the code to set all this in motion. * <p> * We continue downloading tweets until we've accumulated a minimum * number of tweets in total from a minimum number of users (i.e. one * of these minima will likely be exceeded to meet the other). 
* <p> * Tweets written out by this should be read in using * {@link SavedTweetReader} */ @JmxResource(description = "Category tweet download", domainName = "cc.twittertools.spider", folderNames={ "spiders" }) public class IndividualUserTweetsSpider extends BaseJmxSelfNaming implements JmxSelfNaming, Callable<Integer> { private static final int HTTP_200_OK = 200; private final static Logger LOG = Logger.getLogger(IndividualUserTweetsSpider.class); private final static int MIN_USERS_SPIDERED = 200; private final static int MIN_TWEETS_PER_USER = 1000; private final static int MIN_TWEETS_SPIDERED = MIN_USERS_SPIDERED * MIN_TWEETS_PER_USER; private final static int TIME_LIMIT_MONTHS = 6; private final static int DAYS_PER_MONTH = 31; private final static int AVG_TWEETS_PER_DAY = 50; private final static int ESTIMATED_TWEET_COUNT = AVG_TWEETS_PER_DAY * DAYS_PER_MONTH * TIME_LIMIT_MONTHS; private final static long DOWNLOAD_ALL_AVAILABLE_TWEETS = -1; private final List<String> users; private final HttpClient httpClient; private final TweetsHtmlParser htmlParser; private final TweetsJsonParser jsonParser; private final DateTime oldestTweet; @JmxAttributeField(description = "Actively spidering users", isWritable = false) private boolean running = false; @JmxAttributeField(description = "Finished spidering users", isWritable = false) private boolean completed = false; @JmxAttributeField(description = "Users Processed", isWritable = false) private final Path outputDirectory; @JmxAttributeField(description = "Users Processed", isWritable = false) private final String category; @JmxAttributeField(description = "Paused", isWritable = false) private boolean paused = false; @JmxAttributeField(description = "Users Processed", isWritable = false) private int spideredUsers = 0; @JmxAttributeField(description = "Tweets Downloaded", isWritable = false) private int tweetsDownloaded = 0; @JmxAttributeField(description = "Users in Category", isWritable = false) private int userCount = 0; private final Throttle throttle; private final ProgressMonitor progress; public IndividualUserTweetsSpider(Throttle throttle, ProgressMonitor progress, String category, List<String> users, Path outputDirectory) { super(); this.category = category; this.outputDirectory = outputDirectory; this.users = users; this.httpClient = createHttpClient(); this.htmlParser = new TweetsHtmlParser(); this.jsonParser = new TweetsJsonParser(htmlParser); this.oldestTweet = new DateTime().minusMonths(TIME_LIMIT_MONTHS); this.userCount = users.size(); this.throttle = throttle; this.progress = progress; this.progress.markPending(category); } public synchronized Integer call() { running = true; progress.markActive(category); List<Tweet> aggregateTweets = new ArrayList<>(ESTIMATED_TWEET_COUNT); int page; String responseBody; for (String user : users) { page = 1; try { long lastTweetId = readLastTweetId(user); if (shouldDownloadUsersTweets(user)) break; // We may be paused during working hours to avoid saturating the // network while (paused) wait(); final String pageUrl = "https://twitter.com/" + user; responseBody = makeHttpRequest(pageUrl); List<Tweet> tweets = htmlParser.parse (responseBody); Tweet lastTweet = removeLastAuthoredTweet(user, tweets); // continue reading until we've gone far enough back in time or we've // run out of tweets from the current user. while (! 
tweets.isEmpty() && lastTweet != null && lastTweet.getLocalTime().isAfter(oldestTweet)) { throttle.pause(); ++page; aggregateTweets.addAll(tweets); LOG.debug("Have accumulated " + aggregateTweets.size() + " tweets for user " + user + " after processing page " + page); responseBody = makeHttpRequest (jsonTweetsUrl(user, lastTweet.getId()), pageUrl); tweets = jsonParser.parse(responseBody); tweets = removeUndesireableTweets(tweets, lastTweetId); if (tweets.size() != UserRanker.STD_TWEETS_PER_PAGE) { LOG.warn ("Only got " + tweets.size() + " tweets for the most recent request for user " + user + " on page " + page + " with ID " + lastTweet.getId()); //System.err.println (resp.get().getResponseBody()); } lastTweet = removeLastAuthoredTweet(user, tweets); if (page % 10 == 0) writeTweets (user, aggregateTweets); } ++page; aggregateTweets.addAll(tweets); writeTweets (user, aggregateTweets); LOG.info("Finished fetching tweets for user " + user); //System.err.println ("Final response body was " + responseBody); } catch (Exception e) { e.printStackTrace(); LOG.error("Error downloading tweets on page " + page + " for user " + user + " : " + e.getMessage(), e); try { writeTweets (user, aggregateTweets); } catch (Exception eio) { LOG.error("Error writing tweets for user " + user + " while recovering from previous error : " + eio.getMessage(), eio); } } finally { ++spideredUsers; tweetsDownloaded += aggregateTweets.size(); aggregateTweets.clear(); } } completed = true; progress.markCompleted(category, tweetsDownloaded); return spideredUsers; } /** * Removes tweets we don't want. In this implementation, this does nothing. * @return */ protected List<Tweet> removeUndesireableTweets(List<Tweet> tweets, long lastTweetId) { return tweets; } /** * Should we download user's tweets or not. In the case of this method this will only return * true if we've failed either to download tweets from the minimum number of users, or failed * to download the minimum number of tweets thus far. */ protected boolean shouldDownloadUsersTweets(String user) { return spideredUsers < MIN_USERS_SPIDERED || tweetsDownloaded < MIN_TWEETS_SPIDERED; } private String makeHttpRequest(String url) throws IOException, HttpException { return makeHttpRequest(url, null); } private String makeHttpRequest(String url, String refUrl) throws IOException, HttpException { String responseBody; GetMethod req = new GetMethod(url); req.addRequestHeader(new Header("Accept-Charset", "utf-8")); req.addRequestHeader(new Header("Accept-Language", "en-US,en;q=0.8")); req.addRequestHeader(new Header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")); req.addRequestHeader(new Header("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/536.30.1 (KHTML, like Gecko) Version/6.0.5 Safari/536.30.1")); if (! StringUtils.isBlank(refUrl)) req.addRequestHeader(new Header("Referer", refUrl)); req.setFollowRedirects(true); int respStatusCode = httpClient.executeMethod(req); if (respStatusCode != HTTP_200_OK) throw new IOException ("Failed to download page, received HTTP response code " + respStatusCode); responseBody = req.getResponseBodyAsString(); return responseBody; } /** * Working from the final tweet, removes all tweets not authored * by the given user. Then removes one tweet, at the end, authored * by the given user, and returns it. This is to facilitate the * creation of URLs to fetch more tweets. * @param author the author whose tweet we search for at the end * of the given list. 
* @param tweets the list of tweets that is <strong>TRUNCATED</strong> * by this method * @return the last tweet originally (but no longer) in the list of * tweets to be authored by the given author */ private Tweet removeLastAuthoredTweet(String author, List<Tweet> tweets) { Tweet lastTweet = null; while (! tweets.isEmpty()) { lastTweet = tweets.remove (tweets.size() - 1); if (lastTweet.getAuthor().equals (author)) return lastTweet; } return lastTweet; } /** * Write all the tweets to a file. * @param user the user we're currently considering, determines the * filename * @param tweets the user's tweets. * @throws IOException */ private void writeTweets(String user, List<Tweet> tweets) throws IOException { Path catOutputDir = outputDirectory.resolve(category); if (! Files.exists(catOutputDir)) Files.createDirectories(catOutputDir); Path userOutputPath = catOutputDir.resolve(user); try ( BufferedWriter wtr = Files.newBufferedWriter(userOutputPath, Charsets.UTF_8); ) { for (Tweet tweet : tweets) { wtr.write(tweet.toShortTabDelimString()); } } } /** * If it exists, read the most recent user file, and find the ID of their most recent * tweet. * @throws IOException */ private long readLastTweetId(String user) throws IOException { Path path = newestTweetsFile (user, StandardOpenOption.READ); if (! Files.exists (path)) return DOWNLOAD_ALL_AVAILABLE_TWEETS; try ( SavedTweetReader rdr = new SavedTweetReader (path); ) { if (! rdr.hasNext()) return DOWNLOAD_ALL_AVAILABLE_TWEETS; else return rdr.next().getId(); } } /** * Returns the path from which we can either read the user's most recent * tweets, or to which we can write the tweets we've just downloaded, * the action specified by the third {@link OpenOption} parameter which * can be READ, APPEND, or CREATE. For CREATE we create a new file, based * on the old file name, with a period and then a number appended to the * end of the name. * @param user the user whose tweets are being read or written. * @param outDir where the tweets are stored * @param openOption whether the file should alreadsy exists (READ or * APPEND) or whether we should create a new file (CREATE). New files are * named with number suffixes so they never overwrite exting files. * @return the path to a file containing a users tweets. * @throws IOException */ private Path newestTweetsFile(String user, StandardOpenOption openOption) throws IOException { Path catOutputDir = outputDirectory.resolve(category); if (! Files.exists(catOutputDir)) Files.createDirectories(catOutputDir); Path userOutputPath = catOutputDir.resolve(user); switch (openOption) { case READ: case APPEND: { // BUG BUG NEED TO ITERATE TO MOST RECENT FILE // NEED TO RETURN NULL IF FILE IS ABSENT q = 3 +1; return userOutputPath; } case CREATE: { int i = 0; while (Files.exists(userOutputPath)) userOutputPath = catOutputDir.resolve (user + '.' + (++i)); return userOutputPath; } default: throw new IllegalArgumentException ("The only open options allowed are READ, APPEND and CREATE as defined in StandardOpenOption. 
You specified " + openOption); } } private HttpClient createHttpClient() { HttpClientParams params = new HttpClientParams(); params.setConnectionManagerTimeout(CONNECTION_TIMEOUT); params.setSoTimeout(CONNECTION_TIMEOUT); return new HttpClient (params); } @Override public String getJmxNameOfObject() { StringBuilder sb = new StringBuilder (category.length()); for (int i = 0; i < category.length(); i++) { char c = category.charAt(i); if (Character.isJavaIdentifierPart(c)) sb.append(c); } return this.getClass().getSimpleName() + '-' + sb.toString(); } /** * Creates the URL from which the next batch of tweets can be * fetched. The returned results is in JSON. * @param user * @param id * @return */ private final static String jsonTweetsUrl (String user, long id) { final String FMT = "https://twitter.com/i/profiles/show/%1$s/timeline/with_replies?include_available_features=1&include_entities=1&max_id=%2$d"; return String.format(FMT, user, id); } @JmxOperation(description = "Pause this downloader or vice versa") public synchronized void togglePaused() { paused = ! paused; notifyAll(); } public synchronized boolean isPaused() { return paused; } public synchronized void setPaused(boolean paused) { this.paused = paused; notifyAll(); } public synchronized boolean isCompleted() { return spideredUsers == users.size(); } public static void main (String[] args) { BasicConfigurator.configure(); Logger.getRootLogger().setLevel(Level.DEBUG); Path outputDir = Paths.get("/home/bfeeney/Desktop"); IndividualUserTweetsSpider tweetsSpider = new IndividualUserTweetsSpider ( new Throttle(), new ProgressMonitor(), "misc", Collections.singletonList("rtraister"), outputDir ); tweetsSpider.call(); } }
Fixed bug in how we're getting the most recent twitter file (for read we were returning the _oldest_ twitter file)
src/main/java/cc/twittertools/spider/IndividualUserTweetsSpider.java
Fixed bug in how we're getting the most recent twitter file (for read we were returning the _oldest_ twitter file)
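The bug described in this commit message lives in the READ/APPEND branch of newestTweetsFile above: the CREATE branch names successive dumps user, user.1, user.2, and so on, but the read path simply returned the unsuffixed path, i.e. the oldest file. The actual patch is not reproduced in this excerpt; the sketch below is one plausible way to walk the suffixes to the newest existing file (returning null when the user has never been spidered), assuming the same java.nio.file imports as the class above.

// Walk the numeric suffixes and keep the last candidate that exists on disk.
// Returns null when no tweets have been written for this user yet.
private static Path newestExistingTweetsFile(Path catOutputDir, String user) {
    Path newest = null;
    Path candidate = catOutputDir.resolve(user);              // unsuffixed first dump
    int i = 0;
    while (Files.exists(candidate)) {
        newest = candidate;
        candidate = catOutputDir.resolve(user + '.' + (++i)); // user.1, user.2, ...
    }
    return newest;
}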
Java
apache-2.0
479a3a103042d98dfa5ebf885e40b9447b5f0b85
0
sormuras/beethoven,sormuras/beethoven
/* * Copyright (C) 2016 Christian Stein * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.github.sormuras.beethoven; import java.lang.reflect.Member; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.function.IntPredicate; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; import javax.lang.model.SourceVersion; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.PackageElement; /** * Names are used to refer to entities declared in a program. * * <p>A declared entity is a package, class type (normal or enum), interface type (normal or * annotation type), member (class, interface, field, or method) argument a reference type, type * parameter (argument a class, interface, method or constructor), parameter (to a method, * constructor, or exception handler), or local variable. * * @see <a href="https://docs.oracle.com/javase/specs/jls/se8/html/jls-6.html">JLS 6</a> */ public final class Name { /** Compiled <code>"."</code> pattern used to split canonical package and type names. */ public static final Pattern DOT = Pattern.compile("\\."); /** Cast/convert any object to an instance argument {@link Name}. */ public static Name cast(Object any) { if (any == null) { return null; } if (any instanceof Name) { return (Name) any; } if (any instanceof Class) { return name((Class<?>) any); } if (any instanceof Enum) { return name((Enum<?>) any); } if (any instanceof Member) { return name((Member) any); } if (any instanceof String[]) { return name((String[]) any); } if (any instanceof Collection<?>) { Collection<?> collection = (Collection<?>) any; return name(collection.stream().map(Object::toString).collect(Collectors.toList())); } throw new IllegalArgumentException("Can't cast/convert instance argument " + any.getClass()); } /** Create name instance for the given class instance. */ public static Name name(Class<?> type) { String[] packageNames = DOT.split(type.getName()); // java[.]lang[.]Thread$State String[] identifiers = DOT.split(type.getCanonicalName()); // java[.]lang[.]Thread[.]State return new Name(packageNames.length - 1, Arrays.asList(identifiers)); } /** Create new Name based on the class type and declared member name. */ public static Name name(Class<?> declaringType, String declaredMemberName) { Name declaringName = name(declaringType); List<String> names = new ArrayList<>(declaringName.size + 1); names.addAll(declaringName.identifiers); names.add(declaredMemberName); return new Name(declaringName.packageLevel, names); } /** Create new Name based on type element instance. 
*/ public static Name name(Element element) { List<String> simpleNames = new ArrayList<>(); for (Element e = element; true; e = e.getEnclosingElement()) { if (e.getKind() == ElementKind.PACKAGE) { PackageElement casted = (PackageElement) e; if (casted.isUnnamed()) { return new Name(0, simpleNames); } String[] packageNames = DOT.split(casted.getQualifiedName().toString()); simpleNames.addAll(0, Arrays.asList(packageNames)); return new Name(packageNames.length, simpleNames); } simpleNames.add(0, e.getSimpleName().toString()); } } /** Create name instance for the given enum constant. */ public static Name name(Enum<?> constant) { return name(constant.getDeclaringClass(), constant.name()); } /** * Create name instance for the identifiers. * * <p>The fully qualified class name {@code abc.xyz.Alphabet} can be created by: * * <pre> * name(2, "abc", "xyz", "Alphabet") * </pre> * * @throws AssertionError if any identifier is not a syntactically valid qualified name. */ public static Name name(int packageLevel, List<String> names) { assert packageLevel >= 0 : "Package level must not be < 0, but is " + packageLevel; assert packageLevel <= names.size() : "Package level " + packageLevel + " too high: " + names; assert names.stream().allMatch(SourceVersion::isName) : "Non-name in " + names; return new Name(packageLevel, names); } /** * Create name instance for the identifiers by delegating to {@link #name(int, List)}. * * <p>The package level is determined by the first capital name argument the list. */ public static Name name(List<String> names) { int size = names.size(); IntPredicate uppercase = index -> Character.isUpperCase(names.get(index).codePointAt(0)); int packageLevel = IntStream.range(0, size).filter(uppercase).findFirst().orElse(size); return name(packageLevel, names); } /** Create new Name based on the member instance. */ public static Name name(Member member) { return name(member.getDeclaringClass(), member.getName()); } /** Create name instance for the identifiers by delegating to {@link #name(List)}. */ public static Name name(String... identifiers) { return name(Arrays.asList(identifiers)); } /** Create new Name based on the class type and declared member name. */ public static Name reflect(Class<?> type, String declaredName) { try { Member field = type.getDeclaredField(declaredName); return name(field); } catch (Exception expected) { // fall-through } for (Member method : type.getDeclaredMethods()) { if (method.getName().equals(declaredName)) { return name(method); } } throw new AssertionError( String.format("Member '%s' argument %s not found!", declaredName, type)); } private final String canonical; private final List<String> identifiers; private final int packageLevel; private final String packageName; private final String simpleNames; private final int size; Name(int packageLevel, List<String> identifiers) { assert packageLevel <= identifiers.size() : "package level " + packageLevel + " too high: " + identifiers; this.packageLevel = packageLevel; this.identifiers = List.of(identifiers.toArray(new String[identifiers.size()])); this.size = identifiers.size(); this.canonical = String.join(".", identifiers); this.packageName = String.join(".", identifiers.subList(0, packageLevel)); this.simpleNames = String.join(".", identifiers.subList(packageLevel, size)); } public String canonical() { return canonical; } /** Create new enclosing {@link Name} instance based on this identifiers. 
*/ public Name enclosing() { if (!isEnclosed()) { throw new IllegalStateException(String.format("Not enclosed: '%s'", this)); } int shrunkByOne = size - 1; int newPackageLevel = Math.min(packageLevel, shrunkByOne); return new Name(newPackageLevel, identifiers.subList(0, shrunkByOne)); } @Override public boolean equals(Object other) { if (this == other) { return true; } if (other == null || getClass() != other.getClass()) { return false; } return hashCode() == other.hashCode(); } @Override public int hashCode() { return canonical.hashCode(); } public List<String> identifiers() { return identifiers; } public boolean isEnclosed() { return size > 1; } public boolean isJavaLangObject() { return size == 3 && "java.lang.Object".equals(canonical); } public boolean isJavaLangPackage() { return packageLevel == 2 && "java.lang".equals(packageName); } public String lastName() { return identifiers.get(size - 1); } public String packageName() { return packageName; } public String simpleNames() { return simpleNames; } public int size() { return size; } public String topLevelName() { return identifiers.get(packageLevel); } @Override public String toString() { return String.format("Name{%s/%s}", packageName, simpleNames); } }
src/main/java/com/github/sormuras/beethoven/Name.java
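A short usage sketch for the Name factories above; the printed values follow directly from the Thread$State/Thread.State example in the code comments and from how name(List) picks the package level at the first capitalised identifier.

import com.github.sormuras.beethoven.Name;

public class NameDemo {
    public static void main(String[] args) {
        Name state = Name.name(Thread.State.class);
        System.out.println(state.canonical());       // java.lang.Thread.State
        System.out.println(state.packageName());     // java.lang
        System.out.println(state.simpleNames());     // Thread.State
        System.out.println(state.enclosing());       // Name{java.lang/Thread}

        Name alphabet = Name.name("abc", "xyz", "Alphabet");
        System.out.println(alphabet.packageName());  // abc.xyz (package level = index of first capitalised name)
        System.out.println(alphabet.topLevelName()); // Alphabet
    }
}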
/* * Copyright (C) 2016 Christian Stein * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.github.sormuras.beethoven; import java.lang.reflect.Member; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.function.IntPredicate; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; import javax.lang.model.SourceVersion; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.PackageElement; /** * Names are used to refer to entities declared in a program. * * <p>A declared entity is a package, class type (normal or enum), interface type (normal or * annotation type), member (class, interface, field, or method) argument a reference type, type * parameter (argument a class, interface, method or constructor), parameter (to a method, * constructor, or exception handler), or local variable. * * @see <a href="https://docs.oracle.com/javase/specs/jls/se8/html/jls-6.html">JLS 6</a> */ public final class Name { /** Compiled <code>"."</code> pattern used to split canonical package and type names. */ public static final Pattern DOT = Pattern.compile("\\."); /** Cast/convert any object to an instance argument {@link Name}. */ public static Name cast(Object any) { if (any == null) { return null; } if (any instanceof Name) { return (Name) any; } if (any instanceof Class) { return name((Class<?>) any); } if (any instanceof Enum) { return name((Enum<?>) any); } if (any instanceof Member) { return name((Member) any); } if (any instanceof String[]) { return name((String[]) any); } if (any instanceof Collection<?>) { Collection<?> collection = (Collection<?>) any; return name(collection.stream().map(Object::toString).collect(Collectors.toList())); } throw new IllegalArgumentException("Can't cast/convert instance argument " + any.getClass()); } /** Create name instance for the given class instance. */ public static Name name(Class<?> type) { String[] packageNames = DOT.split(type.getName()); // java[.]lang[.]Thread$State String[] identifiers = DOT.split(type.getCanonicalName()); // java[.]lang[.]Thread[.]State return new Name(packageNames.length - 1, Arrays.asList(identifiers)); } /** Create new Name based on the class type and declared member name. */ public static Name name(Class<?> declaringType, String declaredMemberName) { Name declaringName = name(declaringType); List<String> names = new ArrayList<>(declaringName.size + 1); names.addAll(declaringName.identifiers); names.add(declaredMemberName); return new Name(declaringName.packageLevel, names); } /** Create new Name based on type element instance. 
*/ public static Name name(Element element) { List<String> simpleNames = new ArrayList<>(); for (Element e = element; true; e = e.getEnclosingElement()) { if (e.getKind() == ElementKind.PACKAGE) { PackageElement casted = (PackageElement) e; if (casted.isUnnamed()) { return new Name(0, simpleNames); } String[] packageNames = DOT.split(casted.getQualifiedName().toString()); simpleNames.addAll(0, Arrays.asList(packageNames)); return new Name(packageNames.length, simpleNames); } simpleNames.add(0, e.getSimpleName().toString()); } } /** Create name instance for the given enum constant. */ public static Name name(Enum<?> constant) { return name(constant.getDeclaringClass(), constant.name()); } /** * Create name instance for the identifiers. * * <p>The fully qualified class name {@code abc.xyz.Alphabet} can be created by: * * <pre> * name(2, "abc", "xyz", "Alphabet") * </pre> * * @throws AssertionError if any identifier is not a syntactically valid qualified name. */ public static Name name(int packageLevel, List<String> names) { assert packageLevel >= 0 : "Package level must not be < 0, but is " + packageLevel; assert packageLevel <= names.size() : "Package level " + packageLevel + " too high: " + names; assert names.stream().allMatch(SourceVersion::isName) : "Non-name in " + names; return new Name(packageLevel, names); } /** * Create name instance for the identifiers by delegating to {@link #name(int, List)}. * * <p>The package level is determined by the first capital name argument the list. */ public static Name name(List<String> names) { int size = names.size(); IntPredicate uppercase = index -> Character.isUpperCase(names.get(index).codePointAt(0)); int packageLevel = IntStream.range(0, size).filter(uppercase).findFirst().orElse(size); return name(packageLevel, names); } /** Create new Name based on the member instance. */ public static Name name(Member member) { return name(member.getDeclaringClass(), member.getName()); } /** Create name instance for the identifiers by delegating to {@link #name(List)}. */ public static Name name(String... identifiers) { return name(Arrays.asList(identifiers)); } /** Create new Name based on the class type and declared member name. */ public static Name reflect(Class<?> type, String declaredName) { try { Member field = type.getDeclaredField(declaredName); return name(field); } catch (Exception expected) { // fall-through } for (Member method : type.getDeclaredMethods()) { if (method.getName().equals(declaredName)) { return name(method); } } throw new AssertionError( String.format("Member '%s' argument %s not found!", declaredName, type)); } private final String canonical; private final List<String> identifiers; private final int packageLevel; private final String packageName; private final String simpleNames; private final int size; Name(int packageLevel, List<String> identifiers) { this.packageLevel = packageLevel; this.identifiers = List.of(identifiers.toArray(new String[identifiers.size()])); this.size = identifiers.size(); this.canonical = String.join(".", identifiers); this.packageName = String.join(".", identifiers.subList(0, packageLevel)); this.simpleNames = String.join(".", identifiers.subList(packageLevel, size)); } public String canonical() { return canonical; } /** Create new enclosing {@link Name} instance based on this identifiers. 
*/ public Name enclosing() { if (!isEnclosed()) { throw new IllegalStateException(String.format("Not enclosed: '%s'", this)); } int shrunkByOne = size - 1; int newPackageLevel = Math.min(packageLevel, shrunkByOne); return new Name(newPackageLevel, identifiers.subList(0, shrunkByOne)); } @Override public boolean equals(Object other) { if (this == other) { return true; } if (other == null || getClass() != other.getClass()) { return false; } return hashCode() == other.hashCode(); } @Override public int hashCode() { return canonical.hashCode(); } public List<String> identifiers() { return identifiers; } public boolean isEnclosed() { return size > 1; } public boolean isJavaLangObject() { return size == 3 && "java.lang.Object".equals(canonical); } public boolean isJavaLangPackage() { return packageLevel == 2 && "java.lang".equals(packageName); } public String lastName() { return identifiers.get(size - 1); } public String packageName() { return packageName; } public String simpleNames() { return simpleNames; } public int size() { return size; } public String topLevelName() { return identifiers.get(packageLevel); } @Override public String toString() { return String.format("Name{%s/%s}", packageName, simpleNames); } }
Assertion added.
src/main/java/com/github/sormuras/beethoven/Name.java
Assertion added.
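The one-line diff behind this "Assertion added." commit puts the packageLevel <= identifiers.size() check into the package-private Name constructor itself (the public name(int, List) factory already asserted it). Java assertions are off by default, so the new check only fires under java -ea; a minimal same-package sketch, with an illustrative class name:

package com.github.sormuras.beethoven;

import java.util.List;

class NameAssertionDemo {
    public static void main(String[] args) {
        // Valid: package level 2 of 3 identifiers -> packageName "abc.xyz", simple name "Alphabet".
        Name ok = new Name(2, List.of("abc", "xyz", "Alphabet"));
        System.out.println(ok);                       // Name{abc.xyz/Alphabet}

        // Under `java -ea` the new constructor assertion fails fast with
        //   AssertionError: package level 5 too high: [abc, xyz, Alphabet]
        // whereas without -ea the bad value only surfaces later, inside subList(...).
        Name broken = new Name(5, List.of("abc", "xyz", "Alphabet"));
        System.out.println(broken);
    }
}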
Java
apache-2.0
8e3d29cfe2812803c96485e9a5f699fe567f3d76
0
dsebban/orchestrate-java-client,dsebban/orchestrate-java-client,dsebban/orchestrate-java-client,dsebban/orchestrate-java-client
/* * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.orchestrate.client; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.extern.slf4j.Slf4j; import org.glassfish.grizzly.Connection; import org.glassfish.grizzly.filterchain.FilterChain; import org.glassfish.grizzly.filterchain.FilterChainBuilder; import org.glassfish.grizzly.filterchain.TransportFilter; import org.glassfish.grizzly.http.*; import org.glassfish.grizzly.http.util.Header; import org.glassfish.grizzly.memory.ByteBufferWrapper; import org.glassfish.grizzly.nio.NIOTransport; import org.glassfish.grizzly.nio.transport.TCPNIOTransportBuilder; import org.glassfish.grizzly.strategies.WorkerThreadIOStrategy; import org.glassfish.grizzly.threadpool.ThreadPoolConfig; import java.io.IOException; import java.net.URI; import java.nio.ByteBuffer; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; /** * A client used to read and write data to the Orchestrate.io service. * * <p>Usage: * <pre> * {@code * Client client = Client.builder("your api key").build(); * * // OR (as a shorthand with default settings): * Client client = new Client("your api key"); * } * </pre> */ @Slf4j public final class Client { /** Initial API; has KV, Events, Search, and early Graph support. */ public static final API V0 = API.v0; /** * The different versions of the Orchestrate.io service. */ private enum API { v0 } /** The builder for this instance of the client. */ private final Builder builder; /** The transport implementation for socket handling. */ private final NIOTransport transport; /** * Create a new {@code client} with the specified {@code apiKey} and default * {@code JacksonMapper}. * * <p>Equivalent to: * <pre> * {@code * Client client = Client.builder("your api key").build(); * } * </pre> * * @param apiKey An API key for the Orchestrate.io service. */ public Client(final String apiKey) { this(builder(apiKey)); } /** * Create a new {@code client} with the specified {@code apiKey} and {@code * objectMapper}. * * @param apiKey An API key for the Orchestrate.io service. * @param objectMapper The Jackson JSON mapper to marshall data with. */ public Client(final String apiKey, final ObjectMapper objectMapper) { this(builder(apiKey).mapper(objectMapper)); } /** * Create a new {@code client} with the specified {@code apiKey} and {@code * mapper}. * * @param apiKey An API key for the Orchestrate.io service. * @param mapper The mapper to marshall data with. */ public Client(final String apiKey, final JacksonMapper mapper) { this(builder(apiKey).mapper(mapper)); } /** * A client configured via the {@code Builder}. * * @param builder The builder used to configure the client. */ private Client(final Builder builder) { assert (builder != null); this.builder = builder; // TODO allow a custom executor service to be provided? 
final ThreadPoolConfig poolConfig = ThreadPoolConfig.defaultConfig() .setPoolName("OrchestrateClientPool") .setCorePoolSize(builder.poolSize) .setMaxPoolSize(builder.maxPoolSize); // TODO add support for GZip compression // TODO add SSL support final FilterChain filterChain = FilterChainBuilder.stateless() .add(new TransportFilter()) // .add(new IdleTimeoutFilter(timeoutExecutor, 10, TimeUnit.SECONDS)) .add(new HttpClientFilter()) .add(new ClientFilter(builder.host.toString(), builder.apiKey, builder.version.name(), builder.mapper)) .build(); // TODO experiment with the Leader-Follower IOStrategy this.transport = TCPNIOTransportBuilder.newInstance() .setTcpNoDelay(true) .setKeepAlive(true) .setWorkerThreadPoolConfig(poolConfig) .setIOStrategy(WorkerThreadIOStrategy.getInstance()) .setProcessor(filterChain) .build(); } private Future<Connection> newConnection() { try { if (transport.isStopped()) { transport.start(); } return transport.connect(builder.host.getHost(), builder.port); } catch (final Exception e) { throw new ClientException(e); } } /** * Executes the specified {@code deleteOp} on the Orchestrate.io service. * * @param deleteOp The delete operation to execute. * @return A future for the response from this operation. */ public OrchestrateFuture<Boolean> execute(final DeleteOperation deleteOp) { final OrchestrateFuture<Boolean> future = new OrchestrateFutureImpl<Boolean>(deleteOp); String uri = deleteOp.getCollection(); if (deleteOp.hasKey()) { uri = uri.concat("/").concat(deleteOp.getKey()); } final HttpRequestPacket.Builder httpHeaderBuilder = HttpRequestPacket .builder() .method(Method.DELETE) .uri(uri); if (!deleteOp.hasKey()) { httpHeaderBuilder.query("force=true"); } if (deleteOp.hasKey() && deleteOp.hasCurrentRef()) { final String value = "\"".concat(deleteOp.getCurrentRef()).concat("\""); httpHeaderBuilder.header(Header.IfMatch, value); } execute(httpHeaderBuilder.build().httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code eventFetchOp} on the Orchestrate.io service. * * @param eventFetchOp The event fetch operation to execute. * @param <T> The type to deserialize the results to. * @return A future for the response from this operation. */ public <T> OrchestrateFuture<Iterable<Event<T>>> execute(final EventFetchOperation<T> eventFetchOp) { final OrchestrateFuture<Iterable<Event<T>>> future = new OrchestrateFutureImpl<Iterable<Event<T>>>(eventFetchOp); final String uri = eventFetchOp.getCollection() .concat("/") .concat(eventFetchOp.getKey()) .concat("/events/") .concat(eventFetchOp.getType()); final HttpRequestPacket.Builder httpHeaderBuilder = HttpRequestPacket .builder() .method(Method.GET) .uri(uri); String query = null; if (eventFetchOp.hasStart()) { query += "start=" + eventFetchOp.getStart(); } if (eventFetchOp.hasEnd()) { query += "&end=" + eventFetchOp.getEnd(); } httpHeaderBuilder.query(query); execute(httpHeaderBuilder.build().httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code eventStoreOp} on the Orchestrate.io service. * * @param eventStoreOp The event store operation to execute. * @return A future for the response from this operation. 
*/ public OrchestrateFuture<Boolean> execute(final EventStoreOperation eventStoreOp) { final OrchestrateFutureImpl<Boolean> future = new OrchestrateFutureImpl<Boolean>(eventStoreOp); final ObjectMapper mapper = builder.mapper.getMapper(); final byte[] content; try { final Object value = eventStoreOp.getValue(); if (value instanceof String) { content = ((String) value).getBytes(); } else { content = mapper.writeValueAsBytes(value); } } catch (final JsonProcessingException e) { future.setException(e); return future; } final String uri = eventStoreOp.getCollection() .concat("/") .concat(eventStoreOp.getKey()) .concat("/events/") .concat(eventStoreOp.getType()); final HttpRequestPacket.Builder httpHeaderBuilder = HttpRequestPacket .builder() .method(Method.PUT) .contentType("application/json") .uri(uri); if (eventStoreOp.hasTimestamp()) { httpHeaderBuilder.query("timestamp=" + eventStoreOp.getTimestamp()); } httpHeaderBuilder.contentLength(content.length); final HttpContent httpContent = httpHeaderBuilder.build() .httpContentBuilder() .content(new ByteBufferWrapper(ByteBuffer.wrap(content))) .build(); execute(httpContent, future); return future; } /** * Executes the specified {@code kvFetchOp} on the Orchestrate.io service. * * @param kvFetchOp The KV fetch operation to execute. * @param <T> The type to deserialize the results to. * @return The future for the response from this operation. */ public <T> OrchestrateFuture<KvObject<T>> execute(final KvFetchOperation<T> kvFetchOp) { final OrchestrateFuture<KvObject<T>> future = new OrchestrateFutureImpl<KvObject<T>>(kvFetchOp); String uri = kvFetchOp.getCollection() .concat("/") .concat(kvFetchOp.getKey()); if (kvFetchOp.hasRef()) { uri = uri.concat("/refs/").concat(kvFetchOp.getRef()); } final HttpRequestPacket httpPacket = HttpRequestPacket .builder() .method(Method.GET) .uri(uri) .build(); execute(httpPacket.httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code kvStoreOp} on the Orchestrate.io service. * * @param kvStoreOp The KV store operation to execute. * @return The future for the response from this operation. */ public OrchestrateFuture<KvMetadata> execute(final KvStoreOperation kvStoreOp) { final OrchestrateFutureImpl<KvMetadata> future = new OrchestrateFutureImpl<KvMetadata>(kvStoreOp); final ObjectMapper mapper = builder.mapper.getMapper(); final byte[] content; try { final Object value = kvStoreOp.getValue(); if (value instanceof String) { content = ((String) value).getBytes(); } else { content = mapper.writeValueAsBytes(value); } } catch (final JsonProcessingException e) { future.setException(e); return future; } final String uri = kvStoreOp.getCollection() .concat("/") .concat(kvStoreOp.getKey()); final HttpRequestPacket.Builder httpHeaderBuilder = HttpRequestPacket .builder() .method(Method.PUT) .contentType("application/json") .uri(uri); if (kvStoreOp.hasCurrentRef()) { final String ref = "\"".concat(kvStoreOp.getCurrentRef()).concat("\""); httpHeaderBuilder.header(Header.IfMatch, ref); } else if (kvStoreOp.hasIfAbsent()) { httpHeaderBuilder.header(Header.IfNoneMatch, "\"*\""); } httpHeaderBuilder.contentLength(content.length); final HttpContent httpContent = httpHeaderBuilder.build() .httpContentBuilder() .content(new ByteBufferWrapper(ByteBuffer.wrap(content))) .build(); execute(httpContent, future); return future; } /** * Executes the specified {@code relationFetchOp} on the Orchestrate.io * service. * * @param relationFetchOp The relation fetch operation to execute. 
* @return The future for the response from this operation. */ public OrchestrateFuture<Iterable<KvObject<String>>> execute( final RelationFetchOperation relationFetchOp) { final OrchestrateFuture<Iterable<KvObject<String>>> future = new OrchestrateFutureImpl<Iterable<KvObject<String>>>(relationFetchOp); String uri = relationFetchOp.getCollection() .concat("/") .concat(relationFetchOp.getKey()) .concat("/relations"); for (final String kind : relationFetchOp.getKinds()) { uri = uri.concat("/").concat(kind); } final HttpRequestPacket httpPacket = HttpRequestPacket .builder() .method(Method.GET) .uri(uri) .build(); execute(httpPacket.httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code relationStoreOp} on the Orchestrate.io * service. * * @param relationStoreOp The relation store operation to execute. * @return The future for the response from this operation. */ public OrchestrateFuture<Boolean> execute(final RelationStoreOperation relationStoreOp) { final OrchestrateFuture<Boolean> future = new OrchestrateFutureImpl<Boolean>(relationStoreOp); final String uri = relationStoreOp.getCollection() .concat("/") .concat(relationStoreOp.getKey()) .concat("/relation/") .concat(relationStoreOp.getKind()) .concat("/") .concat(relationStoreOp.getToCollection()) .concat("/") .concat(relationStoreOp.getToKey()); final HttpRequestPacket httpPacket = HttpRequestPacket .builder() .method(Method.PUT) .uri(uri) .build(); execute(httpPacket.httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code searchOp} on the Orchestrate.io service. * * @param searchOp The search operation to execute. * @param <T> The type to deserialize the results to. * @return The future for the response from this operation. */ public <T> OrchestrateFuture<SearchResults<T>> execute(final SearchOperation<T> searchOp) { final OrchestrateFuture<SearchResults<T>> future = new OrchestrateFutureImpl<SearchResults<T>>(searchOp); final String query = "query=".concat(searchOp.getQuery()) .concat("&limit=").concat(searchOp.getLimit() + "") .concat("&offset=").concat(searchOp.getOffset() + ""); final HttpRequestPacket httpPacket = HttpRequestPacket .builder() .method(Method.GET) .uri(searchOp.getCollection()) .query(query) .build(); execute(httpPacket.httpContentBuilder().build(), future); return future; } @SuppressWarnings("unchecked") private <T> void execute(final HttpContent httpPacket, final OrchestrateFuture<T> future) { final Connection<?> connection; try { final Future<Connection> connectionFuture = newConnection(); connection = connectionFuture.get(5, TimeUnit.SECONDS); log.info("{}", connection); } catch (final Exception e) { throw new ClientException(e); } // TODO abort the future early if the write fails connection.getAttributes().setAttribute(ClientFilter.HTTP_RESPONSE_ATTR, future); connection.write(httpPacket); } /** * Stops the thread pool and closes all connections in use by all the * operations. * * @throws IOException If resources couldn't be stopped. */ public void stop() throws IOException { if (transport != null && !transport.isStopped()) { transport.shutdownNow(); } } /** * A new builder to create a {@code Client} with default settings. * * @param apiKey An API key for the Orchestrate.io service. * @return A new {@code Builder} with default settings. 
*/ public static Builder builder(final String apiKey) { if (apiKey == null) { throw new IllegalArgumentException("'apiKey' cannot be null."); } if (apiKey.length() < 1) { throw new IllegalArgumentException("'apiKey' cannot be empty."); } if (apiKey.length() != 36) { final String message = "'apiKey' is invalid. " + "Currently the Orchestrate.io service uses 36 character keys."; throw new IllegalArgumentException(message); } return new Builder(apiKey); } /** * Builder used to create {@code Client} instances. * * <p>Usage: * <pre> * {@code * Client client = Client.builder("your api key") * .host("https://api.orchestrate.io") // optional * .port(80) // optional * .version(Client.V0) // optional * .poolSize(0) // optional * .maxPoolSize(15) // optional * .build(); * } * </pre> */ public static final class Builder { /** The default host for the Orchestrate.io service. */ public static final String DEFAULT_HOST = "https://api.orchestrate.io"; /** The default port for the Orchestrate.io service. */ public static final int DEFAULT_PORT = 80; /** An API key for the Orchestrate.io service. */ private final String apiKey; /** The host for the Orchestrate.io service. */ private URI host; /** The port for the Orchestrate.io service. */ private int port; /** The version of the Orchestrate API to use. */ private API version; /** The number of threads to use with the client. */ private int poolSize; /** The maximum size of the thread pool to use with the client. */ private int maxPoolSize; /** The configured JSON mapper. */ private JacksonMapper mapper; private Builder(final String apiKey) { assert (apiKey != null); assert (apiKey.length() == 36); this.apiKey = apiKey; host(DEFAULT_HOST); port(DEFAULT_PORT); version(Client.V0); poolSize(Runtime.getRuntime().availableProcessors()); maxPoolSize(Integer.MAX_VALUE); mapper(JacksonMapper.builder()); } /** * Set the hostname for the Orchestrate.io service, defaults to {@code * Builder.DEFAULT_HOST}. * * @param host The hostname for the Orchestrate.io service. * @return This builder. * @see Builder#DEFAULT_HOST */ public Builder host(final String host) { if (host == null) { throw new IllegalArgumentException("'host' cannot be null."); } if (host.length() < 1) { throw new IllegalArgumentException("'host' cannot be empty."); } this.host = URI.create(host); return this; } /** * Set the port for the Orchestrate.io service, defaults to {@code * Builder.DEFAULT_PORT}. * * @param port The port for the Orchestrate.io service. * @return This builder. * @see Builder#DEFAULT_PORT */ public Builder port(final int port) { if (port < 1 || port > 65535) { throw new IllegalArgumentException("'port' must be between 1 and 65535."); } this.port = port; return this; } /** * The version of the API to use with the Orchestrate.io service, * defaults to the latest and greatest version of the API. * * @param version The version of the Orchestrate.io service to use, e.g. * {@code Client.V0}. * @return This builder. */ public Builder version(final API version) { if (version == null) { throw new IllegalArgumentException("'version' cannot be null."); } this.version = version; return this; } /** * The initial number of threads to use with the client, defaults to * {@link Runtime#availableProcessors()}. * * @param poolSize The size of the thread pool to start with. * @return This builder. 
*/ public Builder poolSize(final int poolSize) { if (poolSize < 0) { throw new IllegalArgumentException("'poolSize' cannot be negative."); } this.poolSize = poolSize; return this; } /** * The maximum number of threads to use with the client, defaults to * {@link Integer#MAX_VALUE}. * * @param maxPoolSize The maximum size to grow the thread pool to. * @return This builder. */ public Builder maxPoolSize(final int maxPoolSize) { if (maxPoolSize < 1) { throw new IllegalArgumentException("'maxPoolSize' cannot be smaller than one."); } this.maxPoolSize = maxPoolSize; return this; } /** * The Jackson JSON {@code ObjectMapper} to use when marshalling data to * and from the service, defaults to {@link io.orchestrate.client.JacksonMapper#builder()}. * * @param objectMapper A Jackson JSON {@code ObjectMapper}. * @return This builder. */ public Builder mapper(final ObjectMapper objectMapper) { if (objectMapper == null) { throw new IllegalArgumentException("'objectMapper' cannot be null."); } return mapper(JacksonMapper.builder(objectMapper)); } /** * A {@code Builder} used to build the {@code JacksonMapper} to use when * marshalling data to and from the service. * * @param mapperBuilder A {@code JacksonMapper.Builder}. * @return This builder. */ public Builder mapper(final JacksonMapper.Builder mapperBuilder) { if (mapperBuilder == null) { throw new IllegalArgumentException("'mapperBuilder' cannot be null."); } return mapper(mapperBuilder.build()); } /** * A {@code JacksonMapper} to use when marshalling data to and from the * service. * * @param mapper A {@code JacksonMapper}. * @return This builder. */ public Builder mapper(final JacksonMapper mapper) { if (mapper == null) { throw new IllegalArgumentException("'mapper' cannot be null."); } this.mapper = mapper; return this; } /** * Creates a new {@code Client}. * * @return A new {@link Client}. */ public Client build() { return new Client(this); } } }
src/main/java/io/orchestrate/client/Client.java
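A configuration sketch for the Client.Builder above. The API key is a 36-character placeholder (builder() rejects anything else), and since the operation classes' constructors are not shown in this excerpt, only construction and shutdown are illustrated here.

import io.orchestrate.client.Client;

import java.io.IOException;

public class ClientDemo {
    public static void main(String[] args) throws IOException {
        // Placeholder key: the builder insists on exactly 36 characters (UUID length).
        String apiKey = "00000000-0000-0000-0000-000000000000";

        Client client = Client.builder(apiKey)
                .host("https://api.orchestrate.io")                    // default host
                .port(80)                                              // default port
                .poolSize(Runtime.getRuntime().availableProcessors())  // default pool size
                .maxPoolSize(16)
                .build();

        // ... create an operation and pass it to client.execute(...), which returns
        // an OrchestrateFuture for the asynchronous HTTP round trip ...

        client.stop();   // shuts down the Grizzly NIO transport and its worker pool
    }
}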
/* * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.orchestrate.client; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.extern.slf4j.Slf4j; import org.glassfish.grizzly.Connection; import org.glassfish.grizzly.filterchain.FilterChain; import org.glassfish.grizzly.filterchain.FilterChainBuilder; import org.glassfish.grizzly.filterchain.TransportFilter; import org.glassfish.grizzly.http.*; import org.glassfish.grizzly.http.util.Header; import org.glassfish.grizzly.memory.ByteBufferWrapper; import org.glassfish.grizzly.nio.NIOTransport; import org.glassfish.grizzly.nio.transport.TCPNIOTransportBuilder; import org.glassfish.grizzly.strategies.WorkerThreadIOStrategy; import org.glassfish.grizzly.threadpool.ThreadPoolConfig; import java.io.IOException; import java.net.URI; import java.nio.ByteBuffer; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; /** * A client used to read and write data to the Orchestrate.io service. * * <p>Usage: * <pre> * {@code * Client client = Client.builder("your api key").build(); * * // OR (as a shorthand with default settings): * Client client = new Client("your api key"); * } * </pre> */ @Slf4j public final class Client { /** Initial API; has KV, Events, Search, and early Graph support. */ public static final API V0 = API.v0; /** * The different versions of the Orchestrate.io service. */ private enum API { v0 } /** The builder for this instance of the client. */ private final Builder builder; /** The transport implementation for socket handling. */ private final NIOTransport transport; /** * Create a new {@code client} with the specified {@code apiKey} and default * {@code JacksonMapper}. * * <p>Equivalent to: * <pre> * {@code * Client client = Client.builder("your api key").build(); * } * </pre> * * @param apiKey An API key for the Orchestrate.io service. */ public Client(final String apiKey) { this(builder(apiKey)); } /** * Create a new {@code client} with the specified {@code apiKey} and {@code * objectMapper}. * * @param apiKey An API key for the Orchestrate.io service. * @param objectMapper The Jackson JSON mapper to marshall data with. */ public Client(final String apiKey, final ObjectMapper objectMapper) { this(builder(apiKey).mapper(objectMapper)); } /** * Create a new {@code client} with the specified {@code apiKey} and {@code * mapper}. * * @param apiKey An API key for the Orchestrate.io service. * @param mapper The mapper to marshall data with. */ public Client(final String apiKey, final JacksonMapper mapper) { this(builder(apiKey).mapper(mapper)); } /** * A client configured via the {@code Builder}. * * @param builder The builder used to configure the client. */ private Client(final Builder builder) { assert (builder != null); this.builder = builder; // TODO allow a custom executor service to be provided? 
final ThreadPoolConfig poolConfig = ThreadPoolConfig.defaultConfig() .setPoolName("OrchestrateClientPool") .setCorePoolSize(builder.poolSize) .setMaxPoolSize(builder.maxPoolSize); // TODO add support for GZip compression // TODO add SSL support final FilterChain filterChain = FilterChainBuilder.stateless() .add(new TransportFilter()) // .add(new IdleTimeoutFilter(timeoutExecutor, 10, TimeUnit.SECONDS)) .add(new HttpClientFilter()) .add(new ClientFilter(builder.host.toString(), builder.apiKey, builder.version.name(), builder.mapper)) .build(); // TODO experiment with the Leader-Follower IOStrategy this.transport = TCPNIOTransportBuilder.newInstance() .setTcpNoDelay(true) .setKeepAlive(true) .setWorkerThreadPoolConfig(poolConfig) .setIOStrategy(WorkerThreadIOStrategy.getInstance()) .setProcessor(filterChain) .build(); } private Future<Connection> newConnection() { try { if (transport.isStopped()) { transport.start(); } return transport.connect(builder.host.getHost(), builder.port); } catch (final Exception e) { throw new ClientException(e); } } /** * Executes the specified {@code deleteOp} on the Orchestrate.io service. * * @param deleteOp The delete operation to execute. * @return A future for the response from this operation. */ public OrchestrateFuture<Boolean> execute(final DeleteOperation deleteOp) { final OrchestrateFuture<Boolean> future = new OrchestrateFutureImpl<Boolean>(deleteOp); String uri = deleteOp.getCollection(); if (deleteOp.hasKey()) { uri = uri.concat("/").concat(deleteOp.getKey()); } final HttpRequestPacket.Builder httpHeaderBuilder = HttpRequestPacket .builder() .method(Method.DELETE) .uri(uri); if (!deleteOp.hasKey()) { httpHeaderBuilder.query("force=true"); } if (deleteOp.hasKey() && deleteOp.hasCurrentRef()) { final String value = "\"".concat(deleteOp.getCurrentRef()).concat("\""); httpHeaderBuilder.header(Header.IfMatch, value); } execute(httpHeaderBuilder.build().httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code eventFetchOp} on the Orchestrate.io service. * * @param eventFetchOp The event fetch operation to execute. * @param <T> The type to deserialize the results to. * @return A future for the response from this operation. */ public <T> OrchestrateFuture<Iterable<Event<T>>> execute(final EventFetchOperation<T> eventFetchOp) { final OrchestrateFuture<Iterable<Event<T>>> future = new OrchestrateFutureImpl<Iterable<Event<T>>>(eventFetchOp); final String uri = eventFetchOp.getCollection() .concat("/") .concat(eventFetchOp.getKey()) .concat("/events/") .concat(eventFetchOp.getType()); final HttpRequestPacket.Builder httpHeaderBuilder = HttpRequestPacket .builder() .method(Method.GET) .uri(uri); String query = null; if (eventFetchOp.hasStart()) { query += "start=" + eventFetchOp.getStart(); } if (eventFetchOp.hasEnd()) { query += "&end=" + eventFetchOp.getEnd(); } httpHeaderBuilder.query(query); execute(httpHeaderBuilder.build().httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code eventStoreOp} on the Orchestrate.io service. * * @param eventStoreOp The event store operation to execute. * @return A future for the response from this operation. 
*/ public OrchestrateFuture<Boolean> execute(final EventStoreOperation eventStoreOp) { final OrchestrateFutureImpl<Boolean> future = new OrchestrateFutureImpl<Boolean>(eventStoreOp); final ObjectMapper mapper = builder.mapper.getMapper(); final byte[] content; try { final Object value = eventStoreOp.getValue(); if (value instanceof String) { content = ((String) value).getBytes(); } else { content = mapper.writeValueAsBytes(value); } } catch (final JsonProcessingException e) { future.setException(e); return future; } final String uri = eventStoreOp.getCollection() .concat("/") .concat(eventStoreOp.getKey()) .concat("/events/") .concat(eventStoreOp.getType()); final HttpRequestPacket.Builder httpHeaderBuilder = HttpRequestPacket .builder() .method(Method.PUT) .contentType("application/json") .uri(uri); if (eventStoreOp.hasTimestamp()) { httpHeaderBuilder.query("timestamp=" + eventStoreOp.getTimestamp()); } httpHeaderBuilder.contentLength(content.length); final HttpContent httpContent = httpHeaderBuilder.build() .httpContentBuilder() .content(new ByteBufferWrapper(ByteBuffer.wrap(content))) .build(); execute(httpContent, future); return future; } /** * Executes the specified {@code kvFetchOp} on the Orchestrate.io service. * * @param kvFetchOp The KV fetch operation to execute. * @param <T> The type to deserialize the results to. * @return The future for the response from this operation. */ public <T> OrchestrateFuture<KvObject<T>> execute(final KvFetchOperation<T> kvFetchOp) { final OrchestrateFuture<KvObject<T>> future = new OrchestrateFutureImpl<KvObject<T>>(kvFetchOp); String uri = kvFetchOp.getCollection() .concat("/") .concat(kvFetchOp.getKey()); if (kvFetchOp.hasRef()) { uri = uri.concat("/refs/").concat(kvFetchOp.getRef()); } final HttpRequestPacket httpPacket = HttpRequestPacket .builder() .method(Method.GET) .uri(uri) .build(); execute(httpPacket.httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code kvStoreOp} on the Orchestrate.io service. * * @param kvStoreOp The KV store operation to execute. * @return The future for the response from this operation. */ public OrchestrateFuture<KvMetadata> execute(final KvStoreOperation kvStoreOp) { final OrchestrateFutureImpl<KvMetadata> future = new OrchestrateFutureImpl<KvMetadata>(kvStoreOp); final ObjectMapper mapper = builder.mapper.getMapper(); final byte[] content; try { final Object value = kvStoreOp.getValue(); if (value instanceof String) { content = ((String) value).getBytes(); } else { content = mapper.writeValueAsBytes(value); } } catch (final JsonProcessingException e) { future.setException(e); return future; } final String uri = kvStoreOp.getCollection() .concat("/") .concat(kvStoreOp.getKey()); final HttpRequestPacket.Builder httpHeaderBuilder = HttpRequestPacket .builder() .method(Method.PUT) .contentType("application/json") .uri(uri); if (kvStoreOp.hasCurrentRef()) { final String ref = "\"".concat(kvStoreOp.getCurrentRef()).concat("\""); httpHeaderBuilder.header(Header.IfMatch, ref); } else if (kvStoreOp.hasIfAbsent()) { httpHeaderBuilder.header(Header.IfNoneMatch, "\"*\""); } httpHeaderBuilder.contentLength(content.length); final HttpContent httpContent = httpHeaderBuilder.build() .httpContentBuilder() .content(new ByteBufferWrapper(ByteBuffer.wrap(content))) .build(); execute(httpContent, future); return future; } /** * Executes the specified {@code relationFetchOp} on the Orchestrate.io * service. * * @param relationFetchOp The relation fetch operation to execute. 
* @return The future for the response from this operation. */ public OrchestrateFuture<Iterable<KvObject<String>>> execute( final RelationFetchOperation relationFetchOp) { final OrchestrateFuture<Iterable<KvObject<String>>> future = new OrchestrateFutureImpl<Iterable<KvObject<String>>>(relationFetchOp); String uri = relationFetchOp.getCollection() .concat("/") .concat(relationFetchOp.getKey()) .concat("/relations"); for (final String kind : relationFetchOp.getKinds()) { uri = uri.concat("/").concat(kind); } final HttpRequestPacket httpPacket = HttpRequestPacket .builder() .method(Method.GET) .uri(uri) .build(); execute(httpPacket.httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code relationStoreOp} on the Orchestrate.io * service. * * @param relationStoreOp The relation store operation to execute. * @return The future for the response from this operation. */ public OrchestrateFuture<Boolean> execute(final RelationStoreOperation relationStoreOp) { final OrchestrateFuture<Boolean> future = new OrchestrateFutureImpl<Boolean>(relationStoreOp); final String uri = relationStoreOp.getCollection() .concat("/") .concat(relationStoreOp.getKey()) .concat("/relation/") .concat(relationStoreOp.getKind()) .concat("/") .concat(relationStoreOp.getToCollection()) .concat("/") .concat(relationStoreOp.getToKey()); final HttpRequestPacket httpPacket = HttpRequestPacket .builder() .method(Method.PUT) .uri(uri) .build(); execute(httpPacket.httpContentBuilder().build(), future); return future; } /** * Executes the specified {@code searchOp} on the Orchestrate.io service. * * @param searchOp The search operation to execute. * @param <T> The type to deserialize the results to. * @return The future for the response from this operation. */ public <T> OrchestrateFuture<SearchResults<T>> execute(final SearchOperation<T> searchOp) { final OrchestrateFuture<SearchResults<T>> future = new OrchestrateFutureImpl<SearchResults<T>>(searchOp); final String query = "query=".concat(searchOp.getQuery()) .concat("&limit=").concat(searchOp.getLimit() + "") .concat("&offset=").concat(searchOp.getOffset() + ""); final HttpRequestPacket httpPacket = HttpRequestPacket .builder() .method(Method.GET) .uri(searchOp.getCollection()) .query(query) .build(); execute(httpPacket.httpContentBuilder().build(), future); return future; } @SuppressWarnings("unchecked") private <T> void execute(final HttpContent httpPacket, final OrchestrateFuture<T> future) { final Connection connection; try { final Future<Connection> connectionFuture = newConnection(); connection = connectionFuture.get(5, TimeUnit.SECONDS); log.info("{}", connection); } catch (final Exception e) { throw new ClientException(e); } // TODO abort the future early if the write fails connection.getAttributes().setAttribute(ClientFilter.HTTP_RESPONSE_ATTR, future); connection.write(httpPacket); } /** * Stops the thread pool and closes all connections in use by all the * operations. * * @throws IOException If resources couldn't be stopped. */ public void stop() throws IOException { if (transport != null && !transport.isStopped()) { transport.shutdownNow(); } } /** * A new builder to create a {@code Client} with default settings. * * @param apiKey An API key for the Orchestrate.io service. * @return A new {@code Builder} with default settings. 
*/ public static Builder builder(final String apiKey) { if (apiKey == null) { throw new IllegalArgumentException("'apiKey' cannot be null."); } if (apiKey.length() < 1) { throw new IllegalArgumentException("'apiKey' cannot be empty."); } if (apiKey.length() != 36) { final String message = "'apiKey' is invalid. " + "Currently the Orchestrate.io service uses 36 character keys."; throw new IllegalArgumentException(message); } return new Builder(apiKey); } /** * Builder used to create {@code Client} instances. * * <p>Usage: * <pre> * {@code * Client client = Client.builder("your api key") * .host("https://api.orchestrate.io") // optional * .port(80) // optional * .version(Client.V0) // optional * .poolSize(0) // optional * .maxPoolSize(15) // optional * .build(); * } * </pre> */ public static final class Builder { /** The default host for the Orchestrate.io service. */ public static final String DEFAULT_HOST = "https://api.orchestrate.io"; /** The default port for the Orchestrate.io service. */ public static final int DEFAULT_PORT = 80; /** An API key for the Orchestrate.io service. */ private final String apiKey; /** The host for the Orchestrate.io service. */ private URI host; /** The port for the Orchestrate.io service. */ private int port; /** The version of the Orchestrate API to use. */ private API version; /** The number of threads to use with the client. */ private int poolSize; /** The maximum size of the thread pool to use with the client. */ private int maxPoolSize; /** The configured JSON mapper. */ private JacksonMapper mapper; private Builder(final String apiKey) { assert (apiKey != null); assert (apiKey.length() == 36); this.apiKey = apiKey; host(DEFAULT_HOST); port(DEFAULT_PORT); version(Client.V0); poolSize(Runtime.getRuntime().availableProcessors()); maxPoolSize(Integer.MAX_VALUE); mapper(JacksonMapper.builder()); } /** * Set the hostname for the Orchestrate.io service, defaults to {@code * Builder.DEFAULT_HOST}. * * @param host The hostname for the Orchestrate.io service. * @return This builder. * @see Builder#DEFAULT_HOST */ public Builder host(final String host) { if (host == null) { throw new IllegalArgumentException("'host' cannot be null."); } if (host.length() < 1) { throw new IllegalArgumentException("'host' cannot be empty."); } this.host = URI.create(host); return this; } /** * Set the port for the Orchestrate.io service, defaults to {@code * Builder.DEFAULT_PORT}. * * @param port The port for the Orchestrate.io service. * @return This builder. * @see Builder#DEFAULT_PORT */ public Builder port(final int port) { if (port < 1 || port > 65535) { throw new IllegalArgumentException("'port' must be between 1 and 65535."); } this.port = port; return this; } /** * The version of the API to use with the Orchestrate.io service, * defaults to the latest and greatest version of the API. * * @param version The version of the Orchestrate.io service to use, e.g. * {@code Client.V0}. * @return This builder. */ public Builder version(final API version) { if (version == null) { throw new IllegalArgumentException("'version' cannot be null."); } this.version = version; return this; } /** * The initial number of threads to use with the client, defaults to * {@link Runtime#availableProcessors()}. * * @param poolSize The size of the thread pool to start with. * @return This builder. 
*/ public Builder poolSize(final int poolSize) { if (poolSize < 0) { throw new IllegalArgumentException("'poolSize' cannot be negative."); } this.poolSize = poolSize; return this; } /** * The maximum number of threads to use with the client, defaults to * {@link Integer#MAX_VALUE}. * * @param maxPoolSize The maximum size to grow the thread pool to. * @return This builder. */ public Builder maxPoolSize(final int maxPoolSize) { if (maxPoolSize < 1) { throw new IllegalArgumentException("'maxPoolSize' cannot be smaller than one."); } this.maxPoolSize = maxPoolSize; return this; } /** * The Jackson JSON {@code ObjectMapper} to use when marshalling data to * and from the service, defaults to {@link io.orchestrate.client.JacksonMapper#builder()}. * * @param objectMapper A Jackson JSON {@code ObjectMapper}. * @return This builder. */ public Builder mapper(final ObjectMapper objectMapper) { if (objectMapper == null) { throw new IllegalArgumentException("'objectMapper' cannot be null."); } return mapper(JacksonMapper.builder(objectMapper)); } /** * A {@code Builder} used to build the {@code JacksonMapper} to use when * marshalling data to and from the service. * * @param mapperBuilder A {@code JacksonMapper.Builder}. * @return This builder. */ public Builder mapper(final JacksonMapper.Builder mapperBuilder) { if (mapperBuilder == null) { throw new IllegalArgumentException("'mapperBuilder' cannot be null."); } return mapper(mapperBuilder.build()); } /** * A {@code JacksonMapper} to use when marshalling data to and from the * service. * * @param mapper A {@code JacksonMapper}. * @return This builder. */ public Builder mapper(final JacksonMapper mapper) { if (mapper == null) { throw new IllegalArgumentException("'mapper' cannot be null."); } this.mapper = mapper; return this; } /** * Creates a new {@code Client}. * * @return A new {@link Client}. */ public Client build() { return new Client(this); } } }
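For quick reference, a minimal usage sketch of the builder documented above; the 36-character API key is a placeholder, and only methods shown in this class (builder, host, port, poolSize, build, stop) are called.

import io.orchestrate.client.Client;

import java.io.IOException;

// Illustration only: wiring up and shutting down a Client with the Builder above.
public final class ClientUsageSketch {

    public static void main(final String[] args) throws IOException {
        // Placeholder key; Client.builder(..) insists on exactly 36 characters.
        final String apiKey = "0123456789abcdef0123456789abcdef0123";

        final Client client = Client.builder(apiKey)
                .host(Client.Builder.DEFAULT_HOST)                    // optional
                .port(Client.Builder.DEFAULT_PORT)                    // optional
                .poolSize(Runtime.getRuntime().availableProcessors()) // optional
                .build();

        // ... execute(..) operations go here ...

        client.stop(); // stops the transport and closes its connections
    }
}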
Make Java generics happy
src/main/java/io/orchestrate/client/Client.java
Make Java generics happy
Java
apache-2.0
8c7d0eb203e080390a14adc4fea6a27817509b02
0
SpineEventEngine/gae-java,SpineEventEngine/gae-java
/* * Copyright 2020, TeamDev. All rights reserved. * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.server.storage.datastore; import com.google.cloud.datastore.BaseEntity; import com.google.cloud.datastore.Datastore; import com.google.cloud.datastore.DatastoreException; import com.google.cloud.datastore.DatastoreReader; import com.google.cloud.datastore.DatastoreReaderWriter; import com.google.cloud.datastore.DatastoreWriter; import com.google.cloud.datastore.Entity; import com.google.cloud.datastore.FullEntity; import com.google.cloud.datastore.Key; import com.google.cloud.datastore.KeyFactory; import com.google.cloud.datastore.Query; import com.google.cloud.datastore.StructuredQuery; import com.google.cloud.datastore.Transaction; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Supplier; import com.google.common.collect.Streams; import io.spine.logging.Logging; import io.spine.server.storage.datastore.tenant.Namespace; import io.spine.server.storage.datastore.tenant.NamespaceSupplier; import org.checkerframework.checker.nullness.qual.Nullable; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Iterables.toArray; import static com.google.common.collect.Iterators.concat; import static com.google.common.collect.Iterators.unmodifiableIterator; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Lists.newLinkedList; import static com.google.common.collect.Streams.stream; import static java.lang.Math.min; import static java.util.Collections.emptyIterator; import static java.util.stream.Collectors.toList; /** * Adapts {@link Datastore} API for being used for storages. 
*/ @SuppressWarnings("ClassWithTooManyMethods") public class DatastoreWrapper implements Logging { private static final String ACTIVE_TRANSACTION_CONDITION_MESSAGE = "Transaction should be active."; private static final String NOT_ACTIVE_TRANSACTION_CONDITION_MESSAGE = "Transaction should NOT be active."; private static final int MAX_KEYS_PER_READ_REQUEST = 1000; static final int MAX_ENTITIES_PER_WRITE_REQUEST = 500; private static final Map<DatastoreKind, KeyFactory> keyFactories = new HashMap<>(); private static final Key[] EMPTY_KEY_ARRAY = new Key[0]; private final NamespaceSupplier namespaceSupplier; private final Datastore datastore; private Transaction activeTransaction; private DatastoreReaderWriter actor; /** * Creates a new instance of {@code DatastoreWrapper}. * * @param datastore * {@link Datastore} to wrap * @param supplier * an instance of {@link Supplier Supplier&lt;Namespace&gt;} to get the namespaces for * the queries from the datastore */ protected DatastoreWrapper(Datastore datastore, NamespaceSupplier supplier) { this.namespaceSupplier = checkNotNull(supplier); this.datastore = checkNotNull(datastore); this.actor = datastore; } /** * Shortcut method for calling the constructor. */ static DatastoreWrapper wrap(Datastore datastore, NamespaceSupplier supplier) { return new DatastoreWrapper(datastore, supplier); } /** * Creates an instance of {@link com.google.cloud.datastore.Key} basing on the Datastore * entity {@code kind} and {@code recordId}. * * @param kind * the kind of the Datastore entity * @param recordId * the ID of the record * @return the Datastore {@code Key} instance */ Key keyFor(Kind kind, RecordId recordId) { KeyFactory keyFactory = keyFactory(kind); Key key = keyFactory.newKey(recordId.getValue()); return key; } /** * Writes new {@link Entity} into the Datastore. * * @param entity * new {@link Entity} to put into the Datastore * @throws DatastoreException * upon failure * @see DatastoreWriter#add(FullEntity) */ public void create(Entity entity) throws DatastoreException { actor.add(entity); } /** * Modifies an {@link Entity} in the Datastore. * * @param entity * the {@link Entity} to update * @throws DatastoreException * if the {@link Entity} with such {@link Key} does not exist * @see DatastoreWriter#update(Entity...) */ public void update(Entity entity) throws DatastoreException { actor.update(entity); } /** * Writes an {@link Entity} to the Datastore or modifies an existing one. * * @param entity * the {@link Entity} to write or update * @see DatastoreWrapper#create(Entity) * @see DatastoreWrapper#update(Entity) */ public void createOrUpdate(Entity entity) { actor.put(entity); } /** * Writes the {@link Entity entities} to the Datastore or modifies the existing ones. * * @param entities * the {@link Entity Entities} to write or update * @see DatastoreWrapper#createOrUpdate(Entity) */ public void createOrUpdate(Entity... entities) { if (entities.length <= MAX_ENTITIES_PER_WRITE_REQUEST) { writeSmallBulk(entities); } else { writeBulk(entities); } } /** * Writes the {@link Entity entities} to the Datastore or modifies the existing ones. * * @param entities * a {@link Collection} of {@link Entity Entities} to write or update * @see DatastoreWrapper#createOrUpdate(Entity) */ public void createOrUpdate(Collection<Entity> entities) { Entity[] array = new Entity[entities.size()]; entities.toArray(array); createOrUpdate(array); } /** * Retrieves an {@link Entity} with the given key from the Datastore. 
* * @param key * {@link Key} to search for * @return the {@link Entity} or {@code null} in case of no results for the key given * @see DatastoreReader#get(Key) */ public @Nullable Entity read(Key key) { return actor.get(key); } /** * Retrieves an {@link Entity} for each of the given keys. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. * * <p>The results are returned in an order matching that of the provided keys * with {@code null}s in place of missing and inactive entities. * * @param keys * {@link Key Keys} to search for * @return an {@code Iterator} over the found entities in the order of keys * (including {@code null} values for nonexistent keys) * @see DatastoreReader#get(Key...) */ public Iterator<@Nullable Entity> read(Iterable<Key> keys) { Iterator<@Nullable Entity> dsIterator = readByKeys(keys); Iterator<@Nullable Entity> result = orderByKeys(keys, dsIterator); return unmodifiableIterator(result); } private Iterator<@Nullable Entity> readByKeys(Iterable<Key> keys) { List<Key> keysList = newLinkedList(keys); return keysList.size() <= MAX_KEYS_PER_READ_REQUEST ? actor.get(toArray(keys, Key.class)) : readBulk(keysList); } private static Iterator<@Nullable Entity> orderByKeys(Iterable<Key> keys, Iterator<Entity> items) { List<Entity> entities = newLinkedList(() -> items); Iterator<Entity> entitiesIterator = stream(keys) .map(key -> getEntityOrNull(key, entities.iterator())) .iterator(); return entitiesIterator; } private static @Nullable Entity getEntityOrNull(Key key, Iterator<Entity> entities) { while (entities.hasNext()) { Entity entity = entities.next(); if (key.equals(entity.getKey())) { entities.remove(); return entity; } } return null; } /** * Queries the Datastore with the given arguments. * * <p>The Datastore may return a partial result set, so an execution of this method may * result in several Datastore queries. * * <p>The limit included in the {@link StructuredQuery}, will be a maximum count of objects * in the returned iterator. * * <p>The returned {@link DsQueryIterator} allows to {@linkplain DsQueryIterator#nextPageQuery() * create a query} to the next page of results reusing an existing cursor. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. * * @param query * {@link Query} to execute upon the Datastore * @param <R> * the type of queried objects * @return results fo the query as a lazily evaluated {@link Iterator} * @see DatastoreReader#run(Query) */ public <R> DsQueryIterator<R> read(StructuredQuery<R> query) { Namespace namespace = namespaceSupplier.get(); StructuredQuery<R> queryWithNamespace = query.toBuilder() .setNamespace(namespace.getValue()) .build(); _trace().log("Reading entities of `%s` kind in `%s` namespace.", query.getKind(), namespace.getValue()); DsQueryIterator<R> result = new DsQueryIterator<>(queryWithNamespace, actor); return result; } /** * Queries the Datastore for all entities matching query. * * <p>Read is performed from datastore using batches of the specified size, which leads to * multiple queries being executed. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. 
* * @param query * {@link Query} to execute upon the Datastore * @param pageSize * a non-zero number of elements to be returned per a single read from Datastore * @param <R> * the type of queried objects * @return results fo the query as a lazily evaluated {@link Iterator} * @throws IllegalArgumentException * if the provided {@linkplain StructuredQuery#getLimit() query includes a limit} */ public <R> Iterator<R> readAll(StructuredQuery<R> query, int pageSize) { return readAllPageByPage(query, pageSize); } /** * Queries the Datastore for all entities matching query. * * <p>Read is performed in batches until all of the matching entities are fetched, resulting * in multiple Datastore queries. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. * * @param query * {@link Query} to execute upon the Datastore * @param <R> * the type of queried objects * @return results fo the query as a lazily evaluated {@link Iterator} * @throws IllegalArgumentException * if the provided {@linkplain StructuredQuery#getLimit() query includes a limit} */ public <R> Iterator<R> readAll(StructuredQuery<R> query) { return readAllPageByPage(query, null); } /** * Queries the Datastore for all entities matching query, executing queries split in batches. * * <p>Read is performed from datastore using batches of the specified size, which leads to * multiple queries being executed. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. * * @param query * a {@link Query} to execute upon the Datastore * @param pageSize * a non-zero number of elements to be returned per a single read from Datastore; * if {@code null} the page size will be dictated by the Datastore * @param <R> * the type of queried objects * @return results fo the query as a lazily evaluated {@link Iterator} * @throws IllegalArgumentException * if the provided {@linkplain StructuredQuery#getLimit() query includes a limit} or * the provided {@code batchSize} is 0 */ @SuppressWarnings("unchecked") // Checked logically. private <R> Iterator<R> readAllPageByPage(StructuredQuery<R> query, @Nullable Integer pageSize) { checkArgument(query.getLimit() == null, "Cannot limit a number of entities for \"read all\" operation."); checkArgument(pageSize == null || pageSize != 0, "The size of a single read operation cannot be 0."); StructuredQuery<R> limitedQuery = limit(query, pageSize); return stream(new DsQueryPageIterator<>(limitedQuery, this)) .flatMap(Streams::stream) .iterator(); } private static <R> StructuredQuery<R> limit(StructuredQuery<R> query, @Nullable Integer batchSize) { return batchSize == null ? query : query.toBuilder() .setLimit(batchSize) .build(); } /** * Deletes all existing {@link Entity Entities} with the given keys. * * @param keys * {@link Key Keys} of the {@link Entity Entities} to delete. May be nonexistent */ public void delete(Key... keys) { actor.delete(keys); } /** * Deletes all existing {@link Entities} of a kind given. * * @param table * kind (a.k.a. type, table, etc.) 
of the records to delete */ @VisibleForTesting protected void dropTable(String table) { Namespace namespace = namespaceSupplier.get(); StructuredQuery<Entity> query = Query.newEntityQueryBuilder() .setNamespace(namespace.getValue()) .setKind(table) .build(); _trace().log("Deleting all entities of `%s` kind in `%s` namespace.", table, namespace.getValue()); Iterator<Entity> queryResult = read(query); List<Entity> entities = newArrayList(queryResult); deleteEntities(entities); } @VisibleForTesting protected void deleteEntities(Collection<Entity> entities) { List<Key> keyList = entities.stream() .map(BaseEntity::getKey) .collect(toList()); Key[] keys = new Key[keyList.size()]; keyList.toArray(keys); deleteEntities(keys); } private void deleteEntities(Key[] keys) { if (keys.length > MAX_ENTITIES_PER_WRITE_REQUEST) { int start = 0; int end = MAX_ENTITIES_PER_WRITE_REQUEST; while (true) { int length = end - start; if (length <= 0) { return; } Key[] keysSubarray = new Key[length]; System.arraycopy(keys, start, keysSubarray, 0, keysSubarray.length); delete(keysSubarray); start = end; end = min(MAX_ENTITIES_PER_WRITE_REQUEST, keys.length - end); } } else { delete(keys); } } /** * Starts a new database transaction. * * @return the new transaction * @see TransactionWrapper */ public final TransactionWrapper newTransaction() { Transaction tx = datastore.newTransaction(); return new TransactionWrapper(tx); } /** * Starts a transaction. * * <p>After this method is called, all {@code Entity} modifications performed through this * instance of {@code DatastoreWrapper} become transactional. This behaviour lasts until either * {@link #commitTransaction()} or {@link #rollbackTransaction()} is called. * * @throws IllegalStateException * if a transaction is already started on this instance of * {@code DatastoreWrapper} * @see #isTransactionActive() * @deprecated Use {@link #newTransaction()} instead. */ @Deprecated public void startTransaction() throws IllegalStateException { checkState(!isTransactionActive(), NOT_ACTIVE_TRANSACTION_CONDITION_MESSAGE); activeTransaction = datastore.newTransaction(); actor = activeTransaction; } /** * Commits a transaction. * * <p>Upon the method call, all the modifications within the active transaction are applied. * * <p>All next operations become non-transactional until {@link #startTransaction()} is called. * * @throws IllegalStateException * if no transaction is started on this instance of * {@code DatastoreWrapper} * @see #isTransactionActive() * @deprecated Use {@link #newTransaction()} instead. */ @Deprecated public void commitTransaction() throws IllegalStateException { checkState(isTransactionActive(), ACTIVE_TRANSACTION_CONDITION_MESSAGE); activeTransaction.commit(); this.actor = datastore; } /** * Rollbacks a transaction. * * <p>Upon the method call, all the modifications within the active transaction * canceled permanently. * * <p>After this method execution is over, all the further modifications made through * the current instance of {@code DatastoreWrapper} become non-transactional. * * @throws IllegalStateException * if no transaction is active for the current * instance of {@code DatastoreWrapper} * @see #isTransactionActive() * @deprecated Use {@link #newTransaction()} instead. 
*/ @Deprecated public void rollbackTransaction() throws IllegalStateException { checkState(isTransactionActive(), ACTIVE_TRANSACTION_CONDITION_MESSAGE); activeTransaction.rollback(); this.actor = datastore; } /** * Checks whether there is an active transaction on this instance of {@code DatastoreWrapper}. * * @return {@code true} if there is an active transaction, {@code false} otherwise * @deprecated Use {@link #newTransaction()} instead. */ @Deprecated public boolean isTransactionActive() { return activeTransaction != null && activeTransaction.isActive(); } /** * Retrieves an instance of {@link KeyFactory} unique for given Kind of data * regarding the current namespace. * * @param kind * kind of {@link Entity} to generate keys for * @return an instance of {@link KeyFactory} for given kind */ public KeyFactory keyFactory(Kind kind) { DatastoreKind datastoreKind = new DatastoreKind(projectId(), kind); KeyFactory keyFactory = keyFactories.get(datastoreKind); if (keyFactory == null) { keyFactory = initKeyFactory(kind); } Namespace namespace = namespaceSupplier.get(); _trace().log("Retrieving KeyFactory for kind `%s` in `%s` namespace.", kind, namespace.getValue()); keyFactory.setNamespace(namespace.getValue()); return keyFactory; } @VisibleForTesting public Datastore datastore() { return datastore; } private KeyFactory initKeyFactory(Kind kind) { KeyFactory keyFactory = datastore.newKeyFactory() .setKind(kind.value()); DatastoreKind datastoreKind = new DatastoreKind(projectId(), kind); keyFactories.put(datastoreKind, keyFactory); return keyFactory; } private ProjectId projectId() { String projectId = datastore.getOptions() .getProjectId(); ProjectId result = ProjectId.of(projectId); return result; } /** * Reads big number of records. * * <p>Google App Engine Datastore has a limitation on the amount of entities queried with a * single call — 1000 entities per query. To deal with this limitation we read the entities in * pagination fashion 1000 entity per page. * * @param keys * {@link Key keys} to find the entities for * @return ordered sequence of {@link Entity entities} * @see #read(Iterable) */ private Iterator<Entity> readBulk(List<Key> keys) { int pageCount = keys.size() / MAX_KEYS_PER_READ_REQUEST + 1; _trace().log("Reading a big bulk of entities synchronously. The data is read as %d pages.", pageCount); int lowerBound = 0; int higherBound = MAX_KEYS_PER_READ_REQUEST; int keysLeft = keys.size(); Iterator<Entity> result = emptyIterator(); for (int i = 0; i < pageCount; i++) { List<Key> keysPage = keys.subList(lowerBound, higherBound); Iterator<Entity> page = actor.get(keysPage.toArray(EMPTY_KEY_ARRAY)); result = concat(result, page); keysLeft -= keysPage.size(); lowerBound = higherBound; higherBound += min(keysLeft, MAX_KEYS_PER_READ_REQUEST); } return result; } private void writeBulk(Entity[] entities) { int partsCount = entities.length / MAX_ENTITIES_PER_WRITE_REQUEST + 1; for (int i = 0; i < partsCount; i++) { int partHead = i * MAX_ENTITIES_PER_WRITE_REQUEST; int partTail = min(partHead + MAX_ENTITIES_PER_WRITE_REQUEST, entities.length); Entity[] part = Arrays.copyOfRange(entities, partHead, partTail); writeSmallBulk(part); } } private void writeSmallBulk(Entity[] entities) { actor.put(entities); } /** * A Datastore {@link Kind} by project ID. 
*/ private static class DatastoreKind { private final ProjectId projectId; private final Kind kind; private DatastoreKind(ProjectId projectId, Kind kind) { this.projectId = projectId; this.kind = kind; } @SuppressWarnings("EqualsGetClass") // The class is effectively final. @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DatastoreKind kind1 = (DatastoreKind) o; return Objects.equals(projectId, kind1.projectId) && Objects.equals(kind, kind1.kind); } @Override public int hashCode() { return Objects.hash(projectId, kind); } } }
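The wrapper above slices oversized reads and writes into Datastore-sized pages (at most 1000 keys per read, 500 entities per write). Below is a standalone sketch of the same chunking idea, assuming Guava's Lists.partition instead of the manual array arithmetic used in the class.

import com.google.common.collect.Lists;

import java.util.List;
import java.util.function.Consumer;

// Illustration only: processing a workload in Datastore-sized batches,
// mirroring MAX_ENTITIES_PER_WRITE_REQUEST (500) and MAX_KEYS_PER_READ_REQUEST (1000) above.
final class BatchingSketch {

    private static final int MAX_ENTITIES_PER_WRITE_REQUEST = 500;

    static <T> void inWriteSizedBatches(List<T> items, Consumer<List<T>> action) {
        for (List<T> page : Lists.partition(items, MAX_ENTITIES_PER_WRITE_REQUEST)) {
            action.accept(page); // each call stays within the per-request limit
        }
    }

    private BatchingSketch() {
    }
}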
datastore/src/main/java/io/spine/server/storage/datastore/DatastoreWrapper.java
/* * Copyright 2020, TeamDev. All rights reserved. * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.server.storage.datastore; import com.google.cloud.datastore.BaseEntity; import com.google.cloud.datastore.Datastore; import com.google.cloud.datastore.DatastoreException; import com.google.cloud.datastore.DatastoreReader; import com.google.cloud.datastore.DatastoreReaderWriter; import com.google.cloud.datastore.DatastoreWriter; import com.google.cloud.datastore.Entity; import com.google.cloud.datastore.FullEntity; import com.google.cloud.datastore.Key; import com.google.cloud.datastore.KeyFactory; import com.google.cloud.datastore.Query; import com.google.cloud.datastore.StructuredQuery; import com.google.cloud.datastore.Transaction; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Supplier; import com.google.common.collect.Streams; import io.spine.logging.Logging; import io.spine.server.storage.datastore.tenant.Namespace; import io.spine.server.storage.datastore.tenant.NamespaceSupplier; import org.checkerframework.checker.nullness.qual.Nullable; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Iterables.toArray; import static com.google.common.collect.Iterators.concat; import static com.google.common.collect.Iterators.unmodifiableIterator; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Lists.newLinkedList; import static com.google.common.collect.Streams.stream; import static java.lang.Math.min; import static java.util.Collections.emptyIterator; import static java.util.stream.Collectors.toList; /** * Adapts {@link Datastore} API for being used for storages. 
*/ @SuppressWarnings("ClassWithTooManyMethods") public class DatastoreWrapper implements Logging { private static final String ACTIVE_TRANSACTION_CONDITION_MESSAGE = "Transaction should be active."; private static final String NOT_ACTIVE_TRANSACTION_CONDITION_MESSAGE = "Transaction should NOT be active."; private static final int MAX_KEYS_PER_READ_REQUEST = 1000; static final int MAX_ENTITIES_PER_WRITE_REQUEST = 500; private static final Map<DatastoreKind, KeyFactory> keyFactories = new HashMap<>(); private static final Key[] EMPTY_KEY_ARRAY = new Key[0]; private final NamespaceSupplier namespaceSupplier; private final Datastore datastore; private Transaction activeTransaction; private DatastoreReaderWriter actor; /** * Creates a new instance of {@code DatastoreWrapper}. * * @param datastore * {@link Datastore} to wrap * @param supplier * an instance of {@link Supplier Supplier&lt;Namespace&gt;} to get the namespaces for * the queries from the datastore */ protected DatastoreWrapper(Datastore datastore, NamespaceSupplier supplier) { this.namespaceSupplier = checkNotNull(supplier); this.datastore = checkNotNull(datastore); this.actor = datastore; } /** * Shortcut method for calling the constructor. */ static DatastoreWrapper wrap(Datastore datastore, NamespaceSupplier supplier) { return new DatastoreWrapper(datastore, supplier); } /** * Creates an instance of {@link com.google.cloud.datastore.Key} basing on the Datastore * entity {@code kind} and {@code recordId}. * * @param kind * the kind of the Datastore entity * @param recordId * the ID of the record * @return the Datastore {@code Key} instance */ Key keyFor(Kind kind, RecordId recordId) { KeyFactory keyFactory = keyFactory(kind); Key key = keyFactory.newKey(recordId.getValue()); return key; } /** * Writes new {@link Entity} into the Datastore. * * @param entity * new {@link Entity} to put into the Datastore * @throws DatastoreException * upon failure * @see DatastoreWriter#put(FullEntity) */ public void create(Entity entity) throws DatastoreException { actor.add(entity); } /** * Modifies an {@link Entity} in the Datastore. * * @param entity * the {@link Entity} to update * @throws DatastoreException * if the {@link Entity} with such {@link Key} does not exist * @see DatastoreWriter#update(Entity...) */ public void update(Entity entity) throws DatastoreException { actor.update(entity); } /** * Writes an {@link Entity} to the Datastore or modifies an existing one. * * @param entity * the {@link Entity} to write or update * @see DatastoreWrapper#create(Entity) * @see DatastoreWrapper#update(Entity) */ public void createOrUpdate(Entity entity) { actor.put(entity); } /** * Writes the {@link Entity entities} to the Datastore or modifies the existing ones. * * @param entities * the {@link Entity Entities} to write or update * @see DatastoreWrapper#createOrUpdate(Entity) */ public void createOrUpdate(Entity... entities) { if (entities.length <= MAX_ENTITIES_PER_WRITE_REQUEST) { writeSmallBulk(entities); } else { writeBulk(entities); } } /** * Writes the {@link Entity entities} to the Datastore or modifies the existing ones. * * @param entities * a {@link Collection} of {@link Entity Entities} to write or update * @see DatastoreWrapper#createOrUpdate(Entity) */ public void createOrUpdate(Collection<Entity> entities) { Entity[] array = new Entity[entities.size()]; entities.toArray(array); createOrUpdate(array); } /** * Retrieves an {@link Entity} with the given key from the Datastore. 
* * @param key * {@link Key} to search for * @return the {@link Entity} or {@code null} in case of no results for the key given * @see DatastoreReader#get(Key) */ public @Nullable Entity read(Key key) { return actor.get(key); } /** * Retrieves an {@link Entity} for each of the given keys. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. * * <p>The results are returned in an order matching that of the provided keys * with {@code null}s in place of missing and inactive entities. * * @param keys * {@link Key Keys} to search for * @return an {@code Iterator} over the found entities in the order of keys * (including {@code null} values for nonexistent keys) * @see DatastoreReader#get(Key...) */ public Iterator<@Nullable Entity> read(Iterable<Key> keys) { Iterator<@Nullable Entity> dsIterator = readByKeys(keys); Iterator<@Nullable Entity> result = orderByKeys(keys, dsIterator); return unmodifiableIterator(result); } private Iterator<@Nullable Entity> readByKeys(Iterable<Key> keys) { List<Key> keysList = newLinkedList(keys); return keysList.size() <= MAX_KEYS_PER_READ_REQUEST ? actor.get(toArray(keys, Key.class)) : readBulk(keysList); } private static Iterator<@Nullable Entity> orderByKeys(Iterable<Key> keys, Iterator<Entity> items) { List<Entity> entities = newLinkedList(() -> items); Iterator<Entity> entitiesIterator = stream(keys) .map(key -> getEntityOrNull(key, entities.iterator())) .iterator(); return entitiesIterator; } private static @Nullable Entity getEntityOrNull(Key key, Iterator<Entity> entities) { while (entities.hasNext()) { Entity entity = entities.next(); if (key.equals(entity.getKey())) { entities.remove(); return entity; } } return null; } /** * Queries the Datastore with the given arguments. * * <p>The Datastore may return a partial result set, so an execution of this method may * result in several Datastore queries. * * <p>The limit included in the {@link StructuredQuery}, will be a maximum count of objects * in the returned iterator. * * <p>The returned {@link DsQueryIterator} allows to {@linkplain DsQueryIterator#nextPageQuery() * create a query} to the next page of results reusing an existing cursor. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. * * @param query * {@link Query} to execute upon the Datastore * @param <R> * the type of queried objects * @return results fo the query as a lazily evaluated {@link Iterator} * @see DatastoreReader#run(Query) */ public <R> DsQueryIterator<R> read(StructuredQuery<R> query) { Namespace namespace = namespaceSupplier.get(); StructuredQuery<R> queryWithNamespace = query.toBuilder() .setNamespace(namespace.getValue()) .build(); _trace().log("Reading entities of `%s` kind in `%s` namespace.", query.getKind(), namespace.getValue()); DsQueryIterator<R> result = new DsQueryIterator<>(queryWithNamespace, actor); return result; } /** * Queries the Datastore for all entities matching query. * * <p>Read is performed from datastore using batches of the specified size, which leads to * multiple queries being executed. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. 
* * @param query * {@link Query} to execute upon the Datastore * @param pageSize * a non-zero number of elements to be returned per a single read from Datastore * @param <R> * the type of queried objects * @return results fo the query as a lazily evaluated {@link Iterator} * @throws IllegalArgumentException * if the provided {@linkplain StructuredQuery#getLimit() query includes a limit} */ public <R> Iterator<R> readAll(StructuredQuery<R> query, int pageSize) { return readAllPageByPage(query, pageSize); } /** * Queries the Datastore for all entities matching query. * * <p>Read is performed in batches until all of the matching entities are fetched, resulting * in multiple Datastore queries. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. * * @param query * {@link Query} to execute upon the Datastore * @param <R> * the type of queried objects * @return results fo the query as a lazily evaluated {@link Iterator} * @throws IllegalArgumentException * if the provided {@linkplain StructuredQuery#getLimit() query includes a limit} */ public <R> Iterator<R> readAll(StructuredQuery<R> query) { return readAllPageByPage(query, null); } /** * Queries the Datastore for all entities matching query, executing queries split in batches. * * <p>Read is performed from datastore using batches of the specified size, which leads to * multiple queries being executed. * * <p>The resulting {@code Iterator} is evaluated lazily. A call to * {@link Iterator#remove() Iterator.remove()} causes an {@link UnsupportedOperationException}. * * @param query * a {@link Query} to execute upon the Datastore * @param pageSize * a non-zero number of elements to be returned per a single read from Datastore; * if {@code null} the page size will be dictated by the Datastore * @param <R> * the type of queried objects * @return results fo the query as a lazily evaluated {@link Iterator} * @throws IllegalArgumentException * if the provided {@linkplain StructuredQuery#getLimit() query includes a limit} or * the provided {@code batchSize} is 0 */ @SuppressWarnings("unchecked") // Checked logically. private <R> Iterator<R> readAllPageByPage(StructuredQuery<R> query, @Nullable Integer pageSize) { checkArgument(query.getLimit() == null, "Cannot limit a number of entities for \"read all\" operation."); checkArgument(pageSize == null || pageSize != 0, "The size of a single read operation cannot be 0."); StructuredQuery<R> limitedQuery = limit(query, pageSize); return stream(new DsQueryPageIterator<>(limitedQuery, this)) .flatMap(Streams::stream) .iterator(); } private static <R> StructuredQuery<R> limit(StructuredQuery<R> query, @Nullable Integer batchSize) { return batchSize == null ? query : query.toBuilder() .setLimit(batchSize) .build(); } /** * Deletes all existing {@link Entity Entities} with the given keys. * * @param keys * {@link Key Keys} of the {@link Entity Entities} to delete. May be nonexistent */ public void delete(Key... keys) { actor.delete(keys); } /** * Deletes all existing {@link Entities} of a kind given. * * @param table * kind (a.k.a. type, table, etc.) 
of the records to delete */ @VisibleForTesting protected void dropTable(String table) { Namespace namespace = namespaceSupplier.get(); StructuredQuery<Entity> query = Query.newEntityQueryBuilder() .setNamespace(namespace.getValue()) .setKind(table) .build(); _trace().log("Deleting all entities of `%s` kind in `%s` namespace.", table, namespace.getValue()); Iterator<Entity> queryResult = read(query); List<Entity> entities = newArrayList(queryResult); deleteEntities(entities); } @VisibleForTesting protected void deleteEntities(Collection<Entity> entities) { List<Key> keyList = entities.stream() .map(BaseEntity::getKey) .collect(toList()); Key[] keys = new Key[keyList.size()]; keyList.toArray(keys); deleteEntities(keys); } private void deleteEntities(Key[] keys) { if (keys.length > MAX_ENTITIES_PER_WRITE_REQUEST) { int start = 0; int end = MAX_ENTITIES_PER_WRITE_REQUEST; while (true) { int length = end - start; if (length <= 0) { return; } Key[] keysSubarray = new Key[length]; System.arraycopy(keys, start, keysSubarray, 0, keysSubarray.length); delete(keysSubarray); start = end; end = min(MAX_ENTITIES_PER_WRITE_REQUEST, keys.length - end); } } else { delete(keys); } } /** * Starts a new database transaction. * * @return the new transaction * @see TransactionWrapper */ public final TransactionWrapper newTransaction() { Transaction tx = datastore.newTransaction(); return new TransactionWrapper(tx); } /** * Starts a transaction. * * <p>After this method is called, all {@code Entity} modifications performed through this * instance of {@code DatastoreWrapper} become transactional. This behaviour lasts until either * {@link #commitTransaction()} or {@link #rollbackTransaction()} is called. * * @throws IllegalStateException * if a transaction is already started on this instance of * {@code DatastoreWrapper} * @see #isTransactionActive() * @deprecated Use {@link #newTransaction()} instead. */ @Deprecated public void startTransaction() throws IllegalStateException { checkState(!isTransactionActive(), NOT_ACTIVE_TRANSACTION_CONDITION_MESSAGE); activeTransaction = datastore.newTransaction(); actor = activeTransaction; } /** * Commits a transaction. * * <p>Upon the method call, all the modifications within the active transaction are applied. * * <p>All next operations become non-transactional until {@link #startTransaction()} is called. * * @throws IllegalStateException * if no transaction is started on this instance of * {@code DatastoreWrapper} * @see #isTransactionActive() * @deprecated Use {@link #newTransaction()} instead. */ @Deprecated public void commitTransaction() throws IllegalStateException { checkState(isTransactionActive(), ACTIVE_TRANSACTION_CONDITION_MESSAGE); activeTransaction.commit(); this.actor = datastore; } /** * Rollbacks a transaction. * * <p>Upon the method call, all the modifications within the active transaction * canceled permanently. * * <p>After this method execution is over, all the further modifications made through * the current instance of {@code DatastoreWrapper} become non-transactional. * * @throws IllegalStateException * if no transaction is active for the current * instance of {@code DatastoreWrapper} * @see #isTransactionActive() * @deprecated Use {@link #newTransaction()} instead. 
*/ @Deprecated public void rollbackTransaction() throws IllegalStateException { checkState(isTransactionActive(), ACTIVE_TRANSACTION_CONDITION_MESSAGE); activeTransaction.rollback(); this.actor = datastore; } /** * Checks whether there is an active transaction on this instance of {@code DatastoreWrapper}. * * @return {@code true} if there is an active transaction, {@code false} otherwise * @deprecated Use {@link #newTransaction()} instead. */ @Deprecated public boolean isTransactionActive() { return activeTransaction != null && activeTransaction.isActive(); } /** * Retrieves an instance of {@link KeyFactory} unique for given Kind of data * regarding the current namespace. * * @param kind * kind of {@link Entity} to generate keys for * @return an instance of {@link KeyFactory} for given kind */ public KeyFactory keyFactory(Kind kind) { DatastoreKind datastoreKind = new DatastoreKind(projectId(), kind); KeyFactory keyFactory = keyFactories.get(datastoreKind); if (keyFactory == null) { keyFactory = initKeyFactory(kind); } Namespace namespace = namespaceSupplier.get(); _trace().log("Retrieving KeyFactory for kind `%s` in `%s` namespace.", kind, namespace.getValue()); keyFactory.setNamespace(namespace.getValue()); return keyFactory; } @VisibleForTesting public Datastore datastore() { return datastore; } private KeyFactory initKeyFactory(Kind kind) { KeyFactory keyFactory = datastore.newKeyFactory() .setKind(kind.value()); DatastoreKind datastoreKind = new DatastoreKind(projectId(), kind); keyFactories.put(datastoreKind, keyFactory); return keyFactory; } private ProjectId projectId() { String projectId = datastore.getOptions() .getProjectId(); ProjectId result = ProjectId.of(projectId); return result; } /** * Reads big number of records. * * <p>Google App Engine Datastore has a limitation on the amount of entities queried with a * single call — 1000 entities per query. To deal with this limitation we read the entities in * pagination fashion 1000 entity per page. * * @param keys * {@link Key keys} to find the entities for * @return ordered sequence of {@link Entity entities} * @see #read(Iterable) */ private Iterator<Entity> readBulk(List<Key> keys) { int pageCount = keys.size() / MAX_KEYS_PER_READ_REQUEST + 1; _trace().log("Reading a big bulk of entities synchronously. The data is read as %d pages.", pageCount); int lowerBound = 0; int higherBound = MAX_KEYS_PER_READ_REQUEST; int keysLeft = keys.size(); Iterator<Entity> result = emptyIterator(); for (int i = 0; i < pageCount; i++) { List<Key> keysPage = keys.subList(lowerBound, higherBound); Iterator<Entity> page = actor.get(keysPage.toArray(EMPTY_KEY_ARRAY)); result = concat(result, page); keysLeft -= keysPage.size(); lowerBound = higherBound; higherBound += min(keysLeft, MAX_KEYS_PER_READ_REQUEST); } return result; } private void writeBulk(Entity[] entities) { int partsCount = entities.length / MAX_ENTITIES_PER_WRITE_REQUEST + 1; for (int i = 0; i < partsCount; i++) { int partHead = i * MAX_ENTITIES_PER_WRITE_REQUEST; int partTail = min(partHead + MAX_ENTITIES_PER_WRITE_REQUEST, entities.length); Entity[] part = Arrays.copyOfRange(entities, partHead, partTail); writeSmallBulk(part); } } private void writeSmallBulk(Entity[] entities) { actor.put(entities); } /** * A Datastore {@link Kind} by project ID. 
*/ private static class DatastoreKind { private final ProjectId projectId; private final Kind kind; private DatastoreKind(ProjectId projectId, Kind kind) { this.projectId = projectId; this.kind = kind; } @SuppressWarnings("EqualsGetClass") // The class is effectively final. @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DatastoreKind kind1 = (DatastoreKind) o; return Objects.equals(projectId, kind1.projectId) && Objects.equals(kind, kind1.kind); } @Override public int hashCode() { return Objects.hash(projectId, kind); } } }
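A short sketch of the deprecated explicit-transaction flow described above (startTransaction / commitTransaction / rollbackTransaction); how the DatastoreWrapper instance is obtained is outside the scope of this snippet, and new code should prefer newTransaction() as the deprecation notes say.

import com.google.cloud.datastore.Entity;
import io.spine.server.storage.datastore.DatastoreWrapper;

// Illustration only: the deprecated transactional write cycle documented above.
final class DeprecatedTransactionSketch {

    @SuppressWarnings("deprecation") // demonstrating the legacy API on purpose
    static void putTransactionally(DatastoreWrapper wrapper, Entity entity) {
        wrapper.startTransaction();
        try {
            wrapper.createOrUpdate(entity);
            wrapper.commitTransaction();
        } catch (RuntimeException e) {
            if (wrapper.isTransactionActive()) {
                wrapper.rollbackTransaction();
            }
            throw e;
        }
    }

    private DeprecatedTransactionSketch() {
    }
}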
Update doc
datastore/src/main/java/io/spine/server/storage/datastore/DatastoreWrapper.java
Update doc
Java
apache-2.0
428368875b9a52a99fbfeac5146c484ee54ece4f
0
jkschneider/eureka,brharrington/eureka,Netflix/eureka,OnePaaS/eureka,mchlstckl/eureka,bondj/eureka,mchlstckl/eureka,william-tran/eureka,fullcontact/eureka,C0rWin/eureka,ccortezb/eureka,ouyangkongtong/eureka,yonglehou/eureka,edyeus/eureka,edyeus/eureka,yonglehou/eureka,spencergibb/eureka,elandau/eureka,krutsko/eureka,schibsted/eureka-jersey2,ccortezb/eureka,Fsero/eureka,spencergibb/eureka,tbak/eureka,schibsted/eureka-jersey2,tbak/eureka,jaume-pinyol/eureka,brharrington/eureka,gorcz/eureka,Muktesh01/eureka,wgpshashank/eureka,yonglehou/eureka,C0rWin/eureka,Fsero/eureka,jmnarloch/eureka,bhalothia/eureka,Muktesh01/eureka,OnePaaS/eureka,gorcz/eureka,bhalothia/eureka,jaume-pinyol/eureka,jmnarloch/eureka,gorcz/eureka,schibsted/eureka-jersey2,qiangdavidliu/eureka,wgpshashank/eureka,tbak/eureka,mchlstckl/eureka,ouyangkongtong/eureka,C0rWin/eureka,bhalothia/eureka,bondj/eureka,jmnarloch/eureka,Muktesh01/eureka,Netflix/eureka,bondj/eureka,fullcontact/eureka,jkschneider/eureka,jaume-pinyol/eureka,william-tran/eureka,fullcontact/eureka,edyeus/eureka,Fsero/eureka,ouyangkongtong/eureka,qiangdavidliu/eureka,OnePaaS/eureka,jkschneider/eureka,william-tran/eureka,wgpshashank/eureka,ccortezb/eureka,krutsko/eureka,elandau/eureka
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.appinfo; import java.io.IOException; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import org.apache.commons.configuration.Configuration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.netflix.config.ConfigurationManager; import com.netflix.config.DynamicPropertyFactory; import com.netflix.config.DynamicStringProperty; /** * A properties based {@link InstanceInfo} configuration. * * <p> * The information required for registration with eureka server is provided in a * configuration file.The configuration file is searched for in the classpath * with the name specified by the property <em>eureka.client.props</em> and with * the suffix <em>.properties</em>. If the property is not specified, * <em>eureka-client.properties</em> is assumed as the default.The properties * that are looked up uses the <em>namespace</em> passed on to this class. * </p> * * <p> * If the <em>eureka.environment</em> property is specified, additionally * <em>eureka-client-<eureka.environment>.properties</em> is loaded in addition * to <em>eureka-client.properties</em>. * </p> * * @author Karthik Ranganathan * */ public abstract class PropertiesInstanceConfig extends AbstractInstanceConfig implements EurekaInstanceConfig { private static final String TEST = "test"; private static final String ARCHAIUS_DEPLOYMENT_ENVIRONMENT = "archaius.deployment.environment"; private static final String EUREKA_ENVIRONMENT = "eureka.environment"; private static final Logger logger = LoggerFactory .getLogger(PropertiesInstanceConfig.class); protected String namespace = "eureka."; private static final DynamicStringProperty EUREKA_PROPS_FILE = DynamicPropertyFactory .getInstance().getStringProperty("eureka.client.props", "eureka-client"); private static final DynamicPropertyFactory INSTANCE = com.netflix.config.DynamicPropertyFactory .getInstance(); private static final String UNKNOWN_APPLICATION = "unknown"; private static final String DEFAULT_STATUSPAGE_URLPATH = "/Status"; private static final String DEFAULT_HOMEPAGE_URLPATH = "/"; private static final String DEFAULT_HEALTHCHECK_URLPATH = "/healthcheck"; private String propSecurePort = namespace + "securePort"; private String propSecurePortEnabled = propSecurePort + ".enabled"; private String propNonSecurePort; private String propName; private String propPortEnabled; private String propLeaseRenewalIntervalInSeconds; private String propLeaseExpirationDurationInSeconds; private String propSecureVirtualHostname; private String propVirtualHostname; private String propMetadataNamespace; private String propASGName; public PropertiesInstanceConfig() { init(namespace); } public PropertiesInstanceConfig(String namespace, DataCenterInfo info) { super(info); init(namespace); } public PropertiesInstanceConfig(String namespace) { init(namespace); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#isInstanceEnabledOnit() */ @Override 
public boolean isInstanceEnabledOnit() { return INSTANCE.getBooleanProperty(namespace + "traffic.enabled", super.isInstanceEnabledOnit()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getNonSecurePort() */ @Override public int getNonSecurePort() { return INSTANCE.getIntProperty(propNonSecurePort, super.getNonSecurePort()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getSecurePort() */ @Override public int getSecurePort() { return INSTANCE.getIntProperty(propSecurePort, super.getSecurePort()) .get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#isNonSecurePortEnabled() */ @Override public boolean isNonSecurePortEnabled() { return INSTANCE.getBooleanProperty(propPortEnabled, super.isNonSecurePortEnabled()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getSecurePortEnabled() */ @Override public boolean getSecurePortEnabled() { return INSTANCE.getBooleanProperty(propSecurePortEnabled, super.getSecurePortEnabled()).get(); } /* * (non-Javadoc) * * @see * com.netflix.appinfo.AbstractInstanceConfig#getLeaseRenewalIntervalInSeconds * () */ @Override public int getLeaseRenewalIntervalInSeconds() { return INSTANCE.getIntProperty(propLeaseRenewalIntervalInSeconds, super.getLeaseRenewalIntervalInSeconds()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig# * getLeaseExpirationDurationInSeconds() */ @Override public int getLeaseExpirationDurationInSeconds() { return INSTANCE.getIntProperty(propLeaseExpirationDurationInSeconds, super.getLeaseExpirationDurationInSeconds()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getVirtualHostName() */ @Override public String getVirtualHostName() { if (this.isNonSecurePortEnabled()) { return INSTANCE.getStringProperty(propVirtualHostname, super.getVirtualHostName()).get(); } else { return null; } } /* * (non-Javadoc) * * @see * com.netflix.appinfo.AbstractInstanceConfig#getSecureVirtualHostName() */ @Override public String getSecureVirtualHostName() { if (this.getSecurePortEnabled()) { return INSTANCE.getStringProperty(propSecureVirtualHostname, super.getSecureVirtualHostName()).get(); } else { return null; } } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getASGName() */ @Override public String getASGName() { return INSTANCE.getStringProperty(propASGName, super.getASGName()) .get(); } /** * Gets the metadata map associated with the instance. The properties that * will be looked up for this will be <code>namespace + ".metadata"</code>. * * <p> * For instance, if the given namespace is <code>eureka.appinfo</code>, the * metadata keys are searched under the namespace * <code>eureka.appinfo.metadata</code>. 
* </p> */ @Override public Map<String, String> getMetadataMap() { Map<String, String> metadataMap = new LinkedHashMap<String, String>(); Configuration config = (Configuration) INSTANCE .getBackingConfigurationSource(); String subsetPrefix = propMetadataNamespace.substring(0, propMetadataNamespace.length() - 1); for (Iterator<String> iter = config.subset(subsetPrefix) .getKeys(); iter.hasNext();) { String key = iter.next(); String value = config.getString(propMetadataNamespace + key); metadataMap.put(key, value); } return metadataMap; } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getAppname() */ @Override public String getAppname() { return INSTANCE.getStringProperty(propName, UNKNOWN_APPLICATION).get() .trim(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getIpAddress() */ public String getIpAddress() { return super.getIpAddress(); } @Override public String getStatusPageUrlPath() { return INSTANCE.getStringProperty(namespace + "statusPageUrlPath", DEFAULT_STATUSPAGE_URLPATH).get(); } @Override public String getStatusPageUrl() { return INSTANCE.getStringProperty(namespace + "statusPageUrl", null) .get(); } @Override public String getHomePageUrlPath() { return INSTANCE.getStringProperty(namespace + "homePageUrlPath", DEFAULT_HOMEPAGE_URLPATH).get(); } @Override public String getHomePageUrl() { return INSTANCE.getStringProperty(namespace + "homePageUrl", null) .get(); } @Override public String getHealthCheckUrlPath() { return INSTANCE.getStringProperty( namespace + "healthCheckUrlPath", DEFAULT_HEALTHCHECK_URLPATH).get(); } @Override public String getHealthCheckUrl() { return INSTANCE.getStringProperty(namespace + "healthCheckUrl", null) .get(); } @Override public String getSecureHealthCheckUrl() { return INSTANCE.getStringProperty(namespace + "secureHealthCheckUrl", null).get(); } @Override public String getNamespace() { return this.namespace; } private void init(String namespace) { this.namespace = namespace; propSecurePort = namespace + "securePort"; propSecurePortEnabled = propSecurePort + ".enabled"; propNonSecurePort = namespace + "port"; propName = namespace + "name"; propPortEnabled = propNonSecurePort + ".enabled"; propLeaseRenewalIntervalInSeconds = namespace + "lease.renewalInterval"; propLeaseExpirationDurationInSeconds = namespace + "lease.duration"; propSecureVirtualHostname = namespace + "secureVipAddress"; propVirtualHostname = namespace + "vipAddress"; propMetadataNamespace = namespace + "metadata."; propASGName = namespace + "asgName"; String env = ConfigurationManager.getConfigInstance().getString( EUREKA_ENVIRONMENT, TEST); ConfigurationManager.getConfigInstance().setProperty( ARCHAIUS_DEPLOYMENT_ENVIRONMENT, env); String eurekaPropsFile = EUREKA_PROPS_FILE.get(); try { ConfigurationManager .loadCascadedPropertiesFromResources(eurekaPropsFile); } catch (IOException e) { logger.warn( "Cannot find the properties specified : {}. This may be okay if there are other environment specific properties or the configuration is installed with a different mechanism.", eurekaPropsFile); } } }
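A minimal, hypothetical sketch of the behaviour the fixed getMetadataMap() above relies on, assuming commons-configuration 1.x semantics; the class and property names here are illustrative and not part of the commit. Configuration.subset() only matches keys equal to the prefix or starting with prefix + ".", so passing the metadata namespace with its trailing dot finds no keys, which is why the method now strips the dot before calling subset().

import java.util.Iterator;

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;

public class MetadataSubsetSketch {
    public static void main(String[] args) {
        Configuration config = new BaseConfiguration();
        // Illustrative metadata entries, as they might appear in eureka-client.properties
        config.setProperty("eureka.metadata.zone", "us-east-1a");
        config.setProperty("eureka.metadata.stack", "prod");

        String namespaceWithDot = "eureka.metadata.";
        // Strip the trailing dot, mirroring the fixed getMetadataMap() above;
        // subset("eureka.metadata.") would yield an empty key set here.
        String subsetPrefix = namespaceWithDot.substring(0, namespaceWithDot.length() - 1);

        for (Iterator<String> iter = config.subset(subsetPrefix).getKeys(); iter.hasNext();) {
            String key = iter.next();
            System.out.println(key + " = " + config.getString(namespaceWithDot + key));
        }
    }
}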
eureka-client/src/main/java/com/netflix/appinfo/PropertiesInstanceConfig.java
/* * Copyright 2012 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.appinfo; import java.io.IOException; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import org.apache.commons.configuration.Configuration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.netflix.config.ConfigurationManager; import com.netflix.config.DynamicPropertyFactory; import com.netflix.config.DynamicStringProperty; /** * A properties based {@link InstanceInfo} configuration. * * <p> * The information required for registration with eureka server is provided in a * configuration file.The configuration file is searched for in the classpath * with the name specified by the property <em>eureka.client.props</em> and with * the suffix <em>.properties</em>. If the property is not specified, * <em>eureka-client.properties</em> is assumed as the default.The properties * that are looked up uses the <em>namespace</em> passed on to this class. * </p> * * <p> * If the <em>eureka.environment</em> property is specified, additionally * <em>eureka-client-<eureka.environment>.properties</em> is loaded in addition * to <em>eureka-client.properties</em>. * </p> * * @author Karthik Ranganathan * */ public abstract class PropertiesInstanceConfig extends AbstractInstanceConfig implements EurekaInstanceConfig { private static final String TEST = "test"; private static final String ARCHAIUS_DEPLOYMENT_ENVIRONMENT = "archaius.deployment.environment"; private static final String EUREKA_ENVIRONMENT = "eureka.environment"; private static final Logger logger = LoggerFactory .getLogger(PropertiesInstanceConfig.class); protected String namespace = "eureka."; private static final DynamicStringProperty EUREKA_PROPS_FILE = DynamicPropertyFactory .getInstance().getStringProperty("eureka.client.props", "eureka-client"); private static final DynamicPropertyFactory INSTANCE = com.netflix.config.DynamicPropertyFactory .getInstance(); private static final String UNKNOWN_APPLICATION = "unknown"; private static final String DEFAULT_STATUSPAGE_URLPATH = "/Status"; private static final String DEFAULT_HOMEPAGE_URLPATH = "/"; private static final String DEFAULT_HEALTHCHECK_URLPATH = "/healthcheck"; private String propSecurePort = namespace + "securePort"; private String propSecurePortEnabled = propSecurePort + ".enabled"; private String propNonSecurePort; private String propName; private String propPortEnabled; private String propLeaseRenewalIntervalInSeconds; private String propLeaseExpirationDurationInSeconds; private String propSecureVirtualHostname; private String propVirtualHostname; private String propMetadataNamespace; private String propASGName; public PropertiesInstanceConfig() { init(namespace); } public PropertiesInstanceConfig(String namespace, DataCenterInfo info) { super(info); init(namespace); } public PropertiesInstanceConfig(String namespace) { init(namespace); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#isInstanceEnabledOnit() */ @Override 
public boolean isInstanceEnabledOnit() { return INSTANCE.getBooleanProperty(namespace + "traffic.enabled", super.isInstanceEnabledOnit()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getNonSecurePort() */ @Override public int getNonSecurePort() { return INSTANCE.getIntProperty(propNonSecurePort, super.getNonSecurePort()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getSecurePort() */ @Override public int getSecurePort() { return INSTANCE.getIntProperty(propSecurePort, super.getSecurePort()) .get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#isNonSecurePortEnabled() */ @Override public boolean isNonSecurePortEnabled() { return INSTANCE.getBooleanProperty(propPortEnabled, super.isNonSecurePortEnabled()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getSecurePortEnabled() */ @Override public boolean getSecurePortEnabled() { return INSTANCE.getBooleanProperty(propSecurePortEnabled, super.getSecurePortEnabled()).get(); } /* * (non-Javadoc) * * @see * com.netflix.appinfo.AbstractInstanceConfig#getLeaseRenewalIntervalInSeconds * () */ @Override public int getLeaseRenewalIntervalInSeconds() { return INSTANCE.getIntProperty(propLeaseRenewalIntervalInSeconds, super.getLeaseRenewalIntervalInSeconds()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig# * getLeaseExpirationDurationInSeconds() */ @Override public int getLeaseExpirationDurationInSeconds() { return INSTANCE.getIntProperty(propLeaseExpirationDurationInSeconds, super.getLeaseExpirationDurationInSeconds()).get(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getVirtualHostName() */ @Override public String getVirtualHostName() { if (this.isNonSecurePortEnabled()) { return INSTANCE.getStringProperty(propVirtualHostname, super.getVirtualHostName()).get(); } else { return null; } } /* * (non-Javadoc) * * @see * com.netflix.appinfo.AbstractInstanceConfig#getSecureVirtualHostName() */ @Override public String getSecureVirtualHostName() { if (this.getSecurePortEnabled()) { return INSTANCE.getStringProperty(propSecureVirtualHostname, super.getSecureVirtualHostName()).get(); } else { return null; } } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getASGName() */ @Override public String getASGName() { return INSTANCE.getStringProperty(propASGName, super.getASGName()) .get(); } /** * Gets the metadata map associated with the instance. The properties that * will be looked up for this will be <code>namespace + ".metadata"</code>. * * <p> * For instance, if the given namespace is <code>eureka.appinfo</code>, the * metadata keys are searched under the namespace * <code>eureka.appinfo.metadata</code>. 
* </p> */ @Override public Map<String, String> getMetadataMap() { Map<String, String> metadataMap = new LinkedHashMap<String, String>(); Configuration config = (Configuration) INSTANCE .getBackingConfigurationSource(); for (Iterator<String> iter = config.subset(propMetadataNamespace) .getKeys(); iter.hasNext();) { String key = iter.next(); String value = config.getString(propMetadataNamespace + key); metadataMap.put(key, value); } return metadataMap; } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getAppname() */ @Override public String getAppname() { return INSTANCE.getStringProperty(propName, UNKNOWN_APPLICATION).get() .trim(); } /* * (non-Javadoc) * * @see com.netflix.appinfo.AbstractInstanceConfig#getIpAddress() */ public String getIpAddress() { return super.getIpAddress(); } @Override public String getStatusPageUrlPath() { return INSTANCE.getStringProperty(namespace + "statusPageUrlPath", DEFAULT_STATUSPAGE_URLPATH).get(); } @Override public String getStatusPageUrl() { return INSTANCE.getStringProperty(namespace + "statusPageUrl", null) .get(); } @Override public String getHomePageUrlPath() { return INSTANCE.getStringProperty(namespace + "homePageUrlPath", DEFAULT_HOMEPAGE_URLPATH).get(); } @Override public String getHomePageUrl() { return INSTANCE.getStringProperty(namespace + "homePageUrl", null) .get(); } @Override public String getHealthCheckUrlPath() { return INSTANCE.getStringProperty( namespace + "healthCheckUrlPath", DEFAULT_HEALTHCHECK_URLPATH).get(); } @Override public String getHealthCheckUrl() { return INSTANCE.getStringProperty(namespace + "healthCheckUrl", null) .get(); } @Override public String getSecureHealthCheckUrl() { return INSTANCE.getStringProperty(namespace + "secureHealthCheckUrl", null).get(); } @Override public String getNamespace() { return this.namespace; } private void init(String namespace) { this.namespace = namespace; propSecurePort = namespace + "securePort"; propSecurePortEnabled = propSecurePort + ".enabled"; propNonSecurePort = namespace + "port"; propName = namespace + "name"; propPortEnabled = propNonSecurePort + ".enabled"; propLeaseRenewalIntervalInSeconds = namespace + "lease.renewalInterval"; propLeaseExpirationDurationInSeconds = namespace + "lease.duration"; propSecureVirtualHostname = namespace + "secureVipAddress"; propVirtualHostname = namespace + "vipAddress"; propMetadataNamespace = namespace + "metadata."; propASGName = namespace + "asgName"; String env = ConfigurationManager.getConfigInstance().getString( EUREKA_ENVIRONMENT, TEST); ConfigurationManager.getConfigInstance().setProperty( ARCHAIUS_DEPLOYMENT_ENVIRONMENT, env); String eurekaPropsFile = EUREKA_PROPS_FILE.get(); try { ConfigurationManager .loadCascadedPropertiesFromResources(eurekaPropsFile); } catch (IOException e) { logger.warn( "Cannot find the properties specified : {}. This may be okay if there are other environment specific properties or the configuration is installed with a different mechanism.", eurekaPropsFile); } } }
Fix the bug that made it impossible to read metadata from the property file.
eureka-client/src/main/java/com/netflix/appinfo/PropertiesInstanceConfig.java
Fix the bug that made it impossible to read metadata from the property file.
Java
apache-2.0
3d9fdc36757004b741520babd008f53d892eb165
0
madanadit/alluxio,wwjiang007/alluxio,wwjiang007/alluxio,maobaolong/alluxio,Alluxio/alluxio,ShailShah/alluxio,WilliamZapata/alluxio,jswudi/alluxio,jswudi/alluxio,wwjiang007/alluxio,aaudiber/alluxio,PasaLab/tachyon,Reidddddd/alluxio,uronce-cc/alluxio,WilliamZapata/alluxio,riversand963/alluxio,ChangerYoung/alluxio,ShailShah/alluxio,wwjiang007/alluxio,Reidddddd/mo-alluxio,aaudiber/alluxio,jswudi/alluxio,EvilMcJerkface/alluxio,yuluo-ding/alluxio,jswudi/alluxio,apc999/alluxio,wwjiang007/alluxio,jsimsa/alluxio,apc999/alluxio,Alluxio/alluxio,calvinjia/tachyon,calvinjia/tachyon,madanadit/alluxio,Reidddddd/mo-alluxio,aaudiber/alluxio,madanadit/alluxio,ChangerYoung/alluxio,PasaLab/tachyon,Alluxio/alluxio,ShailShah/alluxio,Reidddddd/alluxio,Alluxio/alluxio,ChangerYoung/alluxio,bf8086/alluxio,aaudiber/alluxio,maobaolong/alluxio,yuluo-ding/alluxio,madanadit/alluxio,ShailShah/alluxio,uronce-cc/alluxio,maboelhassan/alluxio,maobaolong/alluxio,Reidddddd/alluxio,apc999/alluxio,madanadit/alluxio,jswudi/alluxio,WilliamZapata/alluxio,maobaolong/alluxio,wwjiang007/alluxio,Reidddddd/alluxio,PasaLab/tachyon,yuluo-ding/alluxio,uronce-cc/alluxio,riversand963/alluxio,ChangerYoung/alluxio,bf8086/alluxio,apc999/alluxio,WilliamZapata/alluxio,jsimsa/alluxio,uronce-cc/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,maboelhassan/alluxio,bf8086/alluxio,aaudiber/alluxio,madanadit/alluxio,uronce-cc/alluxio,uronce-cc/alluxio,apc999/alluxio,EvilMcJerkface/alluxio,yuluo-ding/alluxio,Alluxio/alluxio,maboelhassan/alluxio,madanadit/alluxio,bf8086/alluxio,yuluo-ding/alluxio,calvinjia/tachyon,yuluo-ding/alluxio,maboelhassan/alluxio,ChangerYoung/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,madanadit/alluxio,ShailShah/alluxio,calvinjia/tachyon,Alluxio/alluxio,riversand963/alluxio,bf8086/alluxio,jsimsa/alluxio,bf8086/alluxio,jsimsa/alluxio,aaudiber/alluxio,maobaolong/alluxio,bf8086/alluxio,WilliamZapata/alluxio,ChangerYoung/alluxio,riversand963/alluxio,maboelhassan/alluxio,Alluxio/alluxio,jsimsa/alluxio,ShailShah/alluxio,jsimsa/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,aaudiber/alluxio,bf8086/alluxio,PasaLab/tachyon,Reidddddd/mo-alluxio,maboelhassan/alluxio,Reidddddd/mo-alluxio,maobaolong/alluxio,riversand963/alluxio,PasaLab/tachyon,calvinjia/tachyon,apc999/alluxio,maobaolong/alluxio,Reidddddd/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,Reidddddd/alluxio,Reidddddd/alluxio,wwjiang007/alluxio,PasaLab/tachyon,Alluxio/alluxio,maboelhassan/alluxio,maobaolong/alluxio,maobaolong/alluxio,Reidddddd/mo-alluxio,wwjiang007/alluxio,PasaLab/tachyon,wwjiang007/alluxio,jswudi/alluxio,wwjiang007/alluxio,calvinjia/tachyon,Reidddddd/mo-alluxio,maobaolong/alluxio,riversand963/alluxio,WilliamZapata/alluxio,Alluxio/alluxio,apc999/alluxio,calvinjia/tachyon
/* * Licensed to the University of California, Berkeley under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package tachyon.yarn; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.Options; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.client.api.NMClient; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ConcurrentHashMultiset; import com.google.common.collect.Lists; import com.google.common.collect.Multiset; import tachyon.Constants; import tachyon.conf.TachyonConf; import tachyon.exception.ExceptionMessage; import tachyon.util.FormatUtils; import tachyon.util.network.NetworkAddressUtils; /** * Actual owner of Tachyon running on Yarn. The YARN ResourceManager will launch this * ApplicationMaster on an allocated container. The ApplicationMaster communicates with the YARN * cluster, and handles application execution. It performs operations asynchronously. */ public final class ApplicationMaster implements AMRMClientAsync.CallbackHandler { private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE); /** Maximum number of rounds of requesting and re-requesting worker containers */ // TODO(andrew): make this configurable private static final int MAX_WORKER_CONTAINER_REQUEST_ROUNDS = 20; private static final String TACHYON_SETUP_SCRIPT = "tachyon-yarn-setup.sh"; /** This tarball is generated by tachyon-yarn-submit.sh */ private static final String TACHYON_TARBALL = "tachyon.tar.gz"; /** * Resources needed by the master and worker containers. Yarn will copy these to the container * before running the container's command. 
*/ private static final List<String> LOCAL_RESOURCE_NAMES = Lists.newArrayList(TACHYON_TARBALL, TACHYON_SETUP_SCRIPT); /** Container request priorities are intra-application */ private static final Priority MASTER_PRIORITY = Priority.newInstance(0); /** * We set master and worker container request priorities to different values because * Yarn doesn't allow both relaxed locality and non-relaxed locality requests to be made * at the same priority level */ private static final Priority WORKER_PRIORITY = Priority.newInstance(1); /** Parameters sent from Client */ private final int mMasterCpu; private final int mWorkerCpu; private final int mMasterMemInMB; private final int mWorkerMemInMB; private final int mRamdiskMemInMB; private final int mNumWorkers; private final String mMasterAddress; private final boolean mOneWorkerPerHost; private final String mResourcePath; /** Set of hostnames for launched workers. The implementation must be thread safe */ private final Multiset<String> mWorkerHosts; private final YarnConfiguration mYarnConf = new YarnConfiguration(); private final TachyonConf mTachyonConf = new TachyonConf(); /** The count starts at 1, then becomes 0 when we allocate a container for the Tachyon master */ private final CountDownLatch mMasterContainerAllocatedLatch; /** The count starts at 1, then becomes 0 when the application is done */ private final CountDownLatch mApplicationDoneLatch; /** Client to talk to Resource Manager */ private AMRMClientAsync<ContainerRequest> mRMClient; /** Client to talk to Node Manager */ private NMClient mNMClient; /** Client Resource Manager Service */ private YarnClient mYarnClient; /** Network address of the container allocated for Tachyon master */ private String mMasterContainerNetAddress; /** * The number of worker container requests we are waiting to hear back from. Initialized during * {@link #requestWorkerContainers()} and decremented during * {@link #launchTachyonWorkerContainers(List)}. */ private CountDownLatch mOutstandingWorkerContainerRequestsLatch = null; public ApplicationMaster(int numWorkers, String masterAddress, String resourcePath) { mMasterCpu = mTachyonConf.getInt(Constants.INTEGRATION_MASTER_RESOURCE_CPU); mMasterMemInMB = (int) mTachyonConf.getBytes(Constants.INTEGRATION_MASTER_RESOURCE_MEM) / Constants.MB; mWorkerCpu = mTachyonConf.getInt(Constants.INTEGRATION_WORKER_RESOURCE_CPU); // TODO(binfan): request worker container and ramdisk container separately // memory for running worker mWorkerMemInMB = (int) mTachyonConf.getBytes(Constants.INTEGRATION_WORKER_RESOURCE_MEM) / Constants.MB; // memory for running ramdisk mRamdiskMemInMB = (int) mTachyonConf.getBytes(Constants.WORKER_MEMORY_SIZE) / Constants.MB; mOneWorkerPerHost = mTachyonConf.getBoolean(Constants.INTEGRATION_YARN_ONE_WORKER_PER_HOST); mNumWorkers = numWorkers; mMasterAddress = masterAddress; mResourcePath = resourcePath; mWorkerHosts = ConcurrentHashMultiset.create(); mMasterContainerAllocatedLatch = new CountDownLatch(1); mApplicationDoneLatch = new CountDownLatch(1); } /** * @param args Command line arguments to launch application master */ public static void main(String[] args) { Options options = new Options(); options.addOption("num_workers", true, "Number of Tachyon workers to launch. 
Default 1"); options.addOption("tachyon_home", true, "Path of the home dir of Tachyon deployment on YARN slave machines"); options.addOption("master_address", true, "(Required) Address to run Tachyon master"); options.addOption("resource_path", true, "(Required) HDFS path containing the Application Master"); try { LOG.info("Starting Application Master with args {}", Arrays.toString(args)); CommandLine cliParser = new GnuParser().parse(options, args); int numWorkers = Integer.parseInt(cliParser.getOptionValue("num_workers", "1")); String masterAddress = cliParser.getOptionValue("master_address"); String resourcePath = cliParser.getOptionValue("resource_path"); ApplicationMaster applicationMaster = new ApplicationMaster(numWorkers, masterAddress, resourcePath); applicationMaster.start(); applicationMaster.requestContainers(); applicationMaster.stop(); } catch (Exception e) { LOG.error("Error running Application Master ", e); System.exit(1); } } @Override public void onContainersAllocated(List<Container> containers) { if (mMasterContainerAllocatedLatch.getCount() != 0) { launchTachyonMasterContainers(containers); } else { launchTachyonWorkerContainers(containers); } } @Override public void onContainersCompleted(List<ContainerStatus> statuses) { for (ContainerStatus status : statuses) { // Releasing worker containers because we already have workers on their host will generate a // callback to this method, so we use info instead of error. if (status.getExitStatus() == ContainerExitStatus.ABORTED) { LOG.info("Aborted container {}", status.getContainerId()); } else { LOG.error("Container {} completed with exit status {}", status.getContainerId(), status.getExitStatus()); } } } @Override public void onNodesUpdated(List<NodeReport> updated) {} @Override public void onShutdownRequest() { mApplicationDoneLatch.countDown(); } @Override public void onError(Throwable t) {} @Override public float getProgress() { return 0; } public void start() throws IOException, YarnException { // create a client to talk to NodeManager mNMClient = NMClient.createNMClient(); mNMClient.init(mYarnConf); mNMClient.start(); // Create a client to talk to the ResourceManager mRMClient = AMRMClientAsync.createAMRMClientAsync(100, this); mRMClient.init(mYarnConf); mRMClient.start(); // Create a client to talk to Yarn e.g. to find out what nodes exist in the cluster mYarnClient = YarnClient.createYarnClient(); mYarnClient.init(mYarnConf); mYarnClient.start(); // Register with ResourceManager String hostname = NetworkAddressUtils.getLocalHostName(new TachyonConf()); mRMClient.registerApplicationMaster(hostname, 0 /* port */, "" /* tracking url */); LOG.info("ApplicationMaster registered"); } public void requestContainers() throws Exception { requestMasterContainer(); // Request Tachyon worker containers until they have all been allocated. This is done in // rounds of // (1) asking for just enough worker containers to reach the desired mNumWorkers // (2) waiting for all container requests to resolve. Some containers may be rejected because // they are located on hosts which already contain workers. // // When worker container requests are made during (1), mOutstandingWorkerContainerRequestsLatch // is initialized to the number of requests made. (2) is then achieved by counting down whenever // a container is allocated, and waiting here for the number of outstanding requests to hit 0. 
int round = 0; while (mWorkerHosts.size() < mNumWorkers && round < MAX_WORKER_CONTAINER_REQUEST_ROUNDS) { requestWorkerContainers(); LOG.info("Waiting for {} worker containers to be allocated", mOutstandingWorkerContainerRequestsLatch.getCount()); // TODO(andrew): Handle the case where something goes wrong and some worker containers never // get allocated. See TACHYON-1410 mOutstandingWorkerContainerRequestsLatch.await(); round ++; } if (mWorkerHosts.size() < mNumWorkers) { LOG.error( "Could not request {} workers from yarn resource manager after {} tries. " + "Proceeding with {} workers", mNumWorkers, MAX_WORKER_CONTAINER_REQUEST_ROUNDS, mWorkerHosts.size()); } LOG.info("Master and workers are launched"); mApplicationDoneLatch.await(); } /** * Requests a container for the master and blocks until it is allocated in * {@link #launchTachyonMasterContainers(List)}. */ private void requestMasterContainer() throws Exception { LOG.info("Requesting master container"); // Resource requirements for master containers Resource masterResource = Records.newRecord(Resource.class); masterResource.setMemory(mMasterMemInMB); masterResource.setVirtualCores(mMasterCpu); String[] nodes = {mMasterAddress}; // Make container request for Tachyon master to ResourceManager boolean relaxLocality = true; if (!mMasterAddress.equals("localhost")) { relaxLocality = false; } ContainerRequest masterContainerAsk = new ContainerRequest(masterResource, nodes, null /* any racks */, MASTER_PRIORITY, relaxLocality); LOG.info("Making resource request for Tachyon master: cpu {} memory {} MB on node {}", masterResource.getVirtualCores(), masterResource.getMemory(), mMasterAddress); mRMClient.addContainerRequest(masterContainerAsk); LOG.info("Waiting for master container to be allocated"); // Wait for the latch to be decremented in launchTachyonMasterContainers // TODO(andrew): Handle the case where something goes wrong and a master container never // gets allocated. See TACHYON-1410 mMasterContainerAllocatedLatch.await(); } /** * Requests containers for the workers, attempting to get containers on separate nodes. 
*/ private void requestWorkerContainers() throws Exception { LOG.info("Requesting worker containers"); // Resource requirements for worker containers Resource workerResource = Records.newRecord(Resource.class); workerResource.setMemory(mWorkerMemInMB + mRamdiskMemInMB); workerResource.setVirtualCores(mWorkerCpu); int currentNumWorkers = mWorkerHosts.size(); int neededWorkers = mNumWorkers - currentNumWorkers; mOutstandingWorkerContainerRequestsLatch = new CountDownLatch(neededWorkers); String[] hosts; boolean relaxLocality = !mOneWorkerPerHost; if (mOneWorkerPerHost) { hosts = getUnusedWorkerHosts(); if (hosts.length < neededWorkers) { throw new RuntimeException( ExceptionMessage.YARN_NOT_ENOUGH_HOSTS.getMessage(neededWorkers, hosts.length)); } } else { hosts = null; } // Make container requests for workers to ResourceManager for (int i = currentNumWorkers; i < mNumWorkers; i ++) { // TODO(andrew): Consider partitioning the available hosts among the worker requests ContainerRequest containerAsk = new ContainerRequest(workerResource, hosts, null /* any racks */, WORKER_PRIORITY, relaxLocality); LOG.info("Making resource request for Tachyon worker {}: cpu {} memory {} MB on hosts {}", i, workerResource.getVirtualCores(), workerResource.getMemory(), hosts); mRMClient.addContainerRequest(containerAsk); } } /** * @return the hostnames in the cluster which are not being used by a Tachyon worker, returning an * empty array if there are none */ private String[] getUnusedWorkerHosts() throws Exception { List<String> unusedHosts = Lists.newArrayList(); for (String host : YarnUtils.getNodeHosts(mYarnClient)) { if (!mWorkerHosts.contains(host)) { unusedHosts.add(host); } } return unusedHosts.toArray(new String[] {}); } public void stop() { try { mRMClient.unregisterApplicationMaster(FinalApplicationStatus.SUCCEEDED, "", ""); } catch (YarnException e) { LOG.error("Failed to unregister application", e); } catch (IOException e) { LOG.error("Failed to unregister application", e); } mRMClient.stop(); // TODO(andrew): Think about whether we should stop mNMClient here mYarnClient.stop(); } private void launchTachyonMasterContainers(List<Container> containers) { if (containers.size() == 0) { LOG.warn("launchTachyonMasterContainers was called with no containers"); return; } else if (containers.size() >= 2) { // NOTE: We can remove this check if we decide to support YARN multi-master in the future LOG.warn("{} containers were allocated for the Tachyon Master. 
Ignoring all but one.", containers.size()); } Container container = containers.get(0); final String command = new CommandBuilder("./" + TACHYON_SETUP_SCRIPT).addArg("master") .addArg("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout") .addArg("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr").toString(); try { ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class); ctx.setCommands(Lists.newArrayList(command)); ctx.setLocalResources(setupLocalResources(mResourcePath)); ctx.setEnvironment(setupMasterEnvironment()); LOG.info("Launching container {} for Tachyon master on {} with master command: {}", container.getId(), container.getNodeHttpAddress(), command); mNMClient.startContainer(container, ctx); String containerUri = container.getNodeHttpAddress(); // in the form of 1.2.3.4:8042 mMasterContainerNetAddress = containerUri.split(":")[0]; LOG.info("Master address: {}", mMasterContainerNetAddress); mMasterContainerAllocatedLatch.countDown(); return; } catch (Exception e) { LOG.error("Error launching container {}", container.getId(), e); } } private void launchTachyonWorkerContainers(List<Container> containers) { final String command = new CommandBuilder("./" + TACHYON_SETUP_SCRIPT).addArg("worker") .addArg("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout") .addArg("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr").toString(); ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class); ctx.setCommands(Lists.newArrayList(command)); ctx.setLocalResources(setupLocalResources(mResourcePath)); ctx.setEnvironment(setupWorkerEnvironment(mMasterContainerNetAddress, mRamdiskMemInMB)); for (Container container : containers) { synchronized (mWorkerHosts) { if (mWorkerHosts.size() >= mNumWorkers || (mOneWorkerPerHost && mWorkerHosts.contains(container.getNodeId().getHost()))) { // 1. Yarn will sometimes offer more containers than were requested, so we ignore offers // when mWorkerHosts.size() >= mNumWorkers // 2. 
Avoid re-using nodes if mOneWorkerPerHost is true LOG.info("Releasing assigned container on {}", container.getNodeId().getHost()); mRMClient.releaseAssignedContainer(container.getId()); } else { try { LOG.info("Launching container {} for Tachyon worker {} on {} with worker command: {}", container.getId(), mWorkerHosts.size(), container.getNodeHttpAddress(), command); mNMClient.startContainer(container, ctx); mWorkerHosts.add(container.getNodeId().getHost()); } catch (Exception e) { LOG.error("Error launching container {}", container.getId(), e); } } mOutstandingWorkerContainerRequestsLatch.countDown(); } } } private static Map<String, LocalResource> setupLocalResources(String resourcePath) { try { Map<String, LocalResource> localResources = new HashMap<String, LocalResource>(); for (String resourceName : LOCAL_RESOURCE_NAMES) { localResources.put(resourceName, Utils.createLocalResourceOfFile(new YarnConfiguration(), resourcePath + resourceName)); } return localResources; } catch (IOException e) { throw new RuntimeException("Cannot find resource", e); } } private static Map<String, String> setupMasterEnvironment() { return setupCommonEnvironment(); } private static Map<String, String> setupWorkerEnvironment(String masterContainerNetAddress, int ramdiskMemInMB) { Map<String, String> env = setupCommonEnvironment(); env.put("TACHYON_MASTER_ADDRESS", masterContainerNetAddress); env.put("TACHYON_WORKER_MEMORY_SIZE", FormatUtils.getSizeFromBytes((long) ramdiskMemInMB * Constants.MB)); return env; } private static Map<String, String> setupCommonEnvironment() { // Setup the environment needed for the launch context. // Because our jars are available as local resources in the working directory from which // the command will be run, we need to append "." to the path. Map<String, String> env = new HashMap<String, String>(); String classPath = new StringBuilder(ApplicationConstants.Environment.CLASSPATH.$()) .append(File.pathSeparatorChar).append("./*").toString(); env.put("CLASSPATH", classPath); env.put("TACHYON_HOME", ApplicationConstants.Environment.PWD.$()); return env; } }
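The comments in requestContainers() above describe rounds of worker requests that block on mOutstandingWorkerContainerRequestsLatch until the asynchronous allocation callback resolves them. A self-contained sketch of just that CountDownLatch handshake, with a thread pool standing in for the YARN callbacks; all names here are illustrative and not Tachyon or YARN APIs.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class LatchRoundSketch {
    public static void main(String[] args) throws InterruptedException {
        int outstandingRequests = 3; // requests made in this "round"
        final CountDownLatch latch = new CountDownLatch(outstandingRequests);
        ExecutorService callbacks = Executors.newFixedThreadPool(outstandingRequests);

        for (int i = 0; i < outstandingRequests; i++) {
            final int id = i;
            callbacks.submit(new Runnable() {
                @Override
                public void run() {
                    // stands in for onContainersAllocated(...) resolving one request
                    System.out.println("container " + id + " allocated");
                    latch.countDown();
                }
            });
        }

        // requestContainers() waits like this before deciding whether another round is needed
        latch.await();
        System.out.println("all outstanding requests resolved; next round may start");
        callbacks.shutdown();
    }
}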
integration/yarn/src/main/java/tachyon/yarn/ApplicationMaster.java
/* * Licensed to the University of California, Berkeley under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package tachyon.yarn; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.Options; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.client.api.NMClient; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ConcurrentHashMultiset; import com.google.common.collect.Lists; import com.google.common.collect.Multiset; import tachyon.Constants; import tachyon.conf.TachyonConf; import tachyon.exception.ExceptionMessage; import tachyon.util.FormatUtils; import tachyon.util.network.NetworkAddressUtils; /** * Actual owner of Tachyon running on Yarn. The YARN ResourceManager will launch this * ApplicationMaster on an allocated container. The ApplicationMaster communicates with the YARN * cluster, and handles application execution. It performs operations asynchronously. 
*/ public final class ApplicationMaster implements AMRMClientAsync.CallbackHandler { private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE); /** Maximum number of rounds of requesting and re-requesting worker containers */ // TODO(andrew): make this configurable private static final int MAX_WORKER_CONTAINER_REQUEST_ROUNDS = 20; // Container request priorities are intra-application private static final Priority MASTER_PRIORITY = Priority.newInstance(0); // We set master and worker container request priorities to different values because // Yarn doesn't allow both relaxed locality and non-relaxed locality requests to be made // at the same priority level private static final Priority WORKER_PRIORITY = Priority.newInstance(1); // Parameters sent from Client private final int mMasterCpu; private final int mWorkerCpu; private final int mMasterMemInMB; private final int mWorkerMemInMB; private final int mRamdiskMemInMB; private final int mNumWorkers; private final String mMasterAddress; private final boolean mOneWorkerPerHost; private final String mResourcePath; /** Set of hostnames for launched workers. The implementation must be thread safe */ private final Multiset<String> mWorkerHosts; private final YarnConfiguration mYarnConf = new YarnConfiguration(); private final TachyonConf mTachyonConf = new TachyonConf(); /** The count starts at 1, then becomes 0 when we allocate a container for the Tachyon master */ private final CountDownLatch mMasterContainerAllocatedLatch; /** The count starts at 1, then becomes 0 when the application is done */ private final CountDownLatch mApplicationDoneLatch; /** Client to talk to Resource Manager */ private AMRMClientAsync<ContainerRequest> mRMClient; /** Client to talk to Node Manager */ private NMClient mNMClient; /** Client Resource Manager Service */ private YarnClient mYarnClient; /** Network address of the container allocated for Tachyon master */ private String mMasterContainerNetAddress; /** * The number of worker container requests we are waiting to hear back from. Initialized during * {@link #requestWorkerContainers()} and decremented during * {@link #launchTachyonWorkerContainers(List)}. */ private CountDownLatch mOutstandingWorkerContainerRequestsLatch = null; public ApplicationMaster(int numWorkers, String masterAddress, String resourcePath) { mMasterCpu = mTachyonConf.getInt(Constants.INTEGRATION_MASTER_RESOURCE_CPU); mMasterMemInMB = (int) mTachyonConf.getBytes(Constants.INTEGRATION_MASTER_RESOURCE_MEM) / Constants.MB; mWorkerCpu = mTachyonConf.getInt(Constants.INTEGRATION_WORKER_RESOURCE_CPU); // TODO(binfan): request worker container and ramdisk container separately // memory for running worker mWorkerMemInMB = (int) mTachyonConf.getBytes(Constants.INTEGRATION_WORKER_RESOURCE_MEM) / Constants.MB; // memory for running ramdisk mRamdiskMemInMB = (int) mTachyonConf.getBytes(Constants.WORKER_MEMORY_SIZE) / Constants.MB; mOneWorkerPerHost = mTachyonConf.getBoolean(Constants.INTEGRATION_YARN_ONE_WORKER_PER_HOST); mNumWorkers = numWorkers; mMasterAddress = masterAddress; mResourcePath = resourcePath; mWorkerHosts = ConcurrentHashMultiset.create(); mMasterContainerAllocatedLatch = new CountDownLatch(1); mApplicationDoneLatch = new CountDownLatch(1); } /** * @param args Command line arguments to launch application master */ public static void main(String[] args) { Options options = new Options(); options.addOption("num_workers", true, "Number of Tachyon workers to launch. 
Default 1"); options.addOption("tachyon_home", true, "Path of the home dir of Tachyon deployment on YARN slave machines"); options.addOption("master_address", true, "(Required) Address to run Tachyon master"); options.addOption("resource_path", true, "(Required) HDFS path containing the Application Master"); try { LOG.info("Starting Application Master with args {}", Arrays.toString(args)); CommandLine cliParser = new GnuParser().parse(options, args); int numWorkers = Integer.parseInt(cliParser.getOptionValue("num_workers", "1")); String masterAddress = cliParser.getOptionValue("master_address"); String resourcePath = cliParser.getOptionValue("resource_path"); ApplicationMaster applicationMaster = new ApplicationMaster(numWorkers, masterAddress, resourcePath); applicationMaster.start(); applicationMaster.requestContainers(); applicationMaster.stop(); } catch (Exception e) { LOG.error("Error running Application Master ", e); System.exit(1); } } @Override public void onContainersAllocated(List<Container> containers) { if (mMasterContainerAllocatedLatch.getCount() != 0) { launchTachyonMasterContainers(containers); } else { launchTachyonWorkerContainers(containers); } } @Override public void onContainersCompleted(List<ContainerStatus> statuses) { for (ContainerStatus status : statuses) { // Releasing worker containers because we already have workers on their host will generate a // callback to this method, so we use info instead of error. if (status.getExitStatus() == ContainerExitStatus.ABORTED) { LOG.info("Aborted container {}", status.getContainerId()); } else { LOG.error("Container {} completed with exit status {}", status.getContainerId(), status.getExitStatus()); } } } @Override public void onNodesUpdated(List<NodeReport> updated) {} @Override public void onShutdownRequest() { mApplicationDoneLatch.countDown(); } @Override public void onError(Throwable t) {} @Override public float getProgress() { return 0; } public void start() throws IOException, YarnException { // create a client to talk to NodeManager mNMClient = NMClient.createNMClient(); mNMClient.init(mYarnConf); mNMClient.start(); // Create a client to talk to the ResourceManager mRMClient = AMRMClientAsync.createAMRMClientAsync(100, this); mRMClient.init(mYarnConf); mRMClient.start(); // Create a client to talk to Yarn e.g. to find out what nodes exist in the cluster mYarnClient = YarnClient.createYarnClient(); mYarnClient.init(mYarnConf); mYarnClient.start(); // Register with ResourceManager String hostname = NetworkAddressUtils.getLocalHostName(new TachyonConf()); mRMClient.registerApplicationMaster(hostname, 0 /* port */, "" /* tracking url */); LOG.info("ApplicationMaster registered"); } public void requestContainers() throws Exception { requestMasterContainer(); // Request Tachyon worker containers until they have all been allocated. This is done in // rounds of // (1) asking for just enough worker containers to reach the desired mNumWorkers // (2) waiting for all container requests to resolve. Some containers may be rejected because // they are located on hosts which already contain workers. // // When worker container requests are made during (1), mOutstandingWorkerContainerRequestsLatch // is initialized to the number of requests made. (2) is then achieved by counting down whenever // a container is allocated, and waiting here for the number of outstanding requests to hit 0. 
int round = 0; while (mWorkerHosts.size() < mNumWorkers && round < MAX_WORKER_CONTAINER_REQUEST_ROUNDS) { requestWorkerContainers(); LOG.info("Waiting for {} worker containers to be allocated", mOutstandingWorkerContainerRequestsLatch.getCount()); // TODO(andrew): Handle the case where something goes wrong and some worker containers never // get allocated. See TACHYON-1410 mOutstandingWorkerContainerRequestsLatch.await(); round ++; } if (mWorkerHosts.size() < mNumWorkers) { LOG.error( "Could not request {} workers from yarn resource manager after {} tries. " + "Proceeding with {} workers", mNumWorkers, MAX_WORKER_CONTAINER_REQUEST_ROUNDS, mWorkerHosts.size()); } LOG.info("Master and workers are launched"); mApplicationDoneLatch.await(); } /** * Requests a container for the master and blocks until it is allocated in * {@link #launchTachyonMasterContainers(List)}. */ private void requestMasterContainer() throws Exception { LOG.info("Requesting master container"); // Resource requirements for master containers Resource masterResource = Records.newRecord(Resource.class); masterResource.setMemory(mMasterMemInMB); masterResource.setVirtualCores(mMasterCpu); String[] nodes = {mMasterAddress}; // Make container request for Tachyon master to ResourceManager boolean relaxLocality = true; if (!mMasterAddress.equals("localhost")) { relaxLocality = false; } ContainerRequest masterContainerAsk = new ContainerRequest(masterResource, nodes, null /* any racks */, MASTER_PRIORITY, relaxLocality); LOG.info("Making resource request for Tachyon master: cpu {} memory {} MB on node {}", masterResource.getVirtualCores(), masterResource.getMemory(), mMasterAddress); mRMClient.addContainerRequest(masterContainerAsk); LOG.info("Waiting for master container to be allocated"); // Wait for the latch to be decremented in launchTachyonMasterContainers // TODO(andrew): Handle the case where something goes wrong and a master container never // gets allocated. See TACHYON-1410 mMasterContainerAllocatedLatch.await(); } /** * Requests containers for the workers, attempting to get containers on separate nodes. 
*/ private void requestWorkerContainers() throws Exception { LOG.info("Requesting worker containers"); // Resource requirements for worker containers Resource workerResource = Records.newRecord(Resource.class); workerResource.setMemory(mWorkerMemInMB + mRamdiskMemInMB); workerResource.setVirtualCores(mWorkerCpu); int currentNumWorkers = mWorkerHosts.size(); int neededWorkers = mNumWorkers - currentNumWorkers; mOutstandingWorkerContainerRequestsLatch = new CountDownLatch(neededWorkers); String[] hosts; boolean relaxLocality = !mOneWorkerPerHost; if (mOneWorkerPerHost) { hosts = getUnusedWorkerHosts(); if (hosts.length < neededWorkers) { throw new RuntimeException( ExceptionMessage.YARN_NOT_ENOUGH_HOSTS.getMessage(neededWorkers, hosts.length)); } } else { hosts = null; } // Make container requests for workers to ResourceManager for (int i = currentNumWorkers; i < mNumWorkers; i ++) { // TODO(andrew): Consider partitioning the available hosts among the worker requests ContainerRequest containerAsk = new ContainerRequest(workerResource, hosts, null /* any racks */, WORKER_PRIORITY, relaxLocality); LOG.info("Making resource request for Tachyon worker {}: cpu {} memory {} MB on hosts {}", i, workerResource.getVirtualCores(), workerResource.getMemory(), hosts); mRMClient.addContainerRequest(containerAsk); } } /** * @return the hostnames in the cluster which are not being used by a Tachyon worker, returning an * empty array if there are none */ private String[] getUnusedWorkerHosts() throws Exception { List<String> unusedHosts = Lists.newArrayList(); for (String host : YarnUtils.getNodeHosts(mYarnClient)) { if (!mWorkerHosts.contains(host)) { unusedHosts.add(host); } } return unusedHosts.toArray(new String[] {}); } public void stop() { try { mRMClient.unregisterApplicationMaster(FinalApplicationStatus.SUCCEEDED, "", ""); } catch (YarnException e) { LOG.error("Failed to unregister application", e); } catch (IOException e) { LOG.error("Failed to unregister application", e); } mRMClient.stop(); // TODO(andrew): Think about whether we should stop mNMClient here mYarnClient.stop(); } private void launchTachyonMasterContainers(List<Container> containers) { if (containers.size() == 0) { LOG.warn("launchTachyonMasterContainers was called with no containers"); return; } else if (containers.size() >= 2) { // NOTE: We can remove this check if we decide to support YARN multi-master in the future LOG.warn("{} containers were allocated for the Tachyon Master. Ignoring all but one.", containers.size()); } Container container = containers.get(0); final String command = new CommandBuilder("./tachyon-yarn-setup.sh").addArg("master") .addArg("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout") .addArg("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr").toString(); List<String> commands = Lists.newArrayList(command); try { ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class); // Setup commands ctx.setCommands(commands); // Setup local resources Map<String, LocalResource> localResources = new HashMap<String, LocalResource>(); localResources.put("tachyon.tar.gz", Utils.createLocalResourceOfFile(mYarnConf, mResourcePath + "/tachyon.tar.gz")); localResources.put("tachyon-yarn-setup.sh", Utils.createLocalResourceOfFile(mYarnConf, mResourcePath + "/tachyon-yarn-setup.sh")); ctx.setLocalResources(localResources); // Setup the environment needed for the launch context. 
// Because our jars are available as local resources in the working directory from which // the command will be run, we need to append "." to the path. Map<String, String> env = new HashMap<String, String>(); String classPath = new StringBuilder(ApplicationConstants.Environment.CLASSPATH.$()) .append(File.pathSeparatorChar).append("./*").toString(); env.put("CLASSPATH", classPath); env.put("TACHYON_HOME", ApplicationConstants.Environment.PWD.$()); ctx.setEnvironment(env); LOG.info("Launching container {} for Tachyon master on {} with master command: {}", container.getId(), container.getNodeHttpAddress(), commands); mNMClient.startContainer(container, ctx); String containerUri = container.getNodeHttpAddress(); // in the form of 1.2.3.4:8042 mMasterContainerNetAddress = containerUri.split(":")[0]; LOG.info("Master address: {}", mMasterContainerNetAddress); mMasterContainerAllocatedLatch.countDown(); return; } catch (Exception e) { LOG.error("Error launching container {}", container.getId(), e); } } private void launchTachyonWorkerContainers(List<Container> containers) { final String command = new CommandBuilder("./tachyon-yarn-setup.sh").addArg("worker") .addArg("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout") .addArg("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr").toString(); List<String> commands = Lists.newArrayList(command); // Setup the environment needed for the launch context. // Because our jars are available as local resources in the working directory from which // the command will be run, we need to append "." to the path. Map<String, String> env = new HashMap<String, String>(); String classPath = new StringBuilder(ApplicationConstants.Environment.CLASSPATH.$()) .append(File.pathSeparatorChar).append("./*").toString(); env.put("CLASSPATH", classPath); env.put("TACHYON_HOME", ApplicationConstants.Environment.PWD.$()); env.put("TACHYON_MASTER_ADDRESS", mMasterContainerNetAddress); env.put("TACHYON_WORKER_MEMORY_SIZE", FormatUtils.getSizeFromBytes((long) mRamdiskMemInMB * Constants.MB)); // Setup local resources Map<String, LocalResource> localResources = new HashMap<String, LocalResource>(); try { localResources.put("tachyon.tar.gz", Utils.createLocalResourceOfFile(mYarnConf, mResourcePath + "/tachyon.tar.gz")); localResources.put("tachyon-yarn-setup.sh", Utils.createLocalResourceOfFile(mYarnConf, mResourcePath + "/tachyon-yarn-setup.sh")); } catch (IOException e) { throw new RuntimeException("Cannot find resource", e); } for (Container container : containers) { synchronized (mWorkerHosts) { if (mWorkerHosts.size() >= mNumWorkers || (mOneWorkerPerHost && mWorkerHosts.contains(container.getNodeId().getHost()))) { // 1. Yarn will sometimes offer more containers than were requested, so we ignore offers // when mWorkerHosts.size() >= mNumWorkers // 2. 
Avoid re-using nodes if mOneWorkerPerHost is true LOG.info("Releasing assigned container on {}", container.getNodeId().getHost()); mRMClient.releaseAssignedContainer(container.getId()); } else { try { ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class); ctx.setCommands(commands); ctx.setLocalResources(localResources); ctx.setEnvironment(env); LOG.info("Launching container {} for Tachyon worker {} on {} with worker command: {}", container.getId(), mWorkerHosts.size(), container.getNodeHttpAddress(), command); mNMClient.startContainer(container, ctx); mWorkerHosts.add(container.getNodeId().getHost()); } catch (Exception e) { LOG.error("Error launching container {}", container.getId(), e); } } mOutstandingWorkerContainerRequestsLatch.countDown(); } } } }
Refactor ApplicationMaster to reduce duplication and improve consistency
integration/yarn/src/main/java/tachyon/yarn/ApplicationMaster.java
Refactor ApplicationMaster to reduce duplication and improve consistency
Java
apache-2.0
db16d9f3c7f47497b817a7eb90db89d1a3bab3f9
0
pbaris/viritin,pbaris/viritin,viritin/viritin,viritin/viritin
package org.vaadin.viritin; import java.io.Serializable; import java.util.AbstractList; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.WeakHashMap; /** * A general purpose helper class to us MTable/ListContainer for service layers * (EJBs, Spring Data etc) that provide large amount of data. Makes paged * requests to PagingProvider, caches recently used pages in memory and this way * hides away Vaadin Container complexity from you. The class generic helper and * is probably useful also other but Vaadin applications as well. * * @author Matti Tahvonen * @param <T> The type of the objects in the list */ public class LazyList<T> extends AbstractList<T> implements Serializable { private static final long serialVersionUID = 2423832460602269469L; private List<T> findPageFromCache(int pageIndexForReqest) { int p = pageIndexForReqest - pageIndex; if (p < 0) { return null; } if (pages.size() <= p) { return null; } return pages.get(p); } private void loadPreviousPage() { pageIndex--; List<T> page = findEntities(pageIndex * pageSize); pages.add(0, page); if (pages.size() > maxPages) { pages.remove(pages.size() - 1); } } private void loadNextPage() { List<T> page = findEntities((pageIndex + pages.size()) * pageSize); pages.add(page); if (pages.size() > maxPages) { pages.remove(0); pageIndex++; } } // Split into subinterfaces for better Java 8 lambda support /** * Interface via the LazyList communicates with the "backend" * * @param <T> The type of the objects in the list */ public interface PagingProvider<T> extends Serializable { /** * Fetches one "page" of entities form the backend. The amount * "maxResults" should match with the value configured for the LazyList * * @param firstRow the index of first row that should be fetched * @return a sub list from given first index */ public List<T> findEntities(int firstRow); } /** * LazyList detects the size of the "simulated" list with via this * interface. Backend call is cached as COUNT queries in databases are * commonly heavy. */ public interface CountProvider extends Serializable { /** * @return the count of entities listed in the LazyList */ public int size(); } /** * Interface via the LazyList communicates with the "backend" * * @param <T> The type of the objects in the list */ public interface EntityProvider<T> extends PagingProvider<T>, CountProvider { } private PagingProvider<T> pageProvider; private final CountProvider countProvider; // Vaadin table by default has 15 rows, 2x that to cache up an down // With this setting it is maximum of 2 requests that happens. With // normal scrolling just 0-1 per user interaction public static final int DEFAULT_PAGE_SIZE = 15 + 15 * 2; public int getMaxPages() { return maxPages; } /** * Sets the maximum of pages that are held in memory. By default 3, but it * is adjusted automatically based on requests that are made to the list, * like subList method calls. Most often this shouldn't be called by end * user. * * @param maxPages the number of pages to be held in memory */ public void setMaxPages(int maxPages) { this.maxPages = maxPages; } private int maxPages = 3; List<List<T>> pages = new ArrayList<>(); private int pageIndex = -10; private final int pageSize; protected LazyList(CountProvider countProvider, int pageSize) { this.countProvider = countProvider; this.pageSize = pageSize; } /** * Constructs a new LazyList with given provider and default page size of * DEFAULT_PAGE_SIZE (30). 
* * @param dataProvider the data provider that is used to fetch pages of * entities and to detect the total count of entities */ public LazyList(EntityProvider<T> dataProvider) { this(dataProvider, DEFAULT_PAGE_SIZE); } /** * Constructs a new LazyList with given provider and default page size of * DEFAULT_PAGE_SIZE (30). * * @param dataProvider the data provider that is used to fetch pages of * entities and to detect the total count of entities * @param pageSize the page size to be used */ public LazyList(EntityProvider<T> dataProvider, int pageSize) { this.pageProvider = dataProvider; this.countProvider = dataProvider; this.pageSize = pageSize; } /** * Constructs a new LazyList with given providers and default page size of * DEFAULT_PAGE_SIZE (30). * * @param pageProvider the interface via "pages" of entities are requested * @param countProvider the interface via the total count of entities is * detected. */ public LazyList(PagingProvider<T> pageProvider, CountProvider countProvider) { this(pageProvider, countProvider, DEFAULT_PAGE_SIZE); } /** * Constructs a new LazyList with given providers and page size. * * @param pageProvider the interface via "pages" of entities are requested * @param countProvider the interface via the total count of entities is * detected. * @param pageSize the page size that should be used */ public LazyList(PagingProvider<T> pageProvider, CountProvider countProvider, int pageSize) { this.pageProvider = pageProvider; this.countProvider = countProvider; this.pageSize = pageSize; } @Override public T get(final int index) { final int pageIndexForReqest = index / pageSize; final int indexOnPage = index % pageSize; // Find page from cache List<T> page = findPageFromCache(pageIndexForReqest); if (page == null) { if (pageIndex >= 0) { if (pageIndexForReqest > pageIndex && pageIndexForReqest < pageIndex + pages.size() + maxPages) { // load next n pages forward while (pageIndexForReqest >= pageIndex + pages.size()) { loadNextPage(); } } else if (pageIndexForReqest < pageIndex && pageIndexForReqest > pageIndex - maxPages) { //load prev page to cache while (pageIndexForReqest < pageIndex) { loadPreviousPage(); } } else { initCacheFormPage(pageIndexForReqest); } } else { // first page to load initCacheFormPage(pageIndexForReqest); } page = findPageFromCache(pageIndexForReqest); } return page != null ? page.get(indexOnPage) : null; } protected void initCacheFormPage(final int pageIndexForReqest) { // clear cache pageIndex = pageIndexForReqest; pages.clear(); pages.add(findEntities(pageIndex * pageSize)); } protected List<T> findEntities(int i) { return pageProvider.findEntities(i); } private Integer cachedSize; @Override public int size() { if (cachedSize == null) { cachedSize = countProvider.size(); } return cachedSize; } private transient WeakHashMap<T, Integer> indexCache; private Map<T, Integer> getIndexCache() { if (indexCache == null) { indexCache = new WeakHashMap<>(); } return indexCache; } @Override public int indexOf(Object o) { // optimize: check the buffers first Integer indexViaCache = getIndexCache().get(o); if (indexViaCache != null) { return indexViaCache; } for (int i = 0; i < pages.size(); i++) { List<T> page = pages.get(i); int indexOf = page.indexOf(o); if (indexOf != -1) { indexViaCache = (pageIndex + i) * pageSize + indexOf; } } if (indexViaCache != null) { /* * In some cases (selected value) components like Vaadin combobox calls this, then stuff from elsewhere with indexes and * finally again this method with the same object (possibly on other page). 
Thus, to avoid heavy iterating, * cache the location. */ getIndexCache().put((T) o, indexViaCache); return indexViaCache; } // fall back to iterating, this will most likely be sloooooow.... // If your app gets here, consider overwriting this method, and to // some optimization at service/db level return super.indexOf(o); } @Override public boolean contains(Object o) { // Although there would be the indexed version, vaadin sometimes calls this // First check caches, then fall back to sluggish iterator :-( if (getIndexCache().containsKey(o)) { return true; } for (List<T> t : pages) { if (t.contains(o)) { return true; } } return super.contains(o); } @Override public List<T> subList(int fromIndex, int toIndex) { final int sizeOfSublist = toIndex - fromIndex; if (sizeOfSublist > maxPages * (pageSize -1)) { // Increase the amount of cached pages if necessary maxPages = sizeOfSublist/pageSize + 1; } return new ArrayList<>(super.subList(fromIndex, toIndex)); } @Override public Iterator<T> iterator() { return new Iterator<T>() { private int index = -1; private final int size = size(); @Override public boolean hasNext() { return index + 1 < size; } @Override public T next() { if (!hasNext()) throw new NoSuchElementException(); index++; return get(index); } @Override public void remove() { throw new UnsupportedOperationException("Not supported."); } }; } /** * Resets buffers used by the LazyList. */ public void reset() { pages.clear(); pageIndex = -10; cachedSize = null; if (indexCache != null) { indexCache.clear(); } } }
src/main/java/org/vaadin/viritin/LazyList.java
package org.vaadin.viritin; import java.io.Serializable; import java.util.AbstractList; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.WeakHashMap; /** * A general purpose helper class to us MTable/ListContainer for service layers * (EJBs, Spring Data etc) that provide large amount of data. Makes paged * requests to PagingProvider, caches recently used pages in memory and this way * hides away Vaadin Container complexity from you. The class generic helper and * is probably useful also other but Vaadin applications as well. * * @author Matti Tahvonen * @param <T> The type of the objects in the list */ public class LazyList<T> extends AbstractList<T> implements Serializable { private static final long serialVersionUID = 2423832460602269469L; private List<T> findPageFromCache(int pageIndexForReqest) { int p = pageIndexForReqest - pageIndex; if (p < 0) { return null; } if (pages.size() <= p) { return null; } return pages.get(p); } private void loadPreviousPage() { pageIndex--; List<T> page = findEntities(pageIndex * pageSize); pages.add(0, page); if (pages.size() > maxPages) { pages.remove(pages.size() - 1); } } private void loadNextPage() { List<T> page = findEntities((pageIndex + pages.size()) * pageSize); pages.add(page); if (pages.size() > maxPages) { pages.remove(0); pageIndex++; } } // Split into subinterfaces for better Java 8 lambda support /** * Interface via the LazyList communicates with the "backend" * * @param <T> The type of the objects in the list */ public interface PagingProvider<T> extends Serializable { /** * Fetches one "page" of entities form the backend. The amount * "maxResults" should match with the value configured for the LazyList * * @param firstRow the index of first row that should be fetched * @return a sub list from given first index */ public List<T> findEntities(int firstRow); } /** * LazyList detects the size of the "simulated" list with via this * interface. Backend call is cached as COUNT queries in databases are * commonly heavy. */ public interface CountProvider extends Serializable { /** * @return the count of entities listed in the LazyList */ public int size(); } /** * Interface via the LazyList communicates with the "backend" * * @param <T> The type of the objects in the list */ public interface EntityProvider<T> extends PagingProvider<T>, CountProvider { } private PagingProvider<T> pageProvider; private final CountProvider countProvider; // Vaadin table by default has 15 rows, 2x that to cache up an down // With this setting it is maximum of 2 requests that happens. With // normal scrolling just 0-1 per user interaction public static final int DEFAULT_PAGE_SIZE = 15 + 15 * 2; public int getMaxPages() { return maxPages; } /** * Sets the maximum of pages that are held in memory. By default 3, but it * is adjusted automatically based on requests that are made to the list, * like subList method calls. Most often this shouldn't be called by end * user. * * @param maxPages the number of pages to be held in memory */ public void setMaxPages(int maxPages) { this.maxPages = maxPages; } private int maxPages = 3; List<List<T>> pages = new ArrayList<>(); private int pageIndex = -10; private final int pageSize; protected LazyList(CountProvider countProvider, int pageSize) { this.countProvider = countProvider; this.pageSize = pageSize; } /** * Constructs a new LazyList with given provider and default page size of * DEFAULT_PAGE_SIZE (30). 
* * @param dataProvider the data provider that is used to fetch pages of * entities and to detect the total count of entities */ public LazyList(EntityProvider<T> dataProvider) { this(dataProvider, DEFAULT_PAGE_SIZE); } /** * Constructs a new LazyList with given provider and default page size of * DEFAULT_PAGE_SIZE (30). * * @param dataProvider the data provider that is used to fetch pages of * entities and to detect the total count of entities * @param pageSize the page size to be used */ public LazyList(EntityProvider<T> dataProvider, int pageSize) { this.pageProvider = dataProvider; this.countProvider = dataProvider; this.pageSize = pageSize; } /** * Constructs a new LazyList with given providers and default page size of * DEFAULT_PAGE_SIZE (30). * * @param pageProvider the interface via "pages" of entities are requested * @param countProvider the interface via the total count of entities is * detected. */ public LazyList(PagingProvider<T> pageProvider, CountProvider countProvider) { this(pageProvider, countProvider, DEFAULT_PAGE_SIZE); } /** * Constructs a new LazyList with given providers and page size. * * @param pageProvider the interface via "pages" of entities are requested * @param countProvider the interface via the total count of entities is * detected. * @param pageSize the page size that should be used */ public LazyList(PagingProvider<T> pageProvider, CountProvider countProvider, int pageSize) { this.pageProvider = pageProvider; this.countProvider = countProvider; this.pageSize = pageSize; } @Override public T get(final int index) { final int pageIndexForReqest = index / pageSize; final int indexOnPage = index % pageSize; // Find page from cache List<T> page = findPageFromCache(pageIndexForReqest); if (page == null) { if (pageIndex >= 0) { if (pageIndexForReqest > pageIndex && pageIndexForReqest < pageIndex + pages.size() + maxPages) { // load next n pages forward while (pageIndexForReqest >= pageIndex + pages.size()) { loadNextPage(); } } else if (pageIndexForReqest < pageIndex && pageIndexForReqest > pageIndex - maxPages) { //load prev page to cache while (pageIndexForReqest < pageIndex) { loadPreviousPage(); } } else { initCacheFormPage(pageIndexForReqest); } } else { // first page to load initCacheFormPage(pageIndexForReqest); } page = findPageFromCache(pageIndexForReqest); } return page != null ? page.get(indexOnPage) : null; } protected void initCacheFormPage(final int pageIndexForReqest) { // clear cache pageIndex = pageIndexForReqest; pages.clear(); pages.add(findEntities(pageIndex * pageSize)); } protected List<T> findEntities(int i) { return pageProvider.findEntities(i); } private Integer cachedSize; @Override public int size() { if (cachedSize == null) { cachedSize = countProvider.size(); } return cachedSize; } private transient WeakHashMap<T, Integer> indexCache; private Map<T, Integer> getIndexCache() { if (indexCache == null) { indexCache = new WeakHashMap<>(); } return indexCache; } @Override public int indexOf(Object o) { // optimize: check the buffers first Integer indexViaCache = getIndexCache().get(o); if (indexViaCache != null) { return indexViaCache; } for (int i = 0; i < pages.size(); i++) { List<T> page = pages.get(i); int indexOf = page.indexOf(o); if (indexOf != -1) { indexViaCache = (pageIndex + i) * pageSize + indexOf; } } if (indexViaCache != null) { /* * In some cases (selected value) components like Vaadin combobox calls this, then stuff from elsewhere with indexes and * finally again this method with the same object (possibly on other page). 
Thus, to avoid heavy iterating, * cache the location. */ getIndexCache().put((T) o, indexViaCache); return indexViaCache; } // fall back to iterating, this will most likely be sloooooow.... // If your app gets here, consider overwriting this method, and to // some optimization at service/db level return super.indexOf(o); } @Override public boolean contains(Object o) { // Although there would be the indexed version, vaadin sometimes calls this // First check caches, then fall back to sluggish iterator :-( if (getIndexCache().containsKey(o)) { return true; } for (List<T> t : pages) { if (t.contains(o)) { return true; } } return super.contains(o); } @Override public List<T> subList(int fromIndex, int toIndex) { final int sizeOfSublist = toIndex - fromIndex; if (sizeOfSublist > maxPages * (pageSize -1)) { // Increase the amount of cached pages if necessary maxPages = sizeOfSublist/pageSize + 1; } return new ArrayList<>(super.subList(fromIndex, toIndex)); } @Override public Iterator<T> iterator() { return new Iterator<T>() { private int index = -1; private final int size = size(); @Override public boolean hasNext() { return index + 1 < size; } @Override public T next() { index++; return get(index); } @Override public void remove() { throw new UnsupportedOperationException("Not supported."); } }; } /** * Resets buffers used by the LazyList. */ public void reset() { pages.clear(); pageIndex = -10; cachedSize = null; if (indexCache != null) { indexCache.clear(); } } }
Throw NoSuchElementException instead of returning null.
src/main/java/org/vaadin/viritin/LazyList.java
Throw NoSuchElementException
Java
apache-2.0
78b49e11d80df197c7bf0253c4969e18612c3db2
0
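A minimal usage sketch for the LazyList captured in the record above (not part of the commit). It wires the lambda-friendly PagingProvider/CountProvider constructor to a plain in-memory list standing in for a real backend service, and exercises the iterator behaviour this commit changes: an exhausted iterator now throws NoSuchElementException instead of handing back a null element. The class name LazyListUsageSketch and the in-memory "rows" backend are illustrative assumptions; only LazyList's own API from the listing is used.

import org.vaadin.viritin.LazyList;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

public class LazyListUsageSketch {

    public static void main(String[] args) {
        // Hypothetical stand-in for a backend service: 100 rows held in memory.
        final List<String> rows = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            rows.add("row-" + i);
        }

        // The PagingProvider/CountProvider split exists so that plain lambdas
        // and method references can act as the backend.
        LazyList<String> lazy = new LazyList<String>(
                firstRow -> rows.subList(firstRow,
                        Math.min(firstRow + LazyList.DEFAULT_PAGE_SIZE, rows.size())), // PagingProvider
                rows::size,                                                            // CountProvider
                LazyList.DEFAULT_PAGE_SIZE);

        // Pages of DEFAULT_PAGE_SIZE (15 + 15 * 2 = 45) rows are fetched on demand;
        // at most maxPages of them are kept in memory at a time.
        for (String row : lazy) {
            System.out.println(row);
        }

        // The behaviour this commit changes: iterating past the end now fails fast
        // with NoSuchElementException, as the java.util.Iterator contract requires.
        Iterator<String> it = lazy.iterator();
        while (it.hasNext()) {
            it.next();
        }
        try {
            it.next();
        } catch (NoSuchElementException expected) {
            System.out.println("Iterator exhausted, as expected.");
        }
    }
}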
Wi5/odin-wi5-controller,schuza/odin-master,lalithsuresh/odin-master
package net.floodlightcontroller.odinmaster; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.openflow.protocol.OFMessage; import org.openflow.protocol.OFType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import net.floodlightcontroller.core.FloodlightContext; import net.floodlightcontroller.core.IFloodlightProviderService; import net.floodlightcontroller.core.IOFMessageListener; import net.floodlightcontroller.core.IOFSwitch; import net.floodlightcontroller.core.IOFSwitchListener; import net.floodlightcontroller.core.module.FloodlightModuleContext; import net.floodlightcontroller.core.module.FloodlightModuleException; import net.floodlightcontroller.core.module.IFloodlightModule; import net.floodlightcontroller.core.module.IFloodlightService; import net.floodlightcontroller.odinmaster.NotificationCallback; import net.floodlightcontroller.odinmaster.OdinEventSubscription; import net.floodlightcontroller.odinmaster.NotificationCallbackContext; import net.floodlightcontroller.odinmaster.SubscriptionCallbackTuple; import net.floodlightcontroller.odinmaster.IOdinAgent; import net.floodlightcontroller.odinmaster.OdinClient; import net.floodlightcontroller.packet.DHCP; import net.floodlightcontroller.packet.Ethernet; import net.floodlightcontroller.packet.IPacket; import net.floodlightcontroller.restserver.IRestApiService; import net.floodlightcontroller.util.MACAddress; /** * OdinMaster implementation. Exposes interfaces to OdinApplications, * and keeps track of agents and clients in the system. 
* * @author Lalith Suresh <suresh.lalith@gmail.com> * */ public class OdinMaster implements IFloodlightModule, IOFSwitchListener, IOdinApplicationInterface, IOFMessageListener, IFloodlightService { protected static Logger log = LoggerFactory.getLogger(OdinMaster.class); protected IRestApiService restApi; private IFloodlightProviderService floodlightProvider; private final ScheduledExecutorService executor = Executors.newScheduledThreadPool(10); private final AgentManager agentManager; private final ClientManager clientManager; private final ILvapManager lvapManager; private long subscriptionId = 0; private String subscriptionList = ""; private final ConcurrentMap<Long, SubscriptionCallbackTuple> subscriptions = new ConcurrentHashMap<Long, SubscriptionCallbackTuple>(); private int idleLvapTimeout = 30; // Seconds public OdinMaster(){ clientManager = new ClientManager(); agentManager = new AgentManager(clientManager); lvapManager = new LvapManagerImpl(); } public OdinMaster(AgentManager agentManager, ClientManager clientManager, ILvapManager lvapManager){ this.agentManager = agentManager; this.clientManager = clientManager; this.lvapManager = lvapManager; } /** Odin Agent->Master protocol handlers **/ /** * Handle a ping from an agent * * @param InetAddress of the agent */ public synchronized void receivePing (final InetAddress odinAgentAddr) { if (agentManager.receivePing(odinAgentAddr)) { // if the above leads to a new agent being // tracked, push the current subscription list // to it. pushSubscriptionListToAgent(agentManager.getOdinAgents().get(odinAgentAddr)); // Reclaim idle lvaps and also attach flows to lvaps for (OdinClient client: agentManager.getOdinAgents().get(odinAgentAddr).getLvapsLocal()) { executor.schedule(new IdleLvapReclaimTask(client), idleLvapTimeout, TimeUnit.SECONDS); // Assign flow tables if (!client.getIpAddress().getHostAddress().equals("0.0.0.0")) { // Obtain reference to client entity from clientManager, because agent.getLvapsLocal() // returns a separate copy of the client objects. OdinClient trackedClient = clientManager.getClients().get(client.getMacAddress()); trackedClient.setOFMessageList(lvapManager.getDefaultOFModList(client.getIpAddress())); // Push flow messages associated with the client try { trackedClient.getOdinAgent().getSwitch().write(trackedClient.getOFMessageList(), null); } catch (IOException e) { log.error("Failed to update switch's flow tables " + trackedClient.getOdinAgent().getSwitch()); } } } } // Perform some book-keeping IOdinAgent agent = agentManager.getOdinAgents().get(odinAgentAddr); if (agent != null) { // Update last-heard for failure detection agent.setLastHeard(System.currentTimeMillis()); } } /** * Handle a probe message from an agent, triggered * by a particular client. 
* * @param odinAgentAddr InetAddress of agent * @param clientHwAddress MAC address of client that performed probe scan */ public synchronized void receiveProbe (final InetAddress odinAgentAddr, final MACAddress clientHwAddress) { if (odinAgentAddr != null && clientHwAddress != null && clientHwAddress.isBroadcast() == false && clientHwAddress.isMulticast() == false && agentManager.isTracked(odinAgentAddr) == true) { OdinClient oc = clientManager.getClient(clientHwAddress); // Hearing from this client for the first time if (oc == null) { oc = lvapManager.getLvapWithNullIp(clientHwAddress); clientManager.addClient(oc); } if (oc.getOdinAgent() == null) { // client is connecting for the // first time, had explicitly // disconnected, or knocked // out at as a result of an agent // failure. handoffClientToAp(clientHwAddress, odinAgentAddr); } // Update last-heard for failure detection IOdinAgent agent = agentManager.getOdinAgents().get(odinAgentAddr); if (agent != null) agent.setLastHeard(System.currentTimeMillis()); } } /** * Handle an event publication from an agent * * @param clientHwAddress client which triggered the event * @param odinAgentAddr agent at which the event was triggered * @param subscriptionIds list of subscription Ids that the event matches */ public synchronized void receivePublish (final MACAddress clientHwAddress, final InetAddress odinAgentAddr, final Map<Long, Long> subscriptionIds) { // The check for null clientHwAddress might go away // in the future if we end up having events // that are not related to clients at all. if (clientHwAddress == null || odinAgentAddr == null || subscriptionIds == null) return; IOdinAgent oa = agentManager.getOdinAgents().get(odinAgentAddr); // This should never happen! if (oa == null) return; // Update last-heard for failure detection oa.setLastHeard(System.currentTimeMillis()); for (Entry<Long, Long> entry: subscriptionIds.entrySet()) { SubscriptionCallbackTuple tup = subscriptions.get(entry.getKey()); /* This might occur as a race condition when the master * has cleared all subscriptions, but hasn't notified * the agent about it yet. */ if (tup == null) continue; NotificationCallbackContext cntx = new NotificationCallbackContext(clientHwAddress, oa, entry.getValue()); tup.cb.exec(tup.oes, cntx); } } /** Odin methods to be used by applications (from IOdinApplicationInterface) **/ /** * VAP-Handoff a client to a new AP. This operation is idempotent. * * @param newApIpAddr IPv4 address of new access point * @param hwAddrSta Ethernet address of STA to be handed off */ public void handoffClientToAp (final MACAddress clientHwAddr, final InetAddress newApIpAddr){ // As an optimisation, we probably need to get the accessing done first, // prime both nodes, and complete a handoff. 
if (clientHwAddr == null || newApIpAddr == null) { log.error("null argument in handoffClientToAp(): clientHwAddr:" + clientHwAddr + " newApIpAddr:" + newApIpAddr); return; } IOdinAgent newAgent = agentManager.getOdinAgents().get(newApIpAddr); // If new agent doesn't exist, ignore request if (newAgent == null) { log.error("Handoff request ignored: OdinAgent " + newApIpAddr + " doesn't exist"); return; } OdinClient client = clientManager.getClient(clientHwAddr); // Ignore request if we don't know the client if (client == null) { log.error("Handoff request ignored: OdinClient " + clientHwAddr + " doesn't exist"); return; } // If the client is connecting for the first time, then it // doesn't have a VAP associated with it already if (client.getOdinAgent() == null) { log.info ("Client: " + clientHwAddr + " connecting for first time. Assigning to: " + newAgent.getIpAddress()); // Push flow messages associated with the client try { newAgent.getSwitch().write(client.getOFMessageList(), null); } catch (IOException e) { log.error("Failed to update switch's flow tables " + newAgent.getSwitch()); } newAgent.addLvap(client); client.setOdinAgent(newAgent); executor.schedule(new IdleLvapReclaimTask (client), idleLvapTimeout, TimeUnit.SECONDS); return; } // If the client is already associated with AP-newIpAddr, we ignore // the request. InetAddress currentApIpAddress = client.getOdinAgent().getIpAddress(); if (currentApIpAddress.getHostAddress().equals(newApIpAddr.getHostAddress())) { log.info ("Client " + clientHwAddr + " is already associated with AP " + newApIpAddr); return; } // Push flow messages associated with the client try { newAgent.getSwitch().write(client.getOFMessageList(), null); } catch (IOException e) { log.error("Failed to update switch's flow tables " + newAgent.getSwitch()); } // Client is with another AP. We remove the VAP from // the current AP of the client, and spawn it on the new one. // We split the add and remove VAP operations across two threads // to make it faster. Note that there is a temporary inconsistent // state between setting the agent for the client and it actually // being reflected in the network client.setOdinAgent(newAgent); executor.execute(new OdinAgentLvapAddRunnable(newAgent, client)); executor.execute(new OdinAgentLvapRemoveRunnable(agentManager.getOdinAgents().get(currentApIpAddress), client)); } /** * Get the list of clients currently registered with Odin * * @return a map of OdinClient objects keyed by HW Addresses */ public Map<MACAddress, OdinClient> getClients () { return clientManager.getClients(); } /** * Get a list of Odin agents from the agent tracker * @return a map of OdinAgent objects keyed by Ipv4 addresses */ public Map<InetAddress, IOdinAgent> getOdinAgents (){ return agentManager.getOdinAgents(); } /** * Add a subscription for a particular event defined by oes. cb is * defines the application specified callback to be invoked during * notification. If the application plans to delete the subscription, * later, the onus is upon it to keep track of the subscription * id for removal later. 
* * @param oes the susbcription * @param cb the callback */ public synchronized long registerSubscription (final OdinEventSubscription oes, final NotificationCallback cb) { assert (oes != null); assert (cb != null); SubscriptionCallbackTuple tup = new SubscriptionCallbackTuple(); tup.oes = oes; tup.cb = cb; subscriptionId++; subscriptions.put(subscriptionId, tup); /** * Update the subscription list, and push to all agents * TODO: This is a common subsription string being * sent to all agents. Replace this with per-agent * subscriptions. */ subscriptionList = ""; int count = 0; for (Entry<Long, SubscriptionCallbackTuple> entry: subscriptions.entrySet()) { count++; final String addr = entry.getValue().oes.getClient(); subscriptionList = subscriptionList + entry.getKey() + " " + (addr.equals("*") ? MACAddress.valueOf("00:00:00:00:00:00") : addr) + " " + entry.getValue().oes.getStatistic() + " " + entry.getValue().oes.getRelation().ordinal() + " " + entry.getValue().oes.getValue() + " "; } subscriptionList = String.valueOf(count) + " " + subscriptionList; /** * Should probably have threads to do this */ for (Entry<InetAddress, IOdinAgent> entry : getOdinAgents().entrySet()) { pushSubscriptionListToAgent(entry.getValue()); } return subscriptionId; } /** * Remove a subscription from the list * * @param id subscription id to remove * @return */ public synchronized void unregisterSubscription (final long id) { subscriptions.remove(id); subscriptionList = ""; int count = 0; for (Entry<Long, SubscriptionCallbackTuple> entry: subscriptions.entrySet()) { count++; final String addr = entry.getValue().oes.getClient(); subscriptionList = subscriptionList + entry.getKey() + " " + (addr.equals("*") ? MACAddress.valueOf("00:00:00:00:00:00") : addr) + " " + entry.getValue().oes.getStatistic() + " " + entry.getValue().oes.getRelation().ordinal() + " " + entry.getValue().oes.getValue() + " "; } subscriptionList = String.valueOf(count) + " " + subscriptionList; /** * Should probably have threads to do this */ for (Entry<InetAddress, IOdinAgent> entry : getOdinAgents().entrySet()) { pushSubscriptionListToAgent(entry.getValue()); } } /** IFloodlightModule methods **/ @Override public Collection<Class<? extends IFloodlightService>> getModuleDependencies() { Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>>(); l.add(IFloodlightProviderService.class); l.add(IRestApiService.class); return l; } @Override public Collection<Class<? extends IFloodlightService>> getModuleServices() { return null; } @Override public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() { Map<Class<? extends IFloodlightService>, IFloodlightService> m = new HashMap<Class<? 
extends IFloodlightService>, IFloodlightService>(); m.put(OdinMaster.class, this); return m; } @Override public void init(FloodlightModuleContext context) throws FloodlightModuleException { floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class); restApi = context.getServiceImpl(IRestApiService.class); } @Override public void startUp(FloodlightModuleContext context) { floodlightProvider.addOFSwitchListener(this); floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this); restApi.addRestletRoutable(new OdinMasterWebRoutable()); agentManager.setFloodlightProvider (floodlightProvider); // read config options Map<String, String> configOptions = context.getConfigParams(this); String authFile = "odin_authorisation"; // default String authFileConfig = configOptions.get("authFile"); if (authFileConfig != null) { authFile = authFileConfig; } try { BufferedReader br = new BufferedReader (new FileReader(authFile)); String strLine; while ((strLine = br.readLine()) != null) { String [] fields = strLine.split(" "); MACAddress hwAddress = MACAddress.valueOf(fields[0]); InetAddress ipaddr = InetAddress.getByName(fields[1]); MACAddress bssid = MACAddress.valueOf(fields[2]); String essid = fields[3]; log.info("Adding client: " + fields[0] + " " + fields[1] + " " +fields[2] + " " +fields[3]); clientManager.addClient(hwAddress, ipaddr, bssid, essid); clientManager.getClients().get(hwAddress).setOFMessageList(lvapManager.getDefaultOFModList(ipaddr)); } } catch (FileNotFoundException e) { // skip } catch (IOException e) { e.printStackTrace(); } String timeoutStr = configOptions.get("idleLvapTimeout"); if (timeoutStr != null) { int timeout = Integer.parseInt(timeoutStr); if (timeout > 0) { idleLvapTimeout = timeout; } } int port = 2819; // default String portNum = configOptions.get("masterPort"); if (portNum != null) { port = Integer.parseInt(portNum); } // Spawn threads for different services executor.execute(new OdinAgentProtocolServer(this, port)); // Spawn applications String applicationStr = configOptions.get("applications"); if (applicationStr == null){ log.info("Configuration file doesn't specify any applications to load"); return; } String [] applicationList = applicationStr.split(","); for (String app : applicationList) { try { OdinApplication appInstance = (OdinApplication) Class.forName(app).newInstance(); appInstance.setOdinInterface(this); executor.execute(appInstance); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } catch (ClassNotFoundException e) { e.printStackTrace(); } } } /** IOFSwitchListener methods **/ @Override public void addedSwitch(IOFSwitch sw) { // inform-agent manager } @Override public String getName() { return "OdinMaster"; } @Override public void removedSwitch(IOFSwitch sw) { // Not all OF switches are Odin agents. We should immediately remove // any associated Odin agent then. 
final InetAddress switchIpAddr = ((InetSocketAddress) sw.getChannel().getRemoteAddress()).getAddress(); agentManager.getOdinAgents().remove(switchIpAddr); } /** * Push the subscription list to the agent * * @param oa agent to push subscription list to */ private void pushSubscriptionListToAgent (final IOdinAgent oa) { oa.setSubscriptions(subscriptionList); } private class OdinAgentLvapAddRunnable implements Runnable { final IOdinAgent oa; final OdinClient oc; public OdinAgentLvapAddRunnable(IOdinAgent newAgent, OdinClient oc) { this.oa = newAgent; this.oc = oc; } @Override public void run() { oa.addLvap(oc); } } private class OdinAgentLvapRemoveRunnable implements Runnable { final IOdinAgent oa; final OdinClient oc; public OdinAgentLvapRemoveRunnable(IOdinAgent oa, OdinClient oc) { this.oa = oa; this.oc = oc; } @Override public void run() { oa.removeLvap(oc); } } @Override public Command receive( IOFSwitch sw, OFMessage msg, FloodlightContext cntx) { // We use this to pick up DHCP response frames // and update a client's IP address details accordingly Ethernet frame = IFloodlightProviderService.bcStore.get(cntx, IFloodlightProviderService.CONTEXT_PI_PAYLOAD); IPacket payload = frame.getPayload(); // IP if (payload == null) return Command.CONTINUE; IPacket p2 = payload.getPayload(); // TCP or UDP if (p2 == null) return Command.CONTINUE; IPacket p3 = p2.getPayload(); // Application if ((p3 != null) && (p3 instanceof DHCP)) { DHCP packet = (DHCP) p3; try { final MACAddress clientHwAddr = MACAddress.valueOf(packet.getClientHardwareAddress()); final OdinClient oc = clientManager.getClients().get(clientHwAddr); // Don't bother if we're not tracking the client // or if the client is unassociated with the agent // or the agent's switch hasn't been registered yet if (oc == null || oc.getOdinAgent() == null || oc.getOdinAgent().getSwitch() == null) { return Command.CONTINUE; } // Look for the Your-IP field in the DHCP packet if (packet.getYourIPAddress() != 0) { // int -> byte array -> InetAddr final byte[] arr = ByteBuffer.allocate(4).putInt(packet.getYourIPAddress()).array(); final InetAddress yourIp = InetAddress.getByAddress(arr); // No need to invoke agent update protocol if the node // is assigned the same IP if (yourIp.equals(oc.getIpAddress())) { return Command.CONTINUE; } log.info("Updating client: " + clientHwAddr + " with ipAddr: " + yourIp); oc.setIpAddress(yourIp); oc.setOFMessageList(lvapManager.getDefaultOFModList(yourIp)); // Push flow messages associated with the client try { oc.getOdinAgent().getSwitch().write(oc.getOFMessageList(), null); } catch (IOException e) { log.error("Failed to update switch's flow tables " + oc.getOdinAgent().getSwitch()); } oc.getOdinAgent().updateLvap(oc); } } catch (UnknownHostException e) { // Shouldn't ever happen e.printStackTrace(); } } return Command.CONTINUE; } @Override public boolean isCallbackOrderingPostreq(OFType type, String name) { return false; } @Override public boolean isCallbackOrderingPrereq(OFType type, String name) { return false; } private class IdleLvapReclaimTask implements Runnable { private final OdinClient oc; public IdleLvapReclaimTask(final OdinClient oc) { this.oc = oc; } @Override public void run() { OdinClient client = clientManager.getClients().get(oc.getMacAddress()); if (client == null) { return; } // Client didn't follow through to connect try { if (client.getIpAddress().equals(InetAddress.getByName("0.0.0.0"))) { IOdinAgent agent = client.getOdinAgent(); if (agent != null) { log.info("Clearing Lvap " + 
client.getMacAddress() + " from agent:" + agent.getIpAddress() + " due to inactivity"); agent.removeLvap(client); clientManager.removeClient(client.getMacAddress()); } } } catch (UnknownHostException e) { // skip } } } }
src/main/java/net/floodlightcontroller/odinmaster/OdinMaster.java
package net.floodlightcontroller.odinmaster; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.openflow.protocol.OFMessage; import org.openflow.protocol.OFType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import net.floodlightcontroller.core.FloodlightContext; import net.floodlightcontroller.core.IFloodlightProviderService; import net.floodlightcontroller.core.IOFMessageListener; import net.floodlightcontroller.core.IOFSwitch; import net.floodlightcontroller.core.IOFSwitchListener; import net.floodlightcontroller.core.module.FloodlightModuleContext; import net.floodlightcontroller.core.module.FloodlightModuleException; import net.floodlightcontroller.core.module.IFloodlightModule; import net.floodlightcontroller.core.module.IFloodlightService; import net.floodlightcontroller.odinmaster.NotificationCallback; import net.floodlightcontroller.odinmaster.OdinEventSubscription; import net.floodlightcontroller.odinmaster.NotificationCallbackContext; import net.floodlightcontroller.odinmaster.SubscriptionCallbackTuple; import net.floodlightcontroller.odinmaster.IOdinAgent; import net.floodlightcontroller.odinmaster.OdinClient; import net.floodlightcontroller.packet.DHCP; import net.floodlightcontroller.packet.Ethernet; import net.floodlightcontroller.packet.IPacket; import net.floodlightcontroller.restserver.IRestApiService; import net.floodlightcontroller.util.MACAddress; /** * OdinMaster implementation. Exposes interfaces to OdinApplications, * and keeps track of agents and clients in the system. 
* * @author Lalith Suresh <suresh.lalith@gmail.com> * */ public class OdinMaster implements IFloodlightModule, IOFSwitchListener, IOdinApplicationInterface, IOFMessageListener, IFloodlightService { protected static Logger log = LoggerFactory.getLogger(OdinMaster.class); protected IRestApiService restApi; private IFloodlightProviderService floodlightProvider; private final ScheduledExecutorService executor = Executors.newScheduledThreadPool(10); private final AgentManager agentManager; private final ClientManager clientManager; private final ILvapManager lvapManager; private long subscriptionId = 0; private String subscriptionList = ""; private final ConcurrentMap<Long, SubscriptionCallbackTuple> subscriptions = new ConcurrentHashMap<Long, SubscriptionCallbackTuple>(); private int idleLvapTimeout = 30; // Seconds public OdinMaster(){ clientManager = new ClientManager(); agentManager = new AgentManager(clientManager); lvapManager = new LvapManagerImpl(); } public OdinMaster(AgentManager agentManager, ClientManager clientManager, ILvapManager lvapManager){ this.agentManager = agentManager; this.clientManager = clientManager; this.lvapManager = lvapManager; } /** Odin Agent->Master protocol handlers **/ /** * Handle a ping from an agent * * @param InetAddress of the agent */ public synchronized void receivePing (final InetAddress odinAgentAddr) { if (agentManager.receivePing(odinAgentAddr)) { // if the above leads to a new agent being // tracked, push the current subscription list // to it. pushSubscriptionListToAgent(agentManager.getOdinAgents().get(odinAgentAddr)); // Reclaim idle lvaps for (OdinClient client: agentManager.getOdinAgents().get(odinAgentAddr).getLvapsLocal()) { executor.schedule(new IdleLvapReclaimTask(client), idleLvapTimeout, TimeUnit.SECONDS); } } // Perform some book-keeping IOdinAgent agent = agentManager.getOdinAgents().get(odinAgentAddr); if (agent != null) { // Update last-heard for failure detection agent.setLastHeard(System.currentTimeMillis()); } } /** * Handle a probe message from an agent, triggered * by a particular client. * * @param odinAgentAddr InetAddress of agent * @param clientHwAddress MAC address of client that performed probe scan */ public synchronized void receiveProbe (final InetAddress odinAgentAddr, final MACAddress clientHwAddress) { if (odinAgentAddr != null && clientHwAddress != null && clientHwAddress.isBroadcast() == false && clientHwAddress.isMulticast() == false && agentManager.isTracked(odinAgentAddr) == true) { OdinClient oc = clientManager.getClient(clientHwAddress); // Hearing from this client for the first time if (oc == null) { oc = lvapManager.getLvapWithNullIp(clientHwAddress); clientManager.addClient(oc); } if (oc.getOdinAgent() == null) { // client is connecting for the // first time, had explicitly // disconnected, or knocked // out at as a result of an agent // failure. 
handoffClientToAp(clientHwAddress, odinAgentAddr); } // Update last-heard for failure detection IOdinAgent agent = agentManager.getOdinAgents().get(odinAgentAddr); if (agent != null) agent.setLastHeard(System.currentTimeMillis()); } } /** * Handle an event publication from an agent * * @param clientHwAddress client which triggered the event * @param odinAgentAddr agent at which the event was triggered * @param subscriptionIds list of subscription Ids that the event matches */ public synchronized void receivePublish (final MACAddress clientHwAddress, final InetAddress odinAgentAddr, final Map<Long, Long> subscriptionIds) { // The check for null clientHwAddress might go away // in the future if we end up having events // that are not related to clients at all. if (clientHwAddress == null || odinAgentAddr == null || subscriptionIds == null) return; IOdinAgent oa = agentManager.getOdinAgents().get(odinAgentAddr); // This should never happen! if (oa == null) return; // Update last-heard for failure detection oa.setLastHeard(System.currentTimeMillis()); for (Entry<Long, Long> entry: subscriptionIds.entrySet()) { SubscriptionCallbackTuple tup = subscriptions.get(entry.getKey()); /* This might occur as a race condition when the master * has cleared all subscriptions, but hasn't notified * the agent about it yet. */ if (tup == null) continue; NotificationCallbackContext cntx = new NotificationCallbackContext(clientHwAddress, oa, entry.getValue()); tup.cb.exec(tup.oes, cntx); } } /** Odin methods to be used by applications (from IOdinApplicationInterface) **/ /** * VAP-Handoff a client to a new AP. This operation is idempotent. * * @param newApIpAddr IPv4 address of new access point * @param hwAddrSta Ethernet address of STA to be handed off */ public void handoffClientToAp (final MACAddress clientHwAddr, final InetAddress newApIpAddr){ // As an optimisation, we probably need to get the accessing done first, // prime both nodes, and complete a handoff. if (clientHwAddr == null || newApIpAddr == null) { log.error("null argument in handoffClientToAp(): clientHwAddr:" + clientHwAddr + " newApIpAddr:" + newApIpAddr); return; } IOdinAgent newAgent = agentManager.getOdinAgents().get(newApIpAddr); // If new agent doesn't exist, ignore request if (newAgent == null) { log.error("Handoff request ignored: OdinAgent " + newApIpAddr + " doesn't exist"); return; } OdinClient client = clientManager.getClient(clientHwAddr); // Ignore request if we don't know the client if (client == null) { log.error("Handoff request ignored: OdinClient " + clientHwAddr + " doesn't exist"); return; } // If the client is connecting for the first time, then it // doesn't have a VAP associated with it already if (client.getOdinAgent() == null) { log.info ("Client: " + clientHwAddr + " connecting for first time. Assigning to: " + newAgent.getIpAddress()); // Push flow messages associated with the client try { newAgent.getSwitch().write(client.getOFMessageList(), null); } catch (IOException e) { log.error("Failed to update switch's flow tables " + newAgent.getSwitch()); } newAgent.addLvap(client); client.setOdinAgent(newAgent); executor.schedule(new IdleLvapReclaimTask (client), idleLvapTimeout, TimeUnit.SECONDS); return; } // If the client is already associated with AP-newIpAddr, we ignore // the request. 
InetAddress currentApIpAddress = client.getOdinAgent().getIpAddress(); if (currentApIpAddress.getHostAddress().equals(newApIpAddr.getHostAddress())) { log.info ("Client " + clientHwAddr + " is already associated with AP " + newApIpAddr); return; } // Push flow messages associated with the client try { newAgent.getSwitch().write(client.getOFMessageList(), null); } catch (IOException e) { log.error("Failed to update switch's flow tables " + newAgent.getSwitch()); } // Client is with another AP. We remove the VAP from // the current AP of the client, and spawn it on the new one. // We split the add and remove VAP operations across two threads // to make it faster. Note that there is a temporary inconsistent // state between setting the agent for the client and it actually // being reflected in the network client.setOdinAgent(newAgent); executor.execute(new OdinAgentLvapAddRunnable(newAgent, client)); executor.execute(new OdinAgentLvapRemoveRunnable(agentManager.getOdinAgents().get(currentApIpAddress), client)); } /** * Get the list of clients currently registered with Odin * * @return a map of OdinClient objects keyed by HW Addresses */ public Map<MACAddress, OdinClient> getClients () { return clientManager.getClients(); } /** * Get a list of Odin agents from the agent tracker * @return a map of OdinAgent objects keyed by Ipv4 addresses */ public Map<InetAddress, IOdinAgent> getOdinAgents (){ return agentManager.getOdinAgents(); } /** * Add a subscription for a particular event defined by oes. cb is * defines the application specified callback to be invoked during * notification. If the application plans to delete the subscription, * later, the onus is upon it to keep track of the subscription * id for removal later. * * @param oes the susbcription * @param cb the callback */ public synchronized long registerSubscription (final OdinEventSubscription oes, final NotificationCallback cb) { assert (oes != null); assert (cb != null); SubscriptionCallbackTuple tup = new SubscriptionCallbackTuple(); tup.oes = oes; tup.cb = cb; subscriptionId++; subscriptions.put(subscriptionId, tup); /** * Update the subscription list, and push to all agents * TODO: This is a common subsription string being * sent to all agents. Replace this with per-agent * subscriptions. */ subscriptionList = ""; int count = 0; for (Entry<Long, SubscriptionCallbackTuple> entry: subscriptions.entrySet()) { count++; final String addr = entry.getValue().oes.getClient(); subscriptionList = subscriptionList + entry.getKey() + " " + (addr.equals("*") ? MACAddress.valueOf("00:00:00:00:00:00") : addr) + " " + entry.getValue().oes.getStatistic() + " " + entry.getValue().oes.getRelation().ordinal() + " " + entry.getValue().oes.getValue() + " "; } subscriptionList = String.valueOf(count) + " " + subscriptionList; /** * Should probably have threads to do this */ for (Entry<InetAddress, IOdinAgent> entry : getOdinAgents().entrySet()) { pushSubscriptionListToAgent(entry.getValue()); } return subscriptionId; } /** * Remove a subscription from the list * * @param id subscription id to remove * @return */ public synchronized void unregisterSubscription (final long id) { subscriptions.remove(id); subscriptionList = ""; int count = 0; for (Entry<Long, SubscriptionCallbackTuple> entry: subscriptions.entrySet()) { count++; final String addr = entry.getValue().oes.getClient(); subscriptionList = subscriptionList + entry.getKey() + " " + (addr.equals("*") ? 
MACAddress.valueOf("00:00:00:00:00:00") : addr) + " " + entry.getValue().oes.getStatistic() + " " + entry.getValue().oes.getRelation().ordinal() + " " + entry.getValue().oes.getValue() + " "; } subscriptionList = String.valueOf(count) + " " + subscriptionList; /** * Should probably have threads to do this */ for (Entry<InetAddress, IOdinAgent> entry : getOdinAgents().entrySet()) { pushSubscriptionListToAgent(entry.getValue()); } } /** IFloodlightModule methods **/ @Override public Collection<Class<? extends IFloodlightService>> getModuleDependencies() { Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>>(); l.add(IFloodlightProviderService.class); l.add(IRestApiService.class); return l; } @Override public Collection<Class<? extends IFloodlightService>> getModuleServices() { return null; } @Override public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() { Map<Class<? extends IFloodlightService>, IFloodlightService> m = new HashMap<Class<? extends IFloodlightService>, IFloodlightService>(); m.put(OdinMaster.class, this); return m; } @Override public void init(FloodlightModuleContext context) throws FloodlightModuleException { floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class); restApi = context.getServiceImpl(IRestApiService.class); } @Override public void startUp(FloodlightModuleContext context) { floodlightProvider.addOFSwitchListener(this); floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this); restApi.addRestletRoutable(new OdinMasterWebRoutable()); agentManager.setFloodlightProvider (floodlightProvider); // read config options Map<String, String> configOptions = context.getConfigParams(this); String authFile = "odin_authorisation"; // default String authFileConfig = configOptions.get("authFile"); if (authFileConfig != null) { authFile = authFileConfig; } try { BufferedReader br = new BufferedReader (new FileReader(authFile)); String strLine; while ((strLine = br.readLine()) != null) { String [] fields = strLine.split(" "); MACAddress hwAddress = MACAddress.valueOf(fields[0]); InetAddress ipaddr = InetAddress.getByName(fields[1]); MACAddress bssid = MACAddress.valueOf(fields[2]); String essid = fields[3]; log.info("Adding client: " + fields[0] + " " + fields[1] + " " +fields[2] + " " +fields[3]); clientManager.addClient(hwAddress, ipaddr, bssid, essid); clientManager.getClients().get(hwAddress).setOFMessageList(lvapManager.getDefaultOFModList(ipaddr)); } } catch (FileNotFoundException e) { // skip } catch (IOException e) { e.printStackTrace(); } String timeoutStr = configOptions.get("idleLvapTimeout"); if (timeoutStr != null) { int timeout = Integer.parseInt(timeoutStr); if (timeout > 0) { idleLvapTimeout = timeout; } } int port = 2819; // default String portNum = configOptions.get("masterPort"); if (portNum != null) { port = Integer.parseInt(portNum); } // Spawn threads for different services executor.execute(new OdinAgentProtocolServer(this, port)); // Spawn applications String applicationStr = configOptions.get("applications"); if (applicationStr == null){ log.info("Configuration file doesn't specify any applications to load"); return; } String [] applicationList = applicationStr.split(","); for (String app : applicationList) { try { OdinApplication appInstance = (OdinApplication) Class.forName(app).newInstance(); appInstance.setOdinInterface(this); executor.execute(appInstance); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException 
e) { e.printStackTrace(); } catch (ClassNotFoundException e) { e.printStackTrace(); } } } /** IOFSwitchListener methods **/ @Override public void addedSwitch(IOFSwitch sw) { // inform-agent manager } @Override public String getName() { return "OdinMaster"; } @Override public void removedSwitch(IOFSwitch sw) { // Not all OF switches are Odin agents. We should immediately remove // any associated Odin agent then. final InetAddress switchIpAddr = ((InetSocketAddress) sw.getChannel().getRemoteAddress()).getAddress(); agentManager.getOdinAgents().remove(switchIpAddr); } /** * Push the subscription list to the agent * * @param oa agent to push subscription list to */ private void pushSubscriptionListToAgent (final IOdinAgent oa) { oa.setSubscriptions(subscriptionList); } private class OdinAgentLvapAddRunnable implements Runnable { final IOdinAgent oa; final OdinClient oc; public OdinAgentLvapAddRunnable(IOdinAgent newAgent, OdinClient oc) { this.oa = newAgent; this.oc = oc; } @Override public void run() { oa.addLvap(oc); } } private class OdinAgentLvapRemoveRunnable implements Runnable { final IOdinAgent oa; final OdinClient oc; public OdinAgentLvapRemoveRunnable(IOdinAgent oa, OdinClient oc) { this.oa = oa; this.oc = oc; } @Override public void run() { oa.removeLvap(oc); } } @Override public Command receive( IOFSwitch sw, OFMessage msg, FloodlightContext cntx) { // We use this to pick up DHCP response frames // and update a client's IP address details accordingly Ethernet frame = IFloodlightProviderService.bcStore.get(cntx, IFloodlightProviderService.CONTEXT_PI_PAYLOAD); IPacket payload = frame.getPayload(); // IP if (payload == null) return Command.CONTINUE; IPacket p2 = payload.getPayload(); // TCP or UDP if (p2 == null) return Command.CONTINUE; IPacket p3 = p2.getPayload(); // Application if ((p3 != null) && (p3 instanceof DHCP)) { DHCP packet = (DHCP) p3; try { final MACAddress clientHwAddr = MACAddress.valueOf(packet.getClientHardwareAddress()); final OdinClient oc = clientManager.getClients().get(clientHwAddr); // Don't bother if we're not tracking the client // or if the client is unassociated with the agent // or the agent's switch hasn't been registered yet if (oc == null || oc.getOdinAgent() == null || oc.getOdinAgent().getSwitch() == null) { return Command.CONTINUE; } // Look for the Your-IP field in the DHCP packet if (packet.getYourIPAddress() != 0) { // int -> byte array -> InetAddr final byte[] arr = ByteBuffer.allocate(4).putInt(packet.getYourIPAddress()).array(); final InetAddress yourIp = InetAddress.getByAddress(arr); // No need to invoke agent update protocol if the node // is assigned the same IP if (yourIp.equals(oc.getIpAddress())) { return Command.CONTINUE; } log.info("Updating client: " + clientHwAddr + " with ipAddr: " + yourIp); oc.setIpAddress(yourIp); oc.setOFMessageList(lvapManager.getDefaultOFModList(yourIp)); // Push flow messages associated with the client try { oc.getOdinAgent().getSwitch().write(oc.getOFMessageList(), null); } catch (IOException e) { log.error("Failed to update switch's flow tables " + oc.getOdinAgent().getSwitch()); } oc.getOdinAgent().updateLvap(oc); } } catch (UnknownHostException e) { // Shouldn't ever happen e.printStackTrace(); } } return Command.CONTINUE; } @Override public boolean isCallbackOrderingPostreq(OFType type, String name) { return false; } @Override public boolean isCallbackOrderingPrereq(OFType type, String name) { return false; } private class IdleLvapReclaimTask implements Runnable { private final OdinClient oc; public 
IdleLvapReclaimTask(final OdinClient oc) { this.oc = oc; } @Override public void run() { OdinClient client = clientManager.getClients().get(oc.getMacAddress()); if (client == null) { return; } // Client didn't follow through to connect try { if (client.getIpAddress().equals(InetAddress.getByName("0.0.0.0"))) { IOdinAgent agent = client.getOdinAgent(); if (agent != null) { log.info("Clearing Lvap " + client.getMacAddress() + " from agent:" + agent.getIpAddress() + " due to inactivity"); agent.removeLvap(client); clientManager.removeClient(client.getMacAddress()); } } } catch (UnknownHostException e) { // skip } } } }
Attach flows to LVAPs that are added upon post-failure agent-sync
src/main/java/net/floodlightcontroller/odinmaster/OdinMaster.java
Attach flows to LVAPs that are added upon post-failure agent-sync
Java
apache-2.0
c6da7687696a81dca8dfb013b67b6c60db77c509
0
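A self-contained sketch of the pattern this commit adds to OdinMaster.receivePing(): when an agent re-appears after a failure and reports its local LVAPs, every client that already holds a real IP address gets its default flow-mod list rebuilt and pushed to the agent's switch. The Client and Agent classes and the defaultFlowModsFor() helper below are hypothetical stand-ins for OdinClient, IOdinAgent and ILvapManager.getDefaultOFModList(); only the control flow mirrors the actual change.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PostFailureFlowSyncSketch {

    /** Hypothetical stand-in for OdinClient: an IP address plus its cached flow mods. */
    static class Client {
        final String ip;
        List<String> flowMods = new ArrayList<>();
        Client(String ip) { this.ip = ip; }
    }

    /** Hypothetical stand-in for IOdinAgent together with its OpenFlow switch. */
    static class Agent {
        final List<Client> localLvaps = new ArrayList<>();
        void writeToSwitch(List<String> mods) {
            System.out.println("pushing to switch: " + mods);
        }
    }

    /** Hypothetical stand-in for ILvapManager.getDefaultOFModList(ip). */
    static List<String> defaultFlowModsFor(String ip) {
        return Arrays.asList("allow " + ip + " -> *", "allow * -> " + ip);
    }

    /**
     * Mirrors the new loop in receivePing(): re-attach default flow rules for
     * every LVAP the re-synced agent reports, skipping clients that have not
     * completed DHCP yet (IP still 0.0.0.0).
     */
    static void onAgentResync(Agent agent) {
        for (Client client : agent.localLvaps) {
            if ("0.0.0.0".equals(client.ip)) {
                continue; // no address yet, so no flows to attach
            }
            client.flowMods = defaultFlowModsFor(client.ip);
            agent.writeToSwitch(client.flowMods);
        }
    }

    public static void main(String[] args) {
        Agent agent = new Agent();
        agent.localLvaps.add(new Client("10.0.0.7")); // gets its flows re-pushed
        agent.localLvaps.add(new Client("0.0.0.0"));  // skipped until DHCP completes
        onAgentResync(agent);
    }
}

In the listed code, per-client flows are otherwise only written during handoffClientToAp() or on a DHCP update, so a re-synced agent's switch could be left without entries for clients that were already associated; the commit closes that gap by rebuilding and pushing the entries during the ping-driven re-sync.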
LucidWorks/hadoop-solr
package com.lucidworks.hadoop.ingest; import com.google.common.base.Charsets; import com.google.common.io.Files; import com.lucidworks.hadoop.io.LWMapRedOutputFormat; import com.lucidworks.hadoop.utils.IngestJobMockMapRedOutFormat; import com.lucidworks.hadoop.utils.JobArgs; import com.lucidworks.hadoop.utils.MockRecordWriter; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.ToolRunner; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import java.io.File; import java.util.Arrays; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * * **/ public class IngestJobTest extends IngestJobInit { @Test public void testCSV() throws Exception { Path input = new Path(tempDir, "foo.csv"); StringBuilder buffer = new StringBuilder("id,bar,junk,zen,hockey"); buffer.append(lineSep).append("id-1, The quick brown fox, jumped, " + "head, gretzky, extra").append(lineSep) .append("id-2, The quick red fox, kicked, head," + " gretzky"); addContentToFS(input, buffer.toString()); String jobName = "testCsv"; String[] args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=id,1=bar, 2=junk , 3 = zen ,4 = hockey];idField[id];" + "csvFirstLineComment[true]") .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 2, null); jobName = "testCsv2"; args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=id,1=bar, 2=junk , 3 = zen ,4 = hockey];idField[id]").getJobArgs(); ToolRunner.run(conf, new IngestJob(), args); verifyJob(jobName, 3, null); jobName = "testCsvFieldId"; // id Field is the the field called "junk" args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=bar, 1=id, 2=junk , 3 = zen ,4 = hockey];idField[junk]") .getJobArgs(); ToolRunner.run(conf, new IngestJob(), args); verifyJob(jobName, 3, null); } @Test public void testDirectoy() throws Exception { String dir = "dir" + File.separator + "docs"; File dirFile = new File(ClassLoader.getSystemClassLoader().getResource(dir).getPath()); assertTrue(dir + " does not exist: " + dirFile.getAbsolutePath(), dirFile.exists()); Path input = new Path(tempDir, dir); // Upload each file to fs for (File file : dirFile.listFiles()) { if (!file.isDirectory()) { Path filePath = new Path(input, file.getName()); addContentToFS(filePath, Files.toByteArray(file)); } } String jobName = "testDirectoy"; String[] args = new JobArgs().withJobName(jobName).withClassname(DirectoryIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString() + "/*").getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); // verifyJob MockRecordWriter writer = IngestJobMockMapRedOutFormat.writers.get(jobName); assertNotNull(writer); assertEquals(7, writer.map.size()); } @Test public void testZip() throws Exception { String zip = "zip/zipData.zip"; File zipFile = new 
File(ClassLoader.getSystemClassLoader().getResource(zip).getPath()); assertTrue(zip + " does not exist: " + zipFile.getAbsolutePath(), zipFile.exists()); Path input = new Path(tempDir, zip); addContentToFS(input, Files.toByteArray(zipFile)); String jobName = "testZip"; String[] args = new JobArgs().withJobName(jobName).withClassname(ZipIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 6, new String[]{"test0.pdf", "test1.doc", "test0.doc", "test3.pdf", "test2.pdf", "test1.pdf"}); } @Test public void testCSVLWS592() throws Exception { String csv = "csv/LWS592.csv"; File csvFile = new File(ClassLoader.getSystemClassLoader().getResource(csv).getPath()); assertTrue(csv + " does not exist: " + csvFile.getAbsolutePath(), csvFile.exists()); Path input = new Path(tempDir, csv); addContentToFS(input, Files.toString(csvFile, Charsets.UTF_8)); String jobName = "testCSVLWS592"; String[] args = new JobArgs().withJobName(jobName) .withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=id,1=name_s,2=place_s];" + "csvFirstLineComment[false]") .withDArgs("-DcsvDelimiter=\u0001") .getJobArgs(); System.err.println(Arrays.toString(args)); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 2, new String[]{"2"}); } @Test public void testCSVquoteswithCircumflex() throws Exception { String csv = "csv/quotes_with_circumflex.csv"; File csvFile = new File(ClassLoader.getSystemClassLoader().getResource(csv).getPath()); assertTrue(csv + " does not exist: " + csvFile.getAbsolutePath(), csvFile.exists()); Path input = new Path(tempDir, csv); addContentToFS(input, Files.toString(csvFile, Charsets.UTF_8)); String jobName = "testCSVquoteswithCircumflex"; String[] args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvDelimiter[^];csvFieldMapping[0=id,1=name_s];csvFirstLineComment[false]") .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); // TODO: (ha"rry) the quotes is messing the field validator. 
verifyJob(jobName, 2, new String[]{"2"}); } @Test public void testWarc() throws Exception { String warc = "warc/at.warc"; File warcFile = new File(ClassLoader.getSystemClassLoader().getResource(warc).getPath()); assertTrue(warc + " does not exist: " + warcFile.getAbsolutePath(), warcFile.exists()); Path input = new Path(tempDir, warc); addContentToFS(input, Files.toString(warcFile, Charsets.UTF_8)); String jobName = "testWarc"; String[] args = new JobArgs().withJobName(jobName).withClassname(WarcIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 3, new String[]{"<urn:uuid:b328f1fe-b2ee-45c0-9139-908850810b52>", "<urn:uuid:6ee9accb-a284-47ef-8785-ed28aee2f79e>"}, "warc.WARC-Target-URI", "warc.WARC-Warcinfo-ID"); } @Test public void testSolrXML() throws Exception { String solr = "sequence" + File.separator + "frankenstein_text_solr.seq"; File solrFile = new File(ClassLoader.getSystemClassLoader().getResource(solr).getPath()); assertTrue(solr + " does not exist: " + solrFile.getAbsolutePath(), solrFile.exists()); Path input = new Path(tempDir, solr); addContentToFS(input, Files.toByteArray(solrFile)); String jobName = "testSolrXML"; String[] args = new JobArgs().withJobName(jobName).withClassname(SolrXMLIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 776, new String[]{"solr_521", "solr_137", "solr_519"}, "body"); } @Test public void testSequenceFile() throws Exception { String seq = "sequence" + File.separator + "frankenstein_text_text.seq"; File seqFile = new File(ClassLoader.getSystemClassLoader().getResource(seq).getPath()); assertTrue(seq + " does not exist: " + seqFile.getAbsolutePath(), seqFile.exists()); Path input = new Path(tempDir, seq); addContentToFS(input, Files.toByteArray(seqFile)); String jobName = "testSequenceFile"; String[] args = new JobArgs().withJobName(jobName).withClassname(SequenceFileIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 776, new String[]{"frank_seq_558", "frank_seq_171", "frank_seq_554", "frank_seq_551"}); } @Test public void testRegex() throws Exception { String regex1 = "regex" + File.separator + "regex-small.txt"; File regexFile1 = new File(ClassLoader.getSystemClassLoader().getResource(regex1).getPath()); assertTrue(regex1 + " does not exist: " + regexFile1.getAbsolutePath(), regexFile1.exists()); Path input1 = new Path(tempDir, regex1); addContentToFS(input1, Files.toByteArray(regexFile1)); String regex2 = "regex" + File.separator + "regex-small-2.txt"; File regexFile2 = new File(ClassLoader.getSystemClassLoader().getResource(regex2).getPath()); assertTrue(regex2 + " does not exist: " + regexFile2.getAbsolutePath(), regexFile2.exists()); Path input2 = new Path(tempDir, regex2); addContentToFS(input2, Files.toByteArray(regexFile2)); String jobName = "testRegex"; String[] args = new JobArgs().withJobName(jobName).withClassname(RegexIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(tempDir.toUri().toString() + File.separator + "regex" + 
File.separator + "regex-small*") .withDArgs("-D" + RegexIngestMapper.REGEX + "=\\w+", "-D" + RegexIngestMapper .GROUPS_TO_FIELDS + "=0=match") .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); MockRecordWriter mockRecordWriter = IngestJobMockMapRedOutFormat.writers.get(jobName); Assert.assertNotNull(mockRecordWriter); assertEquals(2, mockRecordWriter.map.size()); } @Test public void testGrok() throws Exception { String grok = "grok" + File.separator + "ip-word.log"; File grokFile = new File(ClassLoader.getSystemClassLoader().getResource(grok).getPath()); assertTrue(grok + " does not exist: " + grokFile.getAbsolutePath(), grokFile.exists()); Path input = new Path(tempDir, grok); addContentToFS(input, Files.toByteArray(grokFile)); // Adding the grok-conf file String grokConf = "grok" + File.separator + "IP-WORD.conf"; File grokConfFile = new File(ClassLoader.getSystemClassLoader().getResource(grokConf).getPath()); assertTrue(grokConf + " does not exist: " + grokConfFile.getAbsolutePath(), grokConfFile.exists()); String jobName = "testGrok"; String[] args = new JobArgs().withJobName(jobName).withClassname(GrokIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).withDArgs("-Dgrok.uri=" + grokConfFile) .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); MockRecordWriter mockRecordWriter = IngestJobMockMapRedOutFormat.writers.get(jobName); Assert.assertNotNull(mockRecordWriter); assertEquals(4000, mockRecordWriter.map.size()); } @Test public void testReducer() throws Exception { Path input = new Path(tempDir, "reducer.csv"); StringBuilder buffer = new StringBuilder("id,bar,junk,zen,hockey"); buffer.append(lineSep).append("id-1, The quick brown fox, jumped, " + "head, gretzky, extra").append(lineSep) .append("id-2, The quick red fox, kicked, head," + "" + " gretzky"); addContentToFS(input, buffer.toString()); String jobName = "testCsvReducers"; String[] args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).withReducersClass(IngestReducer.class.getName()) .withReducersAmount("3") .withConf("csvFieldMapping[0=id," + "1=bar, 2=junk , 3 = zen ,4 = hockey];" + "idField[id];csvFirstLineComment[true]") .getJobArgs(); conf.set("io.serializations", "com.lucidworks.hadoop.io.impl.LWMockSerealization"); conf.set("io.sort.mb", "1"); ToolRunner.run(conf, new IngestJob(), args); verifyJob(jobName, 2, null, "hockey", "field_5"); } @Test public void testBadArgs() throws Exception { String jobName = "testDidnotIngetAnyDocs"; String[] args = new JobArgs().withJobName(jobName).withClassname(DirectoryIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(tempDir.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("Didn't ingest any document"); Path input = new Path(tempDir, "foo.csv"); StringBuilder buffer = new StringBuilder("id,bar,junk,zen,hockey"); buffer.append(lineSep).append("id-1, The quick brown fox, jumped, " + "head, gretzky, extra").append(lineSep) .append("id-2, The quick red fox, kicked, head," + " gretzky"); addContentToFS(input, buffer.toString()); jobName = "testBadMapper"; // foo -> bad mapper option args = new 
JobArgs().withJobName(jobName).withClassname("foo").withCollection(DEFAULT_COLLECTION) .withZkString(getBaseUrl()).withInput(input.toUri().toString()).getJobArgs(); val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("Unable to instantiate AbstractIngestMapper class"); jobName = "testInvalidSolrConnection"; // Plus one to the current jetty port to ensure this not exists String invalidSolrConnection = getBaseUrl() + "+1"; args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(invalidSolrConnection) .withInput(input.toUri().toString()).withOutputFormat(LWMapRedOutputFormat.class.getName()) .getJobArgs(); val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("server not available on"); jobName = "testBadReducer"; args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).withReducersClass("foo").withReducersAmount("3") .getJobArgs(); val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("Unable to instantiate IngestReducer class"); jobName = "testNoZKorS"; args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withInput(input.toUri().toString()) .withOutputFormat(LWMapRedOutputFormat.class.getName()).getJobArgs(); val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("You must specify either the.*or the"); // Missing options - will print the usage jobName = "testNullArgs"; val = ToolRunner.run(conf, new IngestJob(), null); assertEquals(1, val); assertErrorMessage("Missing required option "); } @Test public void testPingWrongCollection() throws Exception { String jobName = "testInvalidSolrConnection"; // Plus one to the current jetty port to ensure this not exists String invalidSolrConnection = getBaseUrl(); Path input = new Path(tempDir, "foo.csv"); StringBuilder buffer = new StringBuilder("id,bar,junk,zen,hockey"); buffer.append(lineSep).append("id-1, The quick brown fox, jumped, " + "head, gretzky, extra").append(lineSep) .append("id-2, The quick red fox, kicked, head," + "" + " gretzky"); addContentToFS(input, buffer.toString()); String[] args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection("INVALID-COLLECTION").withZkString(invalidSolrConnection) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=id,1=bar, 2=junk " + ", 3 = zen ,4 = hockey];idField[id]") .withOutputFormat(LWMapRedOutputFormat.class.getName()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("Make sure that collection"); } @Test public void testConfHandling() throws Exception { JobConf conf = new JobConf(); TestIngestJob ij = new TestIngestJob(); ij.processConf("foo[true];bar[1];junk[2.3];hockey[this is a string]", conf); assertTrue(conf.getBoolean("foo", false)); assertEquals(1, conf.getInt("bar", 0)); assertEquals(2.3, conf.getFloat("junk", 0), 0.1); assertEquals("this is a string", conf.get("hockey")); try { ij.processConf("foo", conf);// bad fail(); } catch (Exception e) { assertTrue(e.getMessage().startsWith("Can't parse")); } } @Test public void testXML() throws Exception { String xsl = "xml" + File.separator + "xml_ingest_mapper.xsl"; File xslFile = new 
File(ClassLoader.getSystemClassLoader().getResource(xsl).getPath()); assertTrue(xsl + " does not exist: " + xslFile.getAbsolutePath(), xslFile.exists()); Path inputXsl = new Path(tempDir, xsl); addContentToFS(inputXsl, Files.toByteArray(xslFile)); String xml = "xml" + File.separator + "foo.xml"; File xmlFile = new File(ClassLoader.getSystemClassLoader().getResource(xml).getPath()); assertTrue(xml + " does not exist: " + xmlFile.getAbsolutePath(), xmlFile.exists()); Path input = new Path(tempDir, xml); addContentToFS(input, Files.toByteArray(xmlFile)); String jobName = "testXml"; String[] args = new JobArgs().withJobName(jobName).withClassname(XMLIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).withConf("lww.xslt[" + inputXsl + "];lww.xml" + ".start[root]; lww.xml.end[root];lww.xml.docXPathExpr[//doc];lww.xml.includeParentAttrsPrefix[p_]") .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 2, new String[]{"1", "2"}, "text", "int"); } }
solr-hadoop-core/src/test/java/com/lucidworks/hadoop/ingest/IngestJobTest.java
package com.lucidworks.hadoop.ingest; import com.google.common.base.Charsets; import com.google.common.io.Files; import com.lucidworks.hadoop.io.LWMapRedOutputFormat; import com.lucidworks.hadoop.utils.IngestJobMockMapRedOutFormat; import com.lucidworks.hadoop.utils.JobArgs; import com.lucidworks.hadoop.utils.MockRecordWriter; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.ToolRunner; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import java.io.File; import java.util.Arrays; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * * **/ public class IngestJobTest extends IngestJobInit { @Ignore @Test public void testCSV() throws Exception { Path input = new Path(tempDir, "foo.csv"); StringBuilder buffer = new StringBuilder("id,bar,junk,zen,hockey"); buffer.append(lineSep).append("id-1, The quick brown fox, jumped, " + "head, gretzky, extra").append(lineSep) .append("id-2, The quick red fox, kicked, head," + " gretzky"); addContentToFS(input, buffer.toString()); String jobName = "testCsv"; String[] args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=id,1=bar, 2=junk , 3 = zen ,4 = hockey];idField[id];" + "csvFirstLineComment[true]") .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 2, null); jobName = "testCsv2"; args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=id,1=bar, 2=junk , 3 = zen ,4 = hockey];idField[id]").getJobArgs(); ToolRunner.run(conf, new IngestJob(), args); verifyJob(jobName, 3, null); jobName = "testCsvFieldId"; // id Field is the the field called "junk" args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=bar, 1=id, 2=junk , 3 = zen ,4 = hockey];idField[junk]") .getJobArgs(); ToolRunner.run(conf, new IngestJob(), args); verifyJob(jobName, 3, null); } @Test public void testDirectoy() throws Exception { String dir = "dir" + File.separator + "docs"; File dirFile = new File(ClassLoader.getSystemClassLoader().getResource(dir).getPath()); assertTrue(dir + " does not exist: " + dirFile.getAbsolutePath(), dirFile.exists()); Path input = new Path(tempDir, dir); // Upload each file to fs for (File file : dirFile.listFiles()) { if (!file.isDirectory()) { Path filePath = new Path(input, file.getName()); addContentToFS(filePath, Files.toByteArray(file)); } } String jobName = "testDirectoy"; String[] args = new JobArgs().withJobName(jobName).withClassname(DirectoryIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString() + "/*").getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); // verifyJob MockRecordWriter writer = IngestJobMockMapRedOutFormat.writers.get(jobName); assertNotNull(writer); assertEquals(7, writer.map.size()); } @Test public void testZip() throws Exception { String zip = "zip/zipData.zip"; File zipFile = new 
File(ClassLoader.getSystemClassLoader().getResource(zip).getPath()); assertTrue(zip + " does not exist: " + zipFile.getAbsolutePath(), zipFile.exists()); Path input = new Path(tempDir, zip); addContentToFS(input, Files.toByteArray(zipFile)); String jobName = "testZip"; String[] args = new JobArgs().withJobName(jobName).withClassname(ZipIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 6, new String[]{"test0.pdf", "test1.doc", "test0.doc", "test3.pdf", "test2.pdf", "test1.pdf"}); } @Test public void testCSVLWS592() throws Exception { String csv = "csv/LWS592.csv"; File csvFile = new File(ClassLoader.getSystemClassLoader().getResource(csv).getPath()); assertTrue(csv + " does not exist: " + csvFile.getAbsolutePath(), csvFile.exists()); Path input = new Path(tempDir, csv); addContentToFS(input, Files.toString(csvFile, Charsets.UTF_8)); String jobName = "testCSVLWS592"; String[] args = new JobArgs().withJobName(jobName) .withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=id,1=name_s,2=place_s];" + "csvFirstLineComment[false]") .withDArgs("-DcsvDelimiter=\u0001") .getJobArgs(); System.err.println(Arrays.toString(args)); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 2, new String[]{"2"}); } @Test public void testCSVquoteswithCircumflex() throws Exception { String csv = "csv/quotes_with_circumflex.csv"; File csvFile = new File(ClassLoader.getSystemClassLoader().getResource(csv).getPath()); assertTrue(csv + " does not exist: " + csvFile.getAbsolutePath(), csvFile.exists()); Path input = new Path(tempDir, csv); addContentToFS(input, Files.toString(csvFile, Charsets.UTF_8)); String jobName = "testCSVquoteswithCircumflex"; String[] args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()) .withConf("csvDelimiter[^];csvFieldMapping[0=id,1=name_s];csvFirstLineComment[false]") .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); // TODO: (ha"rry) the quotes is messing the field validator. 
verifyJob(jobName, 2, new String[]{"2"}); } @Test public void testWarc() throws Exception { String warc = "warc/at.warc"; File warcFile = new File(ClassLoader.getSystemClassLoader().getResource(warc).getPath()); assertTrue(warc + " does not exist: " + warcFile.getAbsolutePath(), warcFile.exists()); Path input = new Path(tempDir, warc); addContentToFS(input, Files.toString(warcFile, Charsets.UTF_8)); String jobName = "testWarc"; String[] args = new JobArgs().withJobName(jobName).withClassname(WarcIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 3, new String[]{"<urn:uuid:b328f1fe-b2ee-45c0-9139-908850810b52>", "<urn:uuid:6ee9accb-a284-47ef-8785-ed28aee2f79e>"}, "warc.WARC-Target-URI", "warc.WARC-Warcinfo-ID"); } @Test public void testSolrXML() throws Exception { String solr = "sequence" + File.separator + "frankenstein_text_solr.seq"; File solrFile = new File(ClassLoader.getSystemClassLoader().getResource(solr).getPath()); assertTrue(solr + " does not exist: " + solrFile.getAbsolutePath(), solrFile.exists()); Path input = new Path(tempDir, solr); addContentToFS(input, Files.toByteArray(solrFile)); String jobName = "testSolrXML"; String[] args = new JobArgs().withJobName(jobName).withClassname(SolrXMLIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 776, new String[]{"solr_521", "solr_137", "solr_519"}, "body"); } @Test public void testSequenceFile() throws Exception { String seq = "sequence" + File.separator + "frankenstein_text_text.seq"; File seqFile = new File(ClassLoader.getSystemClassLoader().getResource(seq).getPath()); assertTrue(seq + " does not exist: " + seqFile.getAbsolutePath(), seqFile.exists()); Path input = new Path(tempDir, seq); addContentToFS(input, Files.toByteArray(seqFile)); String jobName = "testSequenceFile"; String[] args = new JobArgs().withJobName(jobName).withClassname(SequenceFileIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 776, new String[]{"frank_seq_558", "frank_seq_171", "frank_seq_554", "frank_seq_551"}); } @Test public void testRegex() throws Exception { String regex1 = "regex" + File.separator + "regex-small.txt"; File regexFile1 = new File(ClassLoader.getSystemClassLoader().getResource(regex1).getPath()); assertTrue(regex1 + " does not exist: " + regexFile1.getAbsolutePath(), regexFile1.exists()); Path input1 = new Path(tempDir, regex1); addContentToFS(input1, Files.toByteArray(regexFile1)); String regex2 = "regex" + File.separator + "regex-small-2.txt"; File regexFile2 = new File(ClassLoader.getSystemClassLoader().getResource(regex2).getPath()); assertTrue(regex2 + " does not exist: " + regexFile2.getAbsolutePath(), regexFile2.exists()); Path input2 = new Path(tempDir, regex2); addContentToFS(input2, Files.toByteArray(regexFile2)); String jobName = "testRegex"; String[] args = new JobArgs().withJobName(jobName).withClassname(RegexIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(tempDir.toUri().toString() + File.separator + "regex" + 
File.separator + "regex-small*") .withDArgs("-D" + RegexIngestMapper.REGEX + "=\\w+", "-D" + RegexIngestMapper .GROUPS_TO_FIELDS + "=0=match") .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); MockRecordWriter mockRecordWriter = IngestJobMockMapRedOutFormat.writers.get(jobName); Assert.assertNotNull(mockRecordWriter); assertEquals(2, mockRecordWriter.map.size()); } @Test public void testGrok() throws Exception { String grok = "grok" + File.separator + "ip-word.log"; File grokFile = new File(ClassLoader.getSystemClassLoader().getResource(grok).getPath()); assertTrue(grok + " does not exist: " + grokFile.getAbsolutePath(), grokFile.exists()); Path input = new Path(tempDir, grok); addContentToFS(input, Files.toByteArray(grokFile)); // Adding the grok-conf file String grokConf = "grok" + File.separator + "IP-WORD.conf"; File grokConfFile = new File(ClassLoader.getSystemClassLoader().getResource(grokConf).getPath()); assertTrue(grokConf + " does not exist: " + grokConfFile.getAbsolutePath(), grokConfFile.exists()); String jobName = "testGrok"; String[] args = new JobArgs().withJobName(jobName).withClassname(GrokIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).withDArgs("-Dgrok.uri=" + grokConfFile) .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); MockRecordWriter mockRecordWriter = IngestJobMockMapRedOutFormat.writers.get(jobName); Assert.assertNotNull(mockRecordWriter); assertEquals(4000, mockRecordWriter.map.size()); } @Test public void testReducer() throws Exception { Path input = new Path(tempDir, "reducer.csv"); StringBuilder buffer = new StringBuilder("id,bar,junk,zen,hockey"); buffer.append(lineSep).append("id-1, The quick brown fox, jumped, " + "head, gretzky, extra").append(lineSep) .append("id-2, The quick red fox, kicked, head," + "" + " gretzky"); addContentToFS(input, buffer.toString()); String jobName = "testCsvReducers"; String[] args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).withReducersClass(IngestReducer.class.getName()) .withReducersAmount("3") .withConf("csvFieldMapping[0=id," + "1=bar, 2=junk , 3 = zen ,4 = hockey];" + "idField[id];csvFirstLineComment[true]") .getJobArgs(); conf.set("io.serializations", "com.lucidworks.hadoop.io.impl.LWMockSerealization"); conf.set("io.sort.mb", "1"); ToolRunner.run(conf, new IngestJob(), args); verifyJob(jobName, 2, null, "hockey", "field_5"); } @Test public void testBadArgs() throws Exception { String jobName = "testDidnotIngetAnyDocs"; String[] args = new JobArgs().withJobName(jobName).withClassname(DirectoryIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(tempDir.toUri().toString()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("Didn't ingest any document"); Path input = new Path(tempDir, "foo.csv"); StringBuilder buffer = new StringBuilder("id,bar,junk,zen,hockey"); buffer.append(lineSep).append("id-1, The quick brown fox, jumped, " + "head, gretzky, extra").append(lineSep) .append("id-2, The quick red fox, kicked, head," + " gretzky"); addContentToFS(input, buffer.toString()); jobName = "testBadMapper"; // foo -> bad mapper option args = new 
JobArgs().withJobName(jobName).withClassname("foo").withCollection(DEFAULT_COLLECTION) .withZkString(getBaseUrl()).withInput(input.toUri().toString()).getJobArgs(); val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("Unable to instantiate AbstractIngestMapper class"); jobName = "testInvalidSolrConnection"; // Plus one to the current jetty port to ensure this not exists String invalidSolrConnection = getBaseUrl() + "+1"; args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(invalidSolrConnection) .withInput(input.toUri().toString()).withOutputFormat(LWMapRedOutputFormat.class.getName()) .getJobArgs(); val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("server not available on"); jobName = "testBadReducer"; args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).withReducersClass("foo").withReducersAmount("3") .getJobArgs(); val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("Unable to instantiate IngestReducer class"); jobName = "testNoZKorS"; args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withInput(input.toUri().toString()) .withOutputFormat(LWMapRedOutputFormat.class.getName()).getJobArgs(); val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("You must specify either the.*or the"); // Missing options - will print the usage jobName = "testNullArgs"; val = ToolRunner.run(conf, new IngestJob(), null); assertEquals(1, val); assertErrorMessage("Missing required option "); } @Test public void testPingWrongCollection() throws Exception { String jobName = "testInvalidSolrConnection"; // Plus one to the current jetty port to ensure this not exists String invalidSolrConnection = getBaseUrl(); Path input = new Path(tempDir, "foo.csv"); StringBuilder buffer = new StringBuilder("id,bar,junk,zen,hockey"); buffer.append(lineSep).append("id-1, The quick brown fox, jumped, " + "head, gretzky, extra").append(lineSep) .append("id-2, The quick red fox, kicked, head," + "" + " gretzky"); addContentToFS(input, buffer.toString()); String[] args = new JobArgs().withJobName(jobName).withClassname(CSVIngestMapper.class.getName()) .withCollection("INVALID-COLLECTION").withZkString(invalidSolrConnection) .withInput(input.toUri().toString()) .withConf("csvFieldMapping[0=id,1=bar, 2=junk " + ", 3 = zen ,4 = hockey];idField[id]") .withOutputFormat(LWMapRedOutputFormat.class.getName()).getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(1, val); assertErrorMessage("Make sure that collection"); } @Test public void testConfHandling() throws Exception { JobConf conf = new JobConf(); TestIngestJob ij = new TestIngestJob(); ij.processConf("foo[true];bar[1];junk[2.3];hockey[this is a string]", conf); assertTrue(conf.getBoolean("foo", false)); assertEquals(1, conf.getInt("bar", 0)); assertEquals(2.3, conf.getFloat("junk", 0), 0.1); assertEquals("this is a string", conf.get("hockey")); try { ij.processConf("foo", conf);// bad fail(); } catch (Exception e) { assertTrue(e.getMessage().startsWith("Can't parse")); } } @Test public void testXML() throws Exception { String xsl = "xml" + File.separator + "xml_ingest_mapper.xsl"; File xslFile = new 
File(ClassLoader.getSystemClassLoader().getResource(xsl).getPath()); assertTrue(xsl + " does not exist: " + xslFile.getAbsolutePath(), xslFile.exists()); Path inputXsl = new Path(tempDir, xsl); addContentToFS(inputXsl, Files.toByteArray(xslFile)); String xml = "xml" + File.separator + "foo.xml"; File xmlFile = new File(ClassLoader.getSystemClassLoader().getResource(xml).getPath()); assertTrue(xml + " does not exist: " + xmlFile.getAbsolutePath(), xmlFile.exists()); Path input = new Path(tempDir, xml); addContentToFS(input, Files.toByteArray(xmlFile)); String jobName = "testXml"; String[] args = new JobArgs().withJobName(jobName).withClassname(XMLIngestMapper.class.getName()) .withCollection(DEFAULT_COLLECTION).withZkString(getBaseUrl()) .withInput(input.toUri().toString()).withConf("lww.xslt[" + inputXsl + "];lww.xml" + ".start[root]; lww.xml.end[root];lww.xml.docXPathExpr[//doc];lww.xml.includeParentAttrsPrefix[p_]") .getJobArgs(); int val = ToolRunner.run(conf, new IngestJob(), args); assertEquals(0, val); verifyJob(jobName, 2, new String[]{"1", "2"}, "text", "int"); } }
LWSHADOOP-517: Uncomment IngestJobTest

A recent commit fixed a bug causing sporadic IngestJobTest failures. With this bug fixed, IngestJobTest can be uncommented and added back to test runs.
solr-hadoop-core/src/test/java/com/lucidworks/hadoop/ingest/IngestJobTest.java
LWSHADOOP-517: Uncomment IngestJobTest
Java
apache-2.0
cfae680957faf3aca85d5402e590a900972c9708
0
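The record above differs between old_contents and new_contents only in that the JUnit @Ignore annotation preceding testCSV was removed, re-enabling the test that the commit message describes. The following is a minimal sketch of that kind of change, not code from the hadoop-solr repository; the class name IgnoreRemovalSketch and the elided test body are illustrative assumptions.

import org.junit.Test;

public class IgnoreRemovalSketch {

    // Before the change the method carried JUnit's @Ignore annotation
    // (import org.junit.Ignore), so the runner skipped it:
    //
    //   @Ignore
    //   @Test
    //   public void testCSV() throws Exception { ... }

    // After the change only @Test remains, so the runner executes the test again.
    @Test
    public void testCSV() throws Exception {
        // Body elided; the real test builds a CSV input file, runs
        // ToolRunner.run(conf, new IngestJob(), args) and verifies the
        // MockRecordWriter output, as captured in new_contents above.
    }
}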
mikeb01/Aeron,mikeb01/Aeron,mikeb01/Aeron,mikeb01/Aeron
/* * Copyright 2014-2021 Real Logic Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron.cluster; import io.aeron.*; import io.aeron.archive.client.AeronArchive; import io.aeron.archive.client.ArchiveException; import io.aeron.archive.client.RecordingSignalPoller; import io.aeron.archive.codecs.*; import io.aeron.archive.status.RecordingPos; import io.aeron.cluster.client.AeronCluster; import io.aeron.cluster.client.ClusterException; import io.aeron.cluster.codecs.MessageHeaderDecoder; import io.aeron.cluster.codecs.*; import io.aeron.cluster.service.*; import io.aeron.exceptions.AeronException; import io.aeron.logbuffer.ControlledFragmentHandler; import io.aeron.security.Authenticator; import io.aeron.status.ReadableCounter; import org.agrona.*; import org.agrona.collections.*; import org.agrona.concurrent.*; import org.agrona.concurrent.status.CountersReader; import java.util.*; import java.util.concurrent.TimeUnit; import static io.aeron.Aeron.NULL_VALUE; import static io.aeron.CommonContext.*; import static io.aeron.archive.client.AeronArchive.NULL_LENGTH; import static io.aeron.archive.client.AeronArchive.NULL_POSITION; import static io.aeron.archive.client.ReplayMerge.LIVE_ADD_MAX_WINDOW; import static io.aeron.archive.codecs.SourceLocation.LOCAL; import static io.aeron.cluster.ClusterMember.quorumPosition; import static io.aeron.cluster.ClusterSession.State.*; import static io.aeron.cluster.ConsensusModule.Configuration.*; import static io.aeron.cluster.client.AeronCluster.SESSION_HEADER_LENGTH; import static io.aeron.cluster.service.ClusteredServiceContainer.Configuration.MARK_FILE_UPDATE_INTERVAL_NS; import static io.aeron.exceptions.AeronException.Category.WARN; import static java.lang.Math.min; import static org.agrona.BitUtil.findNextPositivePowerOfTwo; final class ConsensusModuleAgent implements Agent { static final long SLOW_TICK_INTERVAL_NS = TimeUnit.MILLISECONDS.toNanos(10); private static final int SERVICE_MESSAGE_LIMIT = 20; private final long sessionTimeoutNs; private final long leaderHeartbeatIntervalNs; private final long leaderHeartbeatTimeoutNs; private long unavailableCounterHandlerRegistrationId; private long nextSessionId = 1; private long nextServiceSessionId = Long.MIN_VALUE + 1; private long logServiceSessionId = Long.MIN_VALUE; private long leadershipTermId = NULL_VALUE; private long replayLeadershipTermId = NULL_VALUE; private long expectedAckPosition = 0; private long serviceAckId = 0; private long terminationPosition = NULL_POSITION; private long notifiedCommitPosition = 0; private long lastAppendPosition = 0; private long timeOfLastLogUpdateNs = 0; private long timeOfLastAppendPositionNs = 0; private long slowTickDeadlineNs = 0; private long markFileUpdateDeadlineNs = 0; private int pendingServiceMessageHeadOffset = 0; private int uncommittedServiceMessages = 0; private int memberId; private int highMemberId; private int pendingMemberRemovals = 0; private long logPublicationChannelTag; private ReadableCounter appendPosition = null; private final 
Counter commitPosition; private ConsensusModule.State state = ConsensusModule.State.INIT; private Cluster.Role role = Cluster.Role.FOLLOWER; private ClusterMember[] activeMembers; private ClusterMember[] passiveMembers = ClusterMember.EMPTY_MEMBERS; private ClusterMember leaderMember; private ClusterMember thisMember; private long[] rankedPositions; private final long[] serviceClientIds; private final ArrayDeque<ServiceAck>[] serviceAckQueues; private final Counter clusterRoleCounter; private final ClusterMarkFile markFile; private final AgentInvoker aeronClientInvoker; private final ClusterClock clusterClock; private final TimeUnit clusterTimeUnit; private final Counter moduleState; private final Counter controlToggle; private final TimerService timerService; private final ConsensusModuleAdapter consensusModuleAdapter; private final ServiceProxy serviceProxy; private final IngressAdapter ingressAdapter; private final EgressPublisher egressPublisher; private final LogPublisher logPublisher; private final LogAdapter logAdapter; private final ConsensusAdapter consensusAdapter; private final ConsensusPublisher consensusPublisher = new ConsensusPublisher(); private final Long2ObjectHashMap<ClusterSession> sessionByIdMap = new Long2ObjectHashMap<>(); private final ArrayList<ClusterSession> pendingSessions = new ArrayList<>(); private final ArrayList<ClusterSession> rejectedSessions = new ArrayList<>(); private final ArrayList<ClusterSession> redirectSessions = new ArrayList<>(); private final Int2ObjectHashMap<ClusterMember> clusterMemberByIdMap = new Int2ObjectHashMap<>(); private final Long2LongCounterMap expiredTimerCountByCorrelationIdMap = new Long2LongCounterMap(0); private final ArrayDeque<ClusterSession> uncommittedClosedSessions = new ArrayDeque<>(); private final LongArrayQueue uncommittedTimers = new LongArrayQueue(Long.MAX_VALUE); private final ExpandableRingBuffer pendingServiceMessages = new ExpandableRingBuffer(); private final ExpandableRingBuffer.MessageConsumer serviceSessionMessageAppender = this::serviceSessionMessageAppender; private final ExpandableRingBuffer.MessageConsumer leaderServiceSessionMessageSweeper = this::leaderServiceSessionMessageSweeper; private final ExpandableRingBuffer.MessageConsumer followerServiceSessionMessageSweeper = this::followerServiceSessionMessageSweeper; private final Authenticator authenticator; private final ClusterSessionProxy sessionProxy; private final Aeron aeron; private final ConsensusModule.Context ctx; private final IdleStrategy idleStrategy; private final RecordingLog recordingLog; private final ArrayList<RecordingLog.Snapshot> dynamicJoinSnapshots = new ArrayList<>(); private RecordingLog.RecoveryPlan recoveryPlan; private AeronArchive archive; private RecordingSignalPoller recordingSignalPoller; private Election election; private DynamicJoin dynamicJoin; private ClusterTermination clusterTermination; private long logSubscriptionId = NULL_VALUE; private long logRecordingId = NULL_VALUE; private long logRecordedPosition = NULL_POSITION; private String liveLogDestination; private String catchupLogDestination; private String ingressEndpoints; private boolean isElectionRequired; ConsensusModuleAgent(final ConsensusModule.Context ctx) { this.ctx = ctx; this.aeron = ctx.aeron(); this.clusterClock = ctx.clusterClock(); this.clusterTimeUnit = clusterClock.timeUnit(); this.sessionTimeoutNs = ctx.sessionTimeoutNs(); this.leaderHeartbeatIntervalNs = ctx.leaderHeartbeatIntervalNs(); this.leaderHeartbeatTimeoutNs = 
ctx.leaderHeartbeatTimeoutNs(); this.egressPublisher = ctx.egressPublisher(); this.moduleState = ctx.moduleStateCounter(); this.commitPosition = ctx.commitPositionCounter(); this.controlToggle = ctx.controlToggleCounter(); this.logPublisher = ctx.logPublisher(); this.idleStrategy = ctx.idleStrategy(); this.timerService = new TimerService( this, clusterTimeUnit, 0, findNextPositivePowerOfTwo(clusterTimeUnit.convert(ctx.wheelTickResolutionNs(), TimeUnit.NANOSECONDS)), ctx.ticksPerWheel()); this.activeMembers = ClusterMember.parse(ctx.clusterMembers()); this.sessionProxy = new ClusterSessionProxy(egressPublisher); this.memberId = ctx.clusterMemberId(); this.clusterRoleCounter = ctx.clusterNodeRoleCounter(); this.markFile = ctx.clusterMarkFile(); this.recordingLog = ctx.recordingLog(); this.serviceClientIds = new long[ctx.serviceCount()]; Arrays.fill(serviceClientIds, NULL_VALUE); this.serviceAckQueues = ServiceAck.newArray(ctx.serviceCount()); this.highMemberId = ClusterMember.highMemberId(activeMembers); aeronClientInvoker = aeron.conductorAgentInvoker(); aeronClientInvoker.invoke(); rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)]; role(Cluster.Role.FOLLOWER); ClusterMember.addClusterMemberIds(activeMembers, clusterMemberByIdMap); thisMember = ClusterMember.determineMember(activeMembers, ctx.clusterMemberId(), ctx.memberEndpoints()); leaderMember = thisMember; final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel()); if (!consensusUri.containsKey(ENDPOINT_PARAM_NAME)) { consensusUri.put(ENDPOINT_PARAM_NAME, thisMember.consensusEndpoint()); } consensusAdapter = new ConsensusAdapter( aeron.addSubscription(consensusUri.toString(), ctx.consensusStreamId()), this); ClusterMember.addConsensusPublications(activeMembers, thisMember, consensusUri, ctx.consensusStreamId(), aeron); ingressAdapter = new IngressAdapter(ctx.ingressFragmentLimit(), this); logAdapter = new LogAdapter(this, ctx.logFragmentLimit()); consensusModuleAdapter = new ConsensusModuleAdapter( aeron.addSubscription(ctx.controlChannel(), ctx.consensusModuleStreamId()), this); serviceProxy = new ServiceProxy(aeron.addPublication(ctx.controlChannel(), ctx.serviceStreamId())); authenticator = ctx.authenticatorSupplier().get(); } /** * {@inheritDoc} */ public void onClose() { if (!aeron.isClosed()) { aeron.removeUnavailableCounterHandler(unavailableCounterHandlerRegistrationId); tryStopLogRecording(); if (!ctx.ownsAeronClient()) { logPublisher.disconnect(ctx.countedErrorHandler()); logAdapter.disconnect(ctx.countedErrorHandler()); final CountedErrorHandler errorHandler = ctx.countedErrorHandler(); for (final ClusterSession session : sessionByIdMap.values()) { session.close(errorHandler); } CloseHelper.close(errorHandler, ingressAdapter); ClusterMember.closeConsensusPublications(errorHandler, activeMembers); CloseHelper.close(errorHandler, consensusAdapter); CloseHelper.close(errorHandler, serviceProxy); CloseHelper.close(errorHandler, consensusModuleAdapter); CloseHelper.close(errorHandler, archive); } state(ConsensusModule.State.CLOSED); } markFile.updateActivityTimestamp(NULL_VALUE); ctx.close(); } /** * {@inheritDoc} */ public void onStart() { archive = AeronArchive.connect(ctx.archiveContext().clone()); recordingSignalPoller = new RecordingSignalPoller( archive.controlSessionId(), archive.controlResponsePoller().subscription()); if (null == (dynamicJoin = requiresDynamicJoin())) { final long lastTermRecordingId = recordingLog.findLastTermRecordingId(); if (NULL_VALUE != 
lastTermRecordingId) { archive.tryStopRecordingByIdentity(lastTermRecordingId); } recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId); if (null != recoveryPlan.log) { logRecordingId = recoveryPlan.log.recordingId; } try (Counter ignore = addRecoveryStateCounter(recoveryPlan)) { if (!recoveryPlan.snapshots.isEmpty()) { loadSnapshot(recoveryPlan.snapshots.get(0), archive); } while (!ServiceAck.hasReached(expectedAckPosition, serviceAckId, serviceAckQueues)) { idle(consensusModuleAdapter.poll()); } captureServiceClientIds(); ++serviceAckId; } election = new Election( true, recoveryPlan.lastLeadershipTermId, commitPosition.getWeak(), recoveryPlan.appendedLogPosition, activeMembers, clusterMemberByIdMap, thisMember, consensusPublisher, ctx, this); state(ConsensusModule.State.ACTIVE); } unavailableCounterHandlerRegistrationId = aeron.addUnavailableCounterHandler(this::onUnavailableCounter); } /** * {@inheritDoc} */ public int doWork() { int workCount = 0; final long now = clusterClock.time(); final long nowNs = clusterTimeUnit.toNanos(now); if (nowNs >= slowTickDeadlineNs) { slowTickDeadlineNs = nowNs + SLOW_TICK_INTERVAL_NS; workCount += slowTickWork(clusterTimeUnit.toMillis(now), nowNs); } workCount += consensusAdapter.poll(); if (null != dynamicJoin) { workCount += dynamicJoin.doWork(nowNs); } else if (null != election) { workCount += election.doWork(nowNs); } else { workCount += consensusWork(now, nowNs); } return workCount; } /** * {@inheritDoc} */ public String roleName() { return "consensus-module_" + ctx.clusterId() + "_" + memberId; } void onSessionConnect( final long correlationId, final int responseStreamId, final int version, final String responseChannel, final byte[] encodedCredentials) { final long clusterSessionId = Cluster.Role.LEADER == role ? 
nextSessionId++ : NULL_VALUE; final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel); session.connect(aeron); final long now = clusterClock.time(); session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId); if (Cluster.Role.LEADER != role) { redirectSessions.add(session); } else { if (AeronCluster.Configuration.PROTOCOL_MAJOR_VERSION != SemanticVersion.major(version)) { final String detail = SESSION_INVALID_VERSION_MSG + " " + SemanticVersion.toString(version) + ", cluster is " + SemanticVersion.toString(AeronCluster.Configuration.PROTOCOL_SEMANTIC_VERSION); session.reject(EventCode.ERROR, detail); rejectedSessions.add(session); } else if (pendingSessions.size() + sessionByIdMap.size() >= ctx.maxConcurrentSessions()) { session.reject(EventCode.ERROR, SESSION_LIMIT_MSG); rejectedSessions.add(session); } else { authenticator.onConnectRequest(session.id(), encodedCredentials, clusterTimeUnit.toMillis(now)); pendingSessions.add(session); } } } void onSessionClose(final long leadershipTermId, final long clusterSessionId) { if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterSession session = sessionByIdMap.get(clusterSessionId); if (null != session && session.state() == OPEN) { session.closing(CloseReason.CLIENT_ACTION); session.disconnect(ctx.countedErrorHandler()); if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time())) { session.closedLogPosition(logPublisher.position()); uncommittedClosedSessions.addLast(session); sessionByIdMap.remove(clusterSessionId); session.close(ctx.countedErrorHandler()); } } } } ControlledFragmentAssembler.Action onIngressMessage( final long leadershipTermId, final long clusterSessionId, final DirectBuffer buffer, final int offset, final int length) { if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterSession session = sessionByIdMap.get(clusterSessionId); if (null != session && session.state() == OPEN) { final long now = clusterClock.time(); if (logPublisher.appendMessage(leadershipTermId, clusterSessionId, now, buffer, offset, length) > 0) { session.timeOfLastActivityNs(clusterTimeUnit.toNanos(now)); return ControlledFragmentHandler.Action.CONTINUE; } else { return ControlledFragmentHandler.Action.ABORT; } } } return ControlledFragmentHandler.Action.CONTINUE; } void onSessionKeepAlive(final long leadershipTermId, final long clusterSessionId) { if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterSession session = sessionByIdMap.get(clusterSessionId); if (null != session && session.state() == OPEN) { session.timeOfLastActivityNs(clusterTimeUnit.toNanos(clusterClock.time())); } } } void onChallengeResponse(final long correlationId, final long clusterSessionId, final byte[] encodedCredentials) { if (Cluster.Role.LEADER == role) { for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--) { final ClusterSession session = pendingSessions.get(i); if (session.id() == clusterSessionId && session.state() == CHALLENGED) { final long now = clusterClock.time(); final long nowMs = clusterTimeUnit.toMillis(now); session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId); authenticator.onChallengeResponse(clusterSessionId, encodedCredentials, nowMs); break; } } } } boolean onTimerEvent(final long correlationId) { final long appendPosition = logPublisher.appendTimer(correlationId, leadershipTermId, clusterClock.time()); if (appendPosition > 0) { 
uncommittedTimers.offerLong(appendPosition); uncommittedTimers.offerLong(correlationId); return true; } return false; } void onCanvassPosition( final long logLeadershipTermId, final long logPosition, final long leadershipTermId, final int followerMemberId) { if (null != election) { election.onCanvassPosition(logLeadershipTermId, logPosition, leadershipTermId, followerMemberId); } else if (Cluster.Role.LEADER == role) { final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId); if (null != follower && logLeadershipTermId <= this.leadershipTermId) { final RecordingLog.Entry currentTermEntry = recordingLog.getTermEntry(this.leadershipTermId); final long termBaseLogPosition = currentTermEntry.termBaseLogPosition; final long timestamp = ctx.clusterClock().timeNanos(); final long nextLogLeadershipTermId; final long nextTermBaseLogPosition; final long nextLogPosition; if (logLeadershipTermId < this.leadershipTermId) { final RecordingLog.Entry nextLogEntry = recordingLog.findTermEntry(logLeadershipTermId + 1); nextLogLeadershipTermId = null != nextLogEntry ? nextLogEntry.leadershipTermId : this.leadershipTermId; nextTermBaseLogPosition = null != nextLogEntry ? nextLogEntry.termBaseLogPosition : termBaseLogPosition; nextLogPosition = null != nextLogEntry ? nextLogEntry.logPosition : NULL_POSITION; } else { nextLogLeadershipTermId = NULL_VALUE; nextTermBaseLogPosition = NULL_POSITION; nextLogPosition = NULL_POSITION; } final long appendPosition = logPublisher.position(); consensusPublisher.newLeadershipTerm( follower.publication(), logLeadershipTermId, nextLogLeadershipTermId, nextTermBaseLogPosition, nextLogPosition, this.leadershipTermId, termBaseLogPosition, appendPosition, logRecordingId, timestamp, memberId, logPublisher.sessionId(), false); } } } void onRequestVote( final long logLeadershipTermId, final long logPosition, final long candidateTermId, final int candidateId) { if (null != election) { election.onRequestVote(logLeadershipTermId, logPosition, candidateTermId, candidateId); } else if (candidateTermId > leadershipTermId && null == dynamicJoin) { ctx.countedErrorHandler().onError(new ClusterException("unexpected vote request", WARN)); enterElection(); } } void onVote( final long candidateTermId, final long logLeadershipTermId, final long logPosition, final int candidateMemberId, final int followerMemberId, final boolean vote) { if (null != election) { election.onVote( candidateTermId, logLeadershipTermId, logPosition, candidateMemberId, followerMemberId, vote); } } void onNewLeadershipTerm( final long logLeadershipTermId, final long nextLeadershipTermId, final long nextTermBaseLogPosition, final long nextLogPosition, final long leadershipTermId, final long termBaseLogPosition, final long logPosition, final long leaderRecordingId, final long timestamp, final int leaderId, final int logSessionId, final boolean isStartup) { if (null != election) { election.onNewLeadershipTerm( logLeadershipTermId, nextLeadershipTermId, nextTermBaseLogPosition, nextLogPosition, leadershipTermId, termBaseLogPosition, logPosition, leaderRecordingId, timestamp, leaderId, logSessionId, isStartup); } else if (Cluster.Role.FOLLOWER == role && leadershipTermId == this.leadershipTermId && leaderId == leaderMember.id()) { notifiedCommitPosition = Math.max(notifiedCommitPosition, logPosition); timeOfLastLogUpdateNs = clusterClock.timeNanos(); } else if (leadershipTermId > this.leadershipTermId && null == dynamicJoin) { ctx.countedErrorHandler().onError(new ClusterException("unexpected new leadership 
term", WARN)); enterElection(); } } void onAppendPosition(final long leadershipTermId, final long logPosition, final int followerMemberId) { if (null != election) { election.onAppendPosition(leadershipTermId, logPosition, followerMemberId); } else if (leadershipTermId <= this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId); if (null != follower) { follower .logPosition(logPosition) .timeOfLastAppendPositionNs(clusterClock.timeNanos()); trackCatchupCompletion(follower, leadershipTermId); } } } void onCommitPosition(final long leadershipTermId, final long logPosition, final int leaderMemberId) { if (null != election) { election.onCommitPosition(leadershipTermId, logPosition, leaderMemberId); } else if (leadershipTermId == this.leadershipTermId && leaderMemberId == leaderMember.id() && Cluster.Role.FOLLOWER == role) { notifiedCommitPosition = logPosition; timeOfLastLogUpdateNs = clusterClock.timeNanos(); } else if (leadershipTermId > this.leadershipTermId && null == dynamicJoin) { ctx.countedErrorHandler().onError(new ClusterException("unexpected commit position", WARN)); enterElection(); } } void onCatchupPosition( final long leadershipTermId, final long logPosition, final int followerMemberId, final String catchupEndpoint) { if (leadershipTermId <= this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId); if (null != follower && follower.catchupReplaySessionId() == NULL_VALUE) { final String channel = new ChannelUriStringBuilder() .media(CommonContext.UDP_MEDIA) .endpoint(catchupEndpoint) .sessionId(logPublisher.sessionId()) .linger(0L) .eos(Boolean.FALSE) .build(); follower.catchupReplaySessionId(archive.startReplay( logRecordingId, logPosition, Long.MAX_VALUE, channel, ctx.logStreamId())); follower.catchupReplayCorrelationId(archive.lastCorrelationId()); } } } void onStopCatchup(final long leadershipTermId, final int followerMemberId) { if (leadershipTermId == this.replayLeadershipTermId && followerMemberId == memberId) { if (null != catchupLogDestination) { logAdapter.removeDestination(catchupLogDestination); catchupLogDestination = null; } } } void onAddPassiveMember(final long correlationId, final String memberEndpoints) { if (null == election && null == dynamicJoin) { if (Cluster.Role.LEADER == role) { if (ClusterMember.notDuplicateEndpoint(passiveMembers, memberEndpoints)) { final ClusterMember newMember = ClusterMember.parseEndpoints(++highMemberId, memberEndpoints); newMember.correlationId(correlationId); passiveMembers = ClusterMember.addMember(passiveMembers, newMember); clusterMemberByIdMap.put(newMember.id(), newMember); ClusterMember.addConsensusPublication( newMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron); logPublisher.addDestination(ctx.isLogMdc(), newMember.logEndpoint()); } } else if (Cluster.Role.FOLLOWER == role) { consensusPublisher.addPassiveMember(leaderMember.publication(), correlationId, memberEndpoints); } } } void onClusterMembersChange( final long correlationId, final int leaderMemberId, final String activeMembers, final String passiveMembers) { if (null != dynamicJoin) { dynamicJoin.onClusterMembersChange(correlationId, leaderMemberId, activeMembers, passiveMembers); } } void onSnapshotRecordingQuery(final long correlationId, final int requestMemberId) { if (null == election && Cluster.Role.LEADER == role) { final ClusterMember requester = 
clusterMemberByIdMap.get(requestMemberId); if (null != requester) { consensusPublisher.snapshotRecording( requester.publication(), correlationId, recoveryPlan, ClusterMember.encodeAsString(activeMembers)); } } } void onSnapshotRecordings(final long correlationId, final SnapshotRecordingsDecoder decoder) { if (null != dynamicJoin) { dynamicJoin.onSnapshotRecordings(correlationId, decoder); } } void onJoinCluster(final long leadershipTermId, final int memberId) { if (null == election && Cluster.Role.LEADER == role) { final ClusterMember member = clusterMemberByIdMap.get(memberId); final long snapshotLeadershipTermId = recoveryPlan.snapshots.isEmpty() ? NULL_VALUE : recoveryPlan.snapshots.get(0).leadershipTermId; if (null != member && !member.hasRequestedJoin() && leadershipTermId <= snapshotLeadershipTermId) { if (null == member.publication()) { final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel()); final int streamId = ctx.consensusStreamId(); ClusterMember.addConsensusPublication(member, consensusUri, streamId, aeron); logPublisher.addDestination(ctx.isLogMdc(), member.logEndpoint()); } member.hasRequestedJoin(true); } } } void onTerminationPosition(final long leadershipTermId, final long logPosition) { if (leadershipTermId == this.leadershipTermId && Cluster.Role.FOLLOWER == role) { terminationPosition = logPosition; timeOfLastLogUpdateNs = clusterClock.timeNanos(); } } void onTerminationAck(final long leadershipTermId, final long logPosition, final int memberId) { if (leadershipTermId == this.leadershipTermId && logPosition >= terminationPosition && Cluster.Role.LEADER == role) { final ClusterMember member = clusterMemberByIdMap.get(memberId); if (null != member) { member.hasTerminated(true); if (clusterTermination.canTerminate(activeMembers, terminationPosition, clusterClock.timeNanos())) { recordingLog.commitLogPosition(leadershipTermId, terminationPosition); closeAndTerminate(); } } } } void onBackupQuery( final long correlationId, final int responseStreamId, final int version, final String responseChannel, final byte[] encodedCredentials) { if (null == election && null == dynamicJoin) { if (Cluster.Role.LEADER != role) { consensusPublisher.backupQuery( leaderMember.publication(), correlationId, responseStreamId, version, responseChannel, encodedCredentials); } else if (state == ConsensusModule.State.ACTIVE || state == ConsensusModule.State.SUSPENDED) { final ClusterSession session = new ClusterSession(NULL_VALUE, responseStreamId, responseChannel); session.markAsBackupSession(); session.connect(aeron); final long now = clusterClock.time(); session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId); if (AeronCluster.Configuration.PROTOCOL_MAJOR_VERSION != SemanticVersion.major(version)) { final String detail = SESSION_INVALID_VERSION_MSG + " " + SemanticVersion.toString(version) + ", cluster=" + SemanticVersion.toString(AeronCluster.Configuration.PROTOCOL_SEMANTIC_VERSION); session.reject(EventCode.ERROR, detail); rejectedSessions.add(session); } else if (pendingSessions.size() + sessionByIdMap.size() >= ctx.maxConcurrentSessions()) { session.reject(EventCode.ERROR, SESSION_LIMIT_MSG); rejectedSessions.add(session); } else { authenticator.onConnectRequest(session.id(), encodedCredentials, clusterTimeUnit.toMillis(now)); pendingSessions.add(session); } } } } void onRemoveMember(final int memberId, final boolean isPassive) { if (null == election && Cluster.Role.LEADER == role) { final ClusterMember member = clusterMemberByIdMap.get(memberId); if (null != 
member) { if (isPassive) { passiveMembers = ClusterMember.removeMember(passiveMembers, memberId); member.closePublication(ctx.countedErrorHandler()); logPublisher.removeDestination(ctx.isLogMdc(), member.logEndpoint()); clusterMemberByIdMap.remove(memberId); clusterMemberByIdMap.compact(); } else { final long now = clusterClock.time(); final long position = logPublisher.appendMembershipChangeEvent( leadershipTermId, now, this.memberId, activeMembers.length, ChangeType.QUIT, memberId, ClusterMember.encodeAsString(ClusterMember.removeMember(activeMembers, memberId))); if (position > 0) { timeOfLastLogUpdateNs = clusterTimeUnit.toNanos(now) - leaderHeartbeatIntervalNs; member.removalPosition(position); pendingMemberRemovals++; } } } } } void onClusterMembersQuery(final long correlationId, final boolean isExtendedRequest) { if (isExtendedRequest) { serviceProxy.clusterMembersExtendedResponse( correlationId, clusterClock.timeNanos(), leaderMember.id(), memberId, activeMembers, passiveMembers); } else { serviceProxy.clusterMembersResponse( correlationId, leaderMember.id(), ClusterMember.encodeAsString(activeMembers), ClusterMember.encodeAsString(passiveMembers)); } } void state(final ConsensusModule.State newState) { if (newState != state) { stateChange(state, newState, memberId); state = newState; if (!moduleState.isClosed()) { moduleState.set(newState.code()); } } } ConsensusModule.State state() { return state; } void stateChange(final ConsensusModule.State oldState, final ConsensusModule.State newState, final int memberId) { //System.out.println("CM State memberId=" + memberId + " " + oldState + " -> " + newState); } void role(final Cluster.Role newRole) { if (newRole != role) { roleChange(role, newRole, memberId); role = newRole; if (!clusterRoleCounter.isClosed()) { clusterRoleCounter.set(newRole.code()); } } } void roleChange(final Cluster.Role oldRole, final Cluster.Role newRole, final int memberId) { //System.out.println("CM Role memberId=" + memberId + " " + oldRole + " -> " + newRole); } Cluster.Role role() { return role; } long prepareForNewLeadership(final long logPosition) { role(Cluster.Role.FOLLOWER); CloseHelper.close(ctx.countedErrorHandler(), ingressAdapter); ClusterControl.ToggleState.deactivate(controlToggle); if (null != catchupLogDestination) { logAdapter.removeDestination(catchupLogDestination); catchupLogDestination = null; } if (null != liveLogDestination) { logAdapter.removeDestination(liveLogDestination); liveLogDestination = null; } logAdapter.disconnect(ctx.countedErrorHandler()); logPublisher.disconnect(ctx.countedErrorHandler()); if (RecordingPos.NULL_RECORDING_ID != logRecordingId) { tryStopLogRecording(); lastAppendPosition = getLastAppendedPosition(); recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId); clearSessionsAfter(logPosition); for (final ClusterSession session : sessionByIdMap.values()) { session.disconnect(ctx.countedErrorHandler()); } commitPosition.setOrdered(logPosition); restoreUncommittedEntries(logPosition); } return lastAppendPosition; } void onServiceCloseSession(final long clusterSessionId) { final ClusterSession session = sessionByIdMap.get(clusterSessionId); if (null != session) { session.closing(CloseReason.SERVICE_ACTION); if (Cluster.Role.LEADER == role && logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time())) { final String msg = CloseReason.SERVICE_ACTION.name(); egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg); 
session.closedLogPosition(logPublisher.position()); uncommittedClosedSessions.addLast(session); sessionByIdMap.remove(clusterSessionId); session.close(ctx.countedErrorHandler()); } } } void onServiceMessage(final long leadershipTermId, final DirectBuffer buffer, final int offset, final int length) { if (leadershipTermId == this.leadershipTermId) { enqueueServiceSessionMessage((MutableDirectBuffer)buffer, offset, length, nextServiceSessionId++); } } void onScheduleTimer(final long correlationId, final long deadline) { if (expiredTimerCountByCorrelationIdMap.get(correlationId) == 0) { timerService.scheduleTimerForCorrelationId(correlationId, deadline); } else { expiredTimerCountByCorrelationIdMap.decrementAndGet(correlationId); } } void onCancelTimer(final long correlationId) { timerService.cancelTimerByCorrelationId(correlationId); } void onServiceAck( final long logPosition, final long timestamp, final long ackId, final long relevantId, final int serviceId) { captureServiceAck(logPosition, ackId, relevantId, serviceId); if (ServiceAck.hasReached(logPosition, serviceAckId, serviceAckQueues)) { if (ConsensusModule.State.SNAPSHOT == state) { final ServiceAck[] serviceAcks = pollServiceAcks(logPosition, serviceId); ++serviceAckId; takeSnapshot(timestamp, logPosition, serviceAcks); if (null != clusterTermination) { serviceProxy.terminationPosition(terminationPosition, ctx.countedErrorHandler()); clusterTermination.deadlineNs(clusterClock.timeNanos() + ctx.terminationTimeoutNs()); state(ConsensusModule.State.TERMINATING); } else { state(ConsensusModule.State.ACTIVE); if (Cluster.Role.LEADER == role) { ClusterControl.ToggleState.reset(controlToggle); } } } else if (ConsensusModule.State.QUITTING == state) { closeAndTerminate(); } else if (ConsensusModule.State.TERMINATING == state) { if (null == clusterTermination) { consensusPublisher.terminationAck( leaderMember.publication(), leadershipTermId, logPosition, memberId); recordingLog.commitLogPosition(leadershipTermId, logPosition); closeAndTerminate(); } else { clusterTermination.onServicesTerminated(); if (clusterTermination.canTerminate( activeMembers, terminationPosition, clusterClock.timeNanos())) { recordingLog.commitLogPosition(leadershipTermId, logPosition); closeAndTerminate(); } } } } } void onReplaySessionMessage(final long clusterSessionId, final long timestamp) { final ClusterSession clusterSession = sessionByIdMap.get(clusterSessionId); if (null == clusterSession) { logServiceSessionId = clusterSessionId; pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE); } else { clusterSession.timeOfLastActivityNs(clusterTimeUnit.toNanos(timestamp)); } } void onReplayTimerEvent(final long correlationId) { if (!timerService.cancelTimerByCorrelationId(correlationId)) { expiredTimerCountByCorrelationIdMap.getAndIncrement(correlationId); } } void onReplaySessionOpen( final long logPosition, final long correlationId, final long clusterSessionId, final long timestamp, final int responseStreamId, final String responseChannel) { final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel); session.open(logPosition); session.lastActivityNs(clusterTimeUnit.toNanos(timestamp), correlationId); sessionByIdMap.put(clusterSessionId, session); if (clusterSessionId >= nextSessionId) { nextSessionId = clusterSessionId + 1; } } void onReplaySessionClose(final long clusterSessionId, final CloseReason closeReason) { final ClusterSession clusterSession = 
sessionByIdMap.remove(clusterSessionId); if (null != clusterSession) { clusterSession.closing(closeReason); clusterSession.close(ctx.countedErrorHandler()); } } void onReplayClusterAction(final long leadershipTermId, final ClusterAction action) { if (leadershipTermId == this.replayLeadershipTermId) { if (ClusterAction.SUSPEND == action) { state(ConsensusModule.State.SUSPENDED); } else if (ClusterAction.RESUME == action) { state(ConsensusModule.State.ACTIVE); } else if (ClusterAction.SNAPSHOT == action) { state(ConsensusModule.State.SNAPSHOT); } } } void onReplayNewLeadershipTermEvent( final long leadershipTermId, final long logPosition, final long timestamp, final long termBaseLogPosition, final TimeUnit timeUnit, final int appVersion) { if (timeUnit != clusterTimeUnit) { ctx.countedErrorHandler().onError(new ClusterException( "incompatible timestamp units: " + clusterTimeUnit + " log=" + timeUnit, AeronException.Category.FATAL)); unexpectedTermination(); } if (SemanticVersion.major(ctx.appVersion()) != SemanticVersion.major(appVersion)) { ctx.countedErrorHandler().onError(new ClusterException( "incompatible version: " + SemanticVersion.toString(ctx.appVersion()) + " log=" + SemanticVersion.toString(appVersion), AeronException.Category.FATAL)); unexpectedTermination(); } leadershipTermId(leadershipTermId); if (null != election) { election.onReplayNewLeadershipTermEvent( logRecordingId, leadershipTermId, logPosition, timestamp, termBaseLogPosition); } } void onReplayMembershipChange( final long leadershipTermId, final long logPosition, final int leaderMemberId, final ChangeType changeType, final int memberId, final String clusterMembers) { if (leadershipTermId == this.replayLeadershipTermId) { if (ChangeType.JOIN == changeType) { final ClusterMember[] newMembers = ClusterMember.parse(clusterMembers); if (memberId == this.memberId) { activeMembers = newMembers; clusterMemberByIdMap.clear(); clusterMemberByIdMap.compact(); ClusterMember.addClusterMemberIds(newMembers, clusterMemberByIdMap); thisMember = ClusterMember.findMember(activeMembers, memberId); leaderMember = ClusterMember.findMember(activeMembers, leaderMemberId); ClusterMember.addConsensusPublications( newMembers, thisMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron); } else { clusterMemberJoined(memberId, newMembers); } } else if (ChangeType.QUIT == changeType) { if (memberId == this.memberId) { state(ConsensusModule.State.QUITTING); } else { clusterMemberQuit(memberId); if (leaderMemberId == memberId && null == election) { commitPosition.proposeMaxOrdered(logPosition); enterElection(); } } } if (null != election) { election.onMembershipChange(activeMembers, changeType, memberId, logPosition); } } } void onLoadSession( final long clusterSessionId, final long correlationId, final long openedPosition, final long timeOfLastActivity, final CloseReason closeReason, final int responseStreamId, final String responseChannel) { sessionByIdMap.put(clusterSessionId, new ClusterSession( clusterSessionId, correlationId, openedPosition, timeOfLastActivity, responseStreamId, responseChannel, closeReason)); if (clusterSessionId >= nextSessionId) { nextSessionId = clusterSessionId + 1; } } void onLoadPendingMessage(final DirectBuffer buffer, final int offset, final int length) { pendingServiceMessages.append(buffer, offset, length); } void onLoadConsensusModuleState( final long nextSessionId, final long nextServiceSessionId, final long logServiceSessionId, final int pendingMessageCapacity) { this.nextSessionId = 
nextSessionId; this.nextServiceSessionId = nextServiceSessionId; this.logServiceSessionId = logServiceSessionId; pendingServiceMessages.reset(pendingMessageCapacity); } void onLoadClusterMembers(final int memberId, final int highMemberId, final String members) { if (null == dynamicJoin && !ctx.clusterMembersIgnoreSnapshot()) { if (NULL_VALUE == this.memberId) { this.memberId = memberId; ctx.clusterMarkFile().memberId(memberId); } if (ClusterMember.EMPTY_MEMBERS == activeMembers) { activeMembers = ClusterMember.parse(members); this.highMemberId = Math.max(ClusterMember.highMemberId(activeMembers), highMemberId); rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)]; thisMember = clusterMemberByIdMap.get(memberId); final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel()); consensusUri.put(ENDPOINT_PARAM_NAME, thisMember.consensusEndpoint()); ClusterMember.addConsensusPublications( activeMembers, thisMember, consensusUri, ctx.consensusStreamId(), aeron); } } } int addLogPublication() { final long logPublicationTag = aeron.nextCorrelationId(); logPublicationChannelTag = aeron.nextCorrelationId(); final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel()); channelUri.put(ALIAS_PARAM_NAME, "log"); channelUri.put(TAGS_PARAM_NAME, logPublicationChannelTag + "," + logPublicationTag); if (channelUri.isUdp()) { if (!channelUri.containsKey(FLOW_CONTROL_PARAM_NAME)) { final long timeout = TimeUnit.NANOSECONDS.toSeconds(ctx.leaderHeartbeatTimeoutNs()); channelUri.put(FLOW_CONTROL_PARAM_NAME, "min,t:" + timeout + "s"); } if (ctx.isLogMdc()) { channelUri.put(MDC_CONTROL_MODE_PARAM_NAME, MDC_CONTROL_MODE_MANUAL); } channelUri.put(SPIES_SIMULATE_CONNECTION_PARAM_NAME, Boolean.toString(activeMembers.length == 1)); } if (null != recoveryPlan.log) { channelUri.initialPosition( recoveryPlan.appendedLogPosition, recoveryPlan.log.initialTermId, recoveryPlan.log.termBufferLength); channelUri.put(MTU_LENGTH_PARAM_NAME, Integer.toString(recoveryPlan.log.mtuLength)); } final String channel = channelUri.toString(); final ExclusivePublication publication = aeron.addExclusivePublication(channel, ctx.logStreamId()); if (ctx.isLogMdc()) { for (final ClusterMember member : activeMembers) { if (member.id() != memberId) { publication.asyncAddDestination("aeron:udp?endpoint=" + member.logEndpoint()); } } for (final ClusterMember member : passiveMembers) { publication.asyncAddDestination("aeron:udp?endpoint=" + member.logEndpoint()); } } logPublisher.publication(publication); return publication.sessionId(); } void joinLogAsLeader( final long leadershipTermId, final long logPosition, final int logSessionId, final boolean isStartup) { final boolean isIpc = ctx.logChannel().startsWith(IPC_CHANNEL); final String channel = (isIpc ? IPC_CHANNEL : UDP_CHANNEL) + "?tags=" + logPublicationChannelTag + "|session-id=" + logSessionId + "|alias=log"; leadershipTermId(leadershipTermId); startLogRecording(channel, ctx.logStreamId(), SourceLocation.LOCAL); createAppendPosition(logSessionId); awaitServicesReady( isIpc ? 
channel : SPY_PREFIX + channel, ctx.logStreamId(), logSessionId, logPosition, Long.MAX_VALUE, isStartup, Cluster.Role.LEADER); } void liveLogDestination(final String liveLogDestination) { this.liveLogDestination = liveLogDestination; } String liveLogDestination() { return liveLogDestination; } void catchupLogDestination(final String catchupLogDestination) { this.catchupLogDestination = catchupLogDestination; } String catchupLogDestination() { return catchupLogDestination; } void joinLogAsFollower(final Image image, final boolean isLeaderStartup) { final Subscription logSubscription = image.subscription(); final int streamId = logSubscription.streamId(); final String channel = logSubscription.channel(); startLogRecording(channel, streamId, SourceLocation.REMOTE); createAppendPosition(image.sessionId()); appendDynamicJoinTermAndSnapshots(); logAdapter.image(image); lastAppendPosition = image.joinPosition(); awaitServicesReady( channel, streamId, image.sessionId(), image.joinPosition(), Long.MAX_VALUE, isLeaderStartup, Cluster.Role.FOLLOWER); } boolean tryJoinLogAsFollower(final Image image, final boolean isLeaderStartup) { final Subscription logSubscription = image.subscription(); final int streamId = logSubscription.streamId(); final String channel = logSubscription.channel(); if (NULL_VALUE == logSubscriptionId) { startLogRecording(channel, streamId, SourceLocation.REMOTE); } if (!tryCreateAppendPosition(image.sessionId())) { return false; } appendDynamicJoinTermAndSnapshots(); logAdapter.image(image); lastAppendPosition = image.joinPosition(); awaitServicesReady( channel, streamId, image.sessionId(), image.joinPosition(), Long.MAX_VALUE, isLeaderStartup, Cluster.Role.FOLLOWER); return true; } void awaitServicesReady( final String logChannel, final int streamId, final int logSessionId, final long logPosition, final long maxLogPosition, final boolean isStartup, final Cluster.Role role) { serviceProxy.joinLog( logPosition, maxLogPosition, memberId, logSessionId, streamId, isStartup, role, logChannel); expectedAckPosition = logPosition; while (!ServiceAck.hasReached(logPosition, serviceAckId, serviceAckQueues)) { idle(consensusModuleAdapter.poll()); } ServiceAck.removeHead(serviceAckQueues); ++serviceAckId; } void leadershipTermId(final long leadershipTermId) { this.leadershipTermId = leadershipTermId; this.replayLeadershipTermId = leadershipTermId; } LogReplay newLogReplay(final long logPosition, final long appendPosition) { return new LogReplay( archive, logRecordingId, logPosition, appendPosition, logAdapter, ctx); } int replayLogPoll(final LogAdapter logAdapter, final long stopPosition) { int workCount = 0; if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state) { final int fragments = logAdapter.poll(stopPosition); final long position = logAdapter.position(); if (fragments > 0) { commitPosition.setOrdered(position); } else if (logAdapter.isImageClosed() && position < stopPosition) { throw new ClusterException("unexpected image close when replaying log: position=" + position); } workCount += fragments; } workCount += consensusModuleAdapter.poll(); return workCount; } long logRecordingId() { return logRecordingId; } void logRecordingId(final long recordingId) { if (NULL_VALUE != recordingId) { logRecordingId = recordingId; } } void truncateLogEntry(final long leadershipTermId, final long logPosition) { archive.stopAllReplays(logRecordingId); archive.truncateRecording(logRecordingId, logPosition); recordingLog.commitLogPosition(leadershipTermId, logPosition); 
logAdapter.disconnect(ctx.countedErrorHandler(), logPosition); } boolean electionComplete() { final long logPosition = election.logPosition(); final long now = clusterClock.time(); final long nowNs = clusterTimeUnit.toNanos(now); if (Cluster.Role.LEADER == role) { if (!logPublisher.isConnected() || !logPublisher.appendNewLeadershipTermEvent( leadershipTermId, now, logPosition, memberId, logPublisher.sessionId(), clusterTimeUnit, ctx.appVersion())) { return false; } timeOfLastLogUpdateNs = nowNs - leaderHeartbeatIntervalNs; timerService.currentTickTime(now); ClusterControl.ToggleState.activate(controlToggle); prepareSessionsForNewTerm(election.isLeaderStartup()); } else { timeOfLastLogUpdateNs = nowNs; timeOfLastAppendPositionNs = nowNs; } recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId); notifiedCommitPosition = logPosition; commitPosition.setOrdered(logPosition); pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE); updateMemberDetails(election.leader()); election = null; connectIngress(); return true; } boolean dynamicJoinComplete() { if (0 == activeMembers.length) { activeMembers = dynamicJoin.clusterMembers(); ClusterMember.addClusterMemberIds(activeMembers, clusterMemberByIdMap); leaderMember = dynamicJoin.leader(); ClusterMember.addConsensusPublications( activeMembers, thisMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron); } if (NULL_VALUE == memberId) { memberId = dynamicJoin.memberId(); ctx.clusterMarkFile().memberId(memberId); thisMember.id(memberId); } dynamicJoin = null; election = new Election( false, leadershipTermId, commitPosition.getWeak(), recoveryPlan.appendedLogPosition, activeMembers, clusterMemberByIdMap, thisMember, consensusPublisher, ctx, this); return true; } void trackCatchupCompletion(final ClusterMember follower, final long leadershipTermId) { if (NULL_VALUE != follower.catchupReplaySessionId()) { if (follower.logPosition() >= logPublisher.position()) { if (NULL_VALUE != follower.catchupReplayCorrelationId()) { if (archive.archiveProxy().stopReplay( follower.catchupReplaySessionId(), aeron.nextCorrelationId(), archive.controlSessionId())) { follower.catchupReplayCorrelationId(NULL_VALUE); } } if (consensusPublisher.stopCatchup(follower.publication(), leadershipTermId, follower.id())) { follower.catchupReplaySessionId(NULL_VALUE); } } } } void catchupInitiated(final long nowNs) { timeOfLastAppendPositionNs = nowNs; } int catchupPoll(final long limitPosition, final long nowNs) { int workCount = 0; if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state) { final int fragments = logAdapter.poll(Math.min(appendPosition.get(), limitPosition)); workCount += fragments; if (fragments == 0 && logAdapter.image().isClosed()) { throw new ClusterException("unexpected close replaying log: position=" + logAdapter.image().position()); } } final long appendPosition = logAdapter.position(); if (appendPosition > lastAppendPosition || nowNs > (timeOfLastAppendPositionNs + leaderHeartbeatIntervalNs)) { commitPosition.proposeMaxOrdered(appendPosition); final ExclusivePublication publication = election.leader().publication(); if (consensusPublisher.appendPosition(publication, replayLeadershipTermId, appendPosition, memberId)) { lastAppendPosition = appendPosition; timeOfLastAppendPositionNs = nowNs; } } if (nowNs > (timeOfLastAppendPositionNs + leaderHeartbeatTimeoutNs) && ConsensusModule.State.ACTIVE == state) { throw new ClusterException("no 
catchup progress", WARN); } workCount += consensusModuleAdapter.poll(); return workCount; } boolean isCatchupNearLive(final long position) { final Image image = logAdapter.image(); if (null != image) { final long localPosition = image.position(); final long window = Math.min(image.termBufferLength() >> 2, LIVE_ADD_MAX_WINDOW); return localPosition >= (position - window); } return false; } void stopAllCatchups() { for (final ClusterMember member : activeMembers) { if (member.catchupReplaySessionId() != NULL_VALUE) { if (member.catchupReplayCorrelationId() != NULL_VALUE) { try { archive.stopReplay(member.catchupReplaySessionId()); } catch (final Exception ex) { ctx.countedErrorHandler().onError(new ClusterException("catchup already stopped", ex, WARN)); } } member.catchupReplaySessionId(NULL_VALUE); member.catchupReplayCorrelationId(NULL_VALUE); } } } void retrievedSnapshot(final long localRecordingId, final RecordingLog.Snapshot leaderSnapshot) { dynamicJoinSnapshots.add(new RecordingLog.Snapshot( localRecordingId, leaderSnapshot.leadershipTermId, leaderSnapshot.termBaseLogPosition, leaderSnapshot.logPosition, leaderSnapshot.timestamp, leaderSnapshot.serviceId)); } Counter loadSnapshotsForDynamicJoin() { recoveryPlan = RecordingLog.createRecoveryPlan(dynamicJoinSnapshots); final Counter recoveryStateCounter = addRecoveryStateCounter(recoveryPlan); if (!recoveryPlan.snapshots.isEmpty()) { loadSnapshot(recoveryPlan.snapshots.get(0), archive); } return recoveryStateCounter; } boolean pollForSnapshotLoadAck(final Counter recoveryStateCounter, final long nowNs) { consensusModuleAdapter.poll(); if (ServiceAck.hasReached(expectedAckPosition, serviceAckId, serviceAckQueues)) { captureServiceClientIds(); ++serviceAckId; timeOfLastLogUpdateNs = nowNs; CloseHelper.close(ctx.countedErrorHandler(), recoveryStateCounter); state(ConsensusModule.State.ACTIVE); return true; } return false; } int pollArchiveEvents() { int workCount = 0; if (null != archive) { final RecordingSignalPoller poller = this.recordingSignalPoller; workCount += poller.poll(); if (poller.isPollComplete()) { final int templateId = poller.templateId(); if (ControlResponseDecoder.TEMPLATE_ID == templateId && poller.code() == ControlResponseCode.ERROR) { for (final ClusterMember member : activeMembers) { if (member.catchupReplayCorrelationId() != NULL_VALUE && member.catchupReplayCorrelationId() == poller.correlationId()) { member.catchupReplaySessionId(NULL_VALUE); member.catchupReplayCorrelationId(NULL_VALUE); ctx.countedErrorHandler().onError(new ClusterException( "catchup replay failed - " + poller.errorMessage(), WARN)); return workCount; } } final ArchiveException ex = new ArchiveException( poller.errorMessage(), (int)poller.relevantId(), poller.correlationId()); if (ex.errorCode() == ArchiveException.STORAGE_SPACE) { ctx.countedErrorHandler().onError(ex); unexpectedTermination(); } if (null != election) { election.handleError(clusterClock.timeNanos(), ex); } } else if (RecordingSignalEventDecoder.TEMPLATE_ID == templateId) { final long recordingId = poller.recordingId(); final long position = poller.recordingPosition(); final RecordingSignal signal = poller.recordingSignal(); if (RecordingSignal.STOP == signal && recordingId == logRecordingId) { this.logRecordedPosition = position; } if (null != election) { election.onRecordingSignal(poller.correlationId(), recordingId, position, signal); } if (null != dynamicJoin) { dynamicJoin.onRecordingSignal(poller.correlationId(), recordingId, position, signal); } } } else if (0 == workCount 
&& !poller.subscription().isConnected()) { ctx.countedErrorHandler().onError(new ClusterException("local archive is not connected", WARN)); unexpectedTermination(); } } return workCount; } private void startLogRecording(final String channel, final int streamId, final SourceLocation sourceLocation) { try { final long logRecordingId = recordingLog.findLastTermRecordingId(); if (RecordingPos.NULL_RECORDING_ID == logRecordingId) { logSubscriptionId = archive.startRecording(channel, streamId, sourceLocation, true); } else { logSubscriptionId = archive.extendRecording(logRecordingId, channel, streamId, sourceLocation, true); } } catch (final ArchiveException ex) { if (ex.errorCode() == ArchiveException.STORAGE_SPACE) { ctx.countedErrorHandler().onError(ex); unexpectedTermination(); } throw ex; } } private void prepareSessionsForNewTerm(final boolean isStartup) { if (isStartup) { for (final ClusterSession session : sessionByIdMap.values()) { if (session.state() == OPEN) { session.closing(CloseReason.TIMEOUT); } } } else { for (final ClusterSession session : sessionByIdMap.values()) { if (session.state() == OPEN) { session.connect(aeron); } } final long nowNs = clusterClock.timeNanos(); for (final ClusterSession session : sessionByIdMap.values()) { if (session.state() == OPEN) { session.timeOfLastActivityNs(nowNs); session.hasNewLeaderEventPending(true); } } } } private void updateMemberDetails(final ClusterMember newLeader) { leaderMember = newLeader; for (final ClusterMember clusterMember : activeMembers) { clusterMember.isLeader(clusterMember.id() == leaderMember.id()); } ingressEndpoints = ClusterMember.ingressEndpoints(activeMembers); } private int slowTickWork(final long nowMs, final long nowNs) { int workCount = aeronClientInvoker.invoke(); if (aeron.isClosed()) { throw new AgentTerminationException("unexpected Aeron close"); } else if (ConsensusModule.State.CLOSED == state) { unexpectedTermination(); } else if (isElectionRequired) { enterElection(); isElectionRequired = false; } if (nowNs >= markFileUpdateDeadlineNs) { markFileUpdateDeadlineNs = nowNs + MARK_FILE_UPDATE_INTERVAL_NS; markFile.updateActivityTimestamp(nowMs); } workCount += pollArchiveEvents(); workCount += sendRedirects(redirectSessions, nowNs); workCount += sendRejections(rejectedSessions, nowNs); if (null == election) { if (Cluster.Role.LEADER == role) { workCount += checkControlToggle(nowNs); if (ConsensusModule.State.ACTIVE == state) { workCount += processPendingSessions(pendingSessions, nowMs, nowNs); workCount += checkSessions(sessionByIdMap, nowNs); workCount += processPassiveMembers(passiveMembers); if (!ClusterMember.hasActiveQuorum(activeMembers, nowNs, leaderHeartbeatTimeoutNs)) { ctx.countedErrorHandler().onError(new ClusterException("inactive follower quorum", WARN)); enterElection(); workCount += 1; } } else if (ConsensusModule.State.TERMINATING == state) { if (clusterTermination.canTerminate(activeMembers, terminationPosition, nowNs)) { recordingLog.commitLogPosition(leadershipTermId, terminationPosition); closeAndTerminate(); } } } else if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state) { if (nowNs >= (timeOfLastLogUpdateNs + leaderHeartbeatTimeoutNs) && NULL_POSITION == terminationPosition) { ctx.countedErrorHandler().onError(new ClusterException("leader heartbeat timeout", WARN)); enterElection(); workCount += 1; } } } return workCount; } private int consensusWork(final long timestamp, final long nowNs) { int workCount = 0; if (Cluster.Role.LEADER == role) { if 
(ConsensusModule.State.ACTIVE == state) { workCount += timerService.poll(timestamp); workCount += pendingServiceMessages.forEach( pendingServiceMessageHeadOffset, serviceSessionMessageAppender, SERVICE_MESSAGE_LIMIT); workCount += ingressAdapter.poll(); } workCount += updateLeaderPosition(nowNs); } else { if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state) { if (NULL_POSITION != terminationPosition && logAdapter.position() >= terminationPosition) { serviceProxy.terminationPosition(terminationPosition, ctx.countedErrorHandler()); state(ConsensusModule.State.TERMINATING); } else { final long limit = null != appendPosition ? appendPosition.get() : logRecordedPosition; final int count = logAdapter.poll(min(notifiedCommitPosition, limit)); if (0 == count && logAdapter.isImageClosed()) { ctx.countedErrorHandler().onError(new ClusterException("log disconnected from leader", WARN)); enterElection(); return 1; } commitPosition.proposeMaxOrdered(logAdapter.position()); workCount += ingressAdapter.poll(); workCount += count; } } workCount += updateFollowerPosition(nowNs); } workCount += consensusModuleAdapter.poll(); return workCount; } private int checkControlToggle(final long nowNs) { switch (ClusterControl.ToggleState.get(controlToggle)) { case SUSPEND: if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SUSPEND)) { state(ConsensusModule.State.SUSPENDED); } break; case RESUME: if (ConsensusModule.State.SUSPENDED == state && appendAction(ClusterAction.RESUME)) { state(ConsensusModule.State.ACTIVE); ClusterControl.ToggleState.reset(controlToggle); } break; case SNAPSHOT: if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT)) { state(ConsensusModule.State.SNAPSHOT); } break; case SHUTDOWN: if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT)) { final CountedErrorHandler errorHandler = ctx.countedErrorHandler(); final long position = logPublisher.position(); clusterTermination = new ClusterTermination(nowNs + ctx.terminationTimeoutNs()); clusterTermination.terminationPosition( errorHandler, consensusPublisher, activeMembers, thisMember, leadershipTermId, position); terminationPosition = position; state(ConsensusModule.State.SNAPSHOT); } break; case ABORT: if (ConsensusModule.State.ACTIVE == state) { final CountedErrorHandler errorHandler = ctx.countedErrorHandler(); final long position = logPublisher.position(); clusterTermination = new ClusterTermination(nowNs + ctx.terminationTimeoutNs()); clusterTermination.terminationPosition( errorHandler, consensusPublisher, activeMembers, thisMember, leadershipTermId, position); terminationPosition = position; serviceProxy.terminationPosition(terminationPosition, errorHandler); state(ConsensusModule.State.TERMINATING); } break; default: return 0; } return 1; } private boolean appendAction(final ClusterAction action) { return logPublisher.appendClusterAction(leadershipTermId, clusterClock.time(), action); } private int processPendingSessions( final ArrayList<ClusterSession> pendingSessions, final long nowMs, final long nowNs) { int workCount = 0; for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--) { final ClusterSession session = pendingSessions.get(i); if (nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs)) { ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--); session.close(ctx.countedErrorHandler()); ctx.timedOutClientCounter().incrementOrdered(); continue; } if (session.state() == INIT || 
session.state() == CONNECTED) { if (session.isResponsePublicationConnected()) { session.state(CONNECTED); authenticator.onConnectedSession(sessionProxy.session(session), nowMs); } } if (session.state() == CHALLENGED) { if (session.isResponsePublicationConnected()) { authenticator.onChallengedSession(sessionProxy.session(session), nowMs); } } if (session.state() == AUTHENTICATED) { if (session.isBackupSession()) { final RecordingLog.Entry entry = recordingLog.findLastTerm(); if (null != entry && consensusPublisher.backupResponse( session, commitPosition.id(), leaderMember.id(), entry, recoveryPlan, ClusterMember.encodeAsString(activeMembers))) { ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--); session.close(ctx.countedErrorHandler()); workCount += 1; } } else if (appendSessionAndOpen(session, nowNs)) { ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--); sessionByIdMap.put(session.id(), session); workCount += 1; } } else if (session.state() == REJECTED) { ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--); rejectedSessions.add(session); } } return workCount; } private int sendRejections(final ArrayList<ClusterSession> rejectedSessions, final long nowNs) { int workCount = 0; for (int lastIndex = rejectedSessions.size() - 1, i = lastIndex; i >= 0; i--) { final ClusterSession session = rejectedSessions.get(i); final String detail = session.responseDetail(); final EventCode eventCode = session.eventCode(); if (egressPublisher.sendEvent(session, leadershipTermId, leaderMember.id(), eventCode, detail) || nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs)) { ArrayListUtil.fastUnorderedRemove(rejectedSessions, i, lastIndex--); session.close(ctx.countedErrorHandler()); workCount++; } } return workCount; } private int sendRedirects(final ArrayList<ClusterSession> redirectSessions, final long nowNs) { int workCount = 0; for (int lastIndex = redirectSessions.size() - 1, i = lastIndex; i >= 0; i--) { final ClusterSession session = redirectSessions.get(i); final EventCode eventCode = EventCode.REDIRECT; final int leaderId = leaderMember.id(); if (egressPublisher.sendEvent(session, leadershipTermId, leaderId, eventCode, ingressEndpoints) || nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs)) { ArrayListUtil.fastUnorderedRemove(redirectSessions, i, lastIndex--); session.close(ctx.countedErrorHandler()); workCount++; } } return workCount; } private int processPassiveMembers(final ClusterMember[] passiveMembers) { int workCount = 0; for (final ClusterMember member : passiveMembers) { if (member.correlationId() != NULL_VALUE) { if (consensusPublisher.clusterMemberChange( member.publication(), member.correlationId(), leaderMember.id(), ClusterMember.encodeAsString(activeMembers), ClusterMember.encodeAsString(passiveMembers))) { member.correlationId(NULL_VALUE); workCount++; } } else if (member.hasRequestedJoin() && member.logPosition() == logPublisher.position()) { final ClusterMember[] newMembers = ClusterMember.addMember(activeMembers, member); final long now = clusterClock.time(); if (logPublisher.appendMembershipChangeEvent( leadershipTermId, now, leaderMember.id(), newMembers.length, ChangeType.JOIN, member.id(), ClusterMember.encodeAsString(newMembers)) > 0) { timeOfLastLogUpdateNs = clusterTimeUnit.toNanos(now) - leaderHeartbeatIntervalNs; this.passiveMembers = ClusterMember.removeMember(this.passiveMembers, member.id()); activeMembers = newMembers; rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)]; 
member.hasRequestedJoin(false); workCount++; break; } } } return workCount; } private int checkSessions(final Long2ObjectHashMap<ClusterSession> sessionByIdMap, final long nowNs) { int workCount = 0; for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); ) { final ClusterSession session = i.next(); if (nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs)) { if (session.state() == OPEN) { session.closing(CloseReason.TIMEOUT); if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time())) { final String msg = session.closeReason().name(); egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg); session.closedLogPosition(logPublisher.position()); uncommittedClosedSessions.addLast(session); i.remove(); session.close(ctx.countedErrorHandler()); ctx.timedOutClientCounter().incrementOrdered(); workCount++; } } else if (session.state() == CLOSING) { if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time())) { final String msg = session.closeReason().name(); egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg); session.closedLogPosition(logPublisher.position()); uncommittedClosedSessions.addLast(session); i.remove(); session.close(ctx.countedErrorHandler()); if (session.closeReason() == CloseReason.TIMEOUT) { ctx.timedOutClientCounter().incrementOrdered(); } workCount++; } } else { i.remove(); session.close(ctx.countedErrorHandler()); workCount++; } } else if (session.hasOpenEventPending()) { workCount += sendSessionOpenEvent(session); } else if (session.hasNewLeaderEventPending()) { workCount += sendNewLeaderEvent(session); } } return workCount; } private void captureServiceAck(final long logPosition, final long ackId, final long relevantId, final int serviceId) { if (0 == ackId && NULL_VALUE != serviceClientIds[serviceId]) { throw new ClusterException( "initial ack already received from service: possible duplicate serviceId=" + serviceId); } serviceAckQueues[serviceId].offerLast(new ServiceAck(ackId, logPosition, relevantId)); } private ServiceAck[] pollServiceAcks(final long logPosition, final int serviceId) { final ServiceAck[] serviceAcks = new ServiceAck[serviceAckQueues.length]; for (int id = 0, length = serviceAckQueues.length; id < length; id++) { final ServiceAck serviceAck = serviceAckQueues[id].pollFirst(); if (null == serviceAck || serviceAck.logPosition() != logPosition) { throw new ClusterException( "invalid ack for serviceId=" + serviceId + " logPosition=" + logPosition + " " + serviceAck); } serviceAcks[id] = serviceAck; } return serviceAcks; } private int sendNewLeaderEvent(final ClusterSession session) { if (egressPublisher.newLeader(session, leadershipTermId, leaderMember.id(), ingressEndpoints)) { session.hasNewLeaderEventPending(false); return 1; } return 0; } private int sendSessionOpenEvent(final ClusterSession session) { if (egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.OK, "")) { session.hasOpenEventPending(false); return 1; } return 0; } private boolean appendSessionAndOpen(final ClusterSession session, final long nowNs) { final long resultingPosition = logPublisher.appendSessionOpen(session, leadershipTermId, clusterClock.time()); if (resultingPosition > 0) { session.open(resultingPosition); session.timeOfLastActivityNs(nowNs); return true; } return false; } private void createAppendPosition(final int logSessionId) { final CountersReader counters = aeron.countersReader(); final int counterId = 
awaitRecordingCounter(counters, logSessionId); long registrationId; while (0 == (registrationId = counters.getCounterRegistrationId(counterId))) { idle(); } logRecordingId = RecordingPos.getRecordingId(counters, counterId); appendPosition = new ReadableCounter(counters, registrationId, counterId); logRecordedPosition = NULL_POSITION; } private boolean tryCreateAppendPosition(final int logSessionId) { final CountersReader counters = aeron.countersReader(); final int counterId = RecordingPos.findCounterIdBySession(counters, logSessionId); if (CountersReader.NULL_COUNTER_ID == counterId) { return false; } final long registrationId = counters.getCounterRegistrationId(counterId); if (0 == registrationId) { return false; } logRecordingId = RecordingPos.getRecordingId(counters, counterId); appendPosition = new ReadableCounter(counters, registrationId, counterId); logRecordedPosition = NULL_POSITION; return true; } private void loadSnapshot(final RecordingLog.Snapshot snapshot, final AeronArchive archive) { final String channel = ctx.replayChannel(); final int streamId = ctx.replayStreamId(); final int sessionId = (int)archive.startReplay(snapshot.recordingId, 0, NULL_LENGTH, channel, streamId); final String replaySubscriptionChannel = ChannelUri.addSessionId(channel, sessionId); try (Subscription subscription = aeron.addSubscription(replaySubscriptionChannel, streamId)) { final Image image = awaitImage(sessionId, subscription); final ConsensusModuleSnapshotLoader snapshotLoader = new ConsensusModuleSnapshotLoader(image, this); while (true) { final int fragments = snapshotLoader.poll(); if (fragments == 0) { if (snapshotLoader.isDone()) { break; } if (image.isClosed()) { throw new ClusterException("snapshot ended unexpectedly"); } } idle(fragments); } final int appVersion = snapshotLoader.appVersion(); if (SemanticVersion.major(ctx.appVersion()) != SemanticVersion.major(appVersion)) { throw new ClusterException( "incompatible version: " + SemanticVersion.toString(ctx.appVersion()) + " snapshot=" + SemanticVersion.toString(appVersion)); } final TimeUnit timeUnit = snapshotLoader.timeUnit(); if (timeUnit != clusterTimeUnit) { throw new ClusterException("incompatible time unit: " + clusterTimeUnit + " snapshot=" + timeUnit); } pendingServiceMessages.forEach(this::serviceSessionMessageReset, Integer.MAX_VALUE); } timerService.currentTickTime(clusterClock.time()); leadershipTermId(snapshot.leadershipTermId); commitPosition.setOrdered(snapshot.logPosition); expectedAckPosition = snapshot.logPosition; } private Image awaitImage(final int sessionId, final Subscription subscription) { idleStrategy.reset(); Image image; while ((image = subscription.imageBySessionId(sessionId)) == null) { idle(); } return image; } private Counter addRecoveryStateCounter(final RecordingLog.RecoveryPlan plan) { final int snapshotsCount = plan.snapshots.size(); if (snapshotsCount > 0) { final long[] serviceSnapshotRecordingIds = new long[snapshotsCount - 1]; final RecordingLog.Snapshot snapshot = plan.snapshots.get(0); for (int i = 1; i < snapshotsCount; i++) { final RecordingLog.Snapshot serviceSnapshot = plan.snapshots.get(i); serviceSnapshotRecordingIds[serviceSnapshot.serviceId] = serviceSnapshot.recordingId; } return RecoveryState.allocate( aeron, snapshot.leadershipTermId, snapshot.logPosition, snapshot.timestamp, ctx.clusterId(), serviceSnapshotRecordingIds); } return RecoveryState.allocate(aeron, leadershipTermId, 0, 0, ctx.clusterId()); } private DynamicJoin requiresDynamicJoin() { if (0 == activeMembers.length && null 
!= ctx.clusterConsensusEndpoints()) { return new DynamicJoin(ctx.clusterConsensusEndpoints(), archive, consensusPublisher, ctx, this); } return null; } private void captureServiceClientIds() { for (int i = 0, length = serviceClientIds.length; i < length; i++) { final ServiceAck serviceAck = serviceAckQueues[i].pollFirst(); serviceClientIds[i] = Objects.requireNonNull(serviceAck).relevantId(); } } private void handleMemberRemovals(final long commitPosition) { ClusterMember[] members = activeMembers; for (final ClusterMember member : activeMembers) { if (member.hasRequestedRemove() && member.removalPosition() <= commitPosition) { if (member.id() == memberId) { state(ConsensusModule.State.QUITTING); } members = ClusterMember.removeMember(members, member.id()); clusterMemberByIdMap.remove(member.id()); clusterMemberByIdMap.compact(); member.closePublication(ctx.countedErrorHandler()); logPublisher.removeDestination(ctx.isLogMdc(), member.logEndpoint()); pendingMemberRemovals--; } } activeMembers = members; rankedPositions = new long[ClusterMember.quorumThreshold(members.length)]; } private int updateLeaderPosition(final long nowNs) { if (null != appendPosition) { return updateLeaderPosition(nowNs, appendPosition.get()); } return 0; } int updateLeaderPosition(final long nowNs, final long position) { thisMember.logPosition(position).timeOfLastAppendPositionNs(nowNs); final long commitPosition = min(quorumPosition(activeMembers, rankedPositions), position); if (commitPosition > this.commitPosition.getWeak() || nowNs >= (timeOfLastLogUpdateNs + leaderHeartbeatIntervalNs)) { for (final ClusterMember member : activeMembers) { if (member.id() != memberId) { consensusPublisher.commitPosition( member.publication(), leadershipTermId, commitPosition, memberId); } } this.commitPosition.setOrdered(commitPosition); timeOfLastLogUpdateNs = nowNs; clearUncommittedEntriesTo(commitPosition); if (pendingMemberRemovals > 0) { handleMemberRemovals(commitPosition); } return 1; } return 0; } LogReplication newLogReplication( final String leaderArchiveEndpoint, final long leaderRecordingId, final long stopPosition, final long nowNs) { return new LogReplication( archive, leaderRecordingId, logRecordingId, stopPosition, leaderArchiveEndpoint, ctx.replicationChannel(), ctx.leaderHeartbeatTimeoutNs(), ctx.leaderHeartbeatIntervalNs(), nowNs); } private int updateFollowerPosition(final long nowNs) { final long recordedPosition = null != appendPosition ? 
appendPosition.get() : logRecordedPosition; final long position = Math.max(recordedPosition, lastAppendPosition); if ((recordedPosition > lastAppendPosition || nowNs >= (timeOfLastAppendPositionNs + leaderHeartbeatIntervalNs)) && consensusPublisher.appendPosition(leaderMember.publication(), leadershipTermId, position, memberId)) { lastAppendPosition = position; timeOfLastAppendPositionNs = nowNs; return 1; } return 0; } private void clearSessionsAfter(final long logPosition) { for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); ) { final ClusterSession session = i.next(); if (session.openedLogPosition() > logPosition) { i.remove(); egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, "election"); session.close(ctx.countedErrorHandler()); } } for (final ClusterSession session : pendingSessions) { egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, "election"); session.close(ctx.countedErrorHandler()); } pendingSessions.clear(); } private void clearUncommittedEntriesTo(final long commitPosition) { if (uncommittedServiceMessages > 0) { pendingServiceMessageHeadOffset -= pendingServiceMessages.consume( leaderServiceSessionMessageSweeper, Integer.MAX_VALUE); } while (uncommittedTimers.peekLong() <= commitPosition) { uncommittedTimers.pollLong(); uncommittedTimers.pollLong(); } while (true) { final ClusterSession clusterSession = uncommittedClosedSessions.peekFirst(); if (null == clusterSession || clusterSession.closedLogPosition() > commitPosition) { break; } uncommittedClosedSessions.pollFirst(); } } private void restoreUncommittedEntries(final long commitPosition) { for (final LongArrayQueue.LongIterator i = uncommittedTimers.iterator(); i.hasNext(); ) { final long appendPosition = i.nextValue(); final long correlationId = i.nextValue(); if (appendPosition > commitPosition) { timerService.scheduleTimerForCorrelationId(correlationId, timerService.currentTickTime()); } } uncommittedTimers.clear(); pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE); pendingServiceMessageHeadOffset = 0; if (uncommittedServiceMessages > 0) { pendingServiceMessages.consume(leaderServiceSessionMessageSweeper, Integer.MAX_VALUE); pendingServiceMessages.forEach(this::serviceSessionMessageReset, Integer.MAX_VALUE); uncommittedServiceMessages = 0; } ClusterSession session; while (null != (session = uncommittedClosedSessions.pollFirst())) { if (session.closedLogPosition() > commitPosition) { session.closedLogPosition(NULL_POSITION); session.state(CLOSING); sessionByIdMap.put(session.id(), session); } } } private void enterElection() { role(Cluster.Role.FOLLOWER); election = new Election( false, leadershipTermId, commitPosition.getWeak(), null != appendPosition ? 
appendPosition.get() : recoveryPlan.appendedLogPosition, activeMembers, clusterMemberByIdMap, thisMember, consensusPublisher, ctx, this); } private void idle() { checkInterruptStatus(); aeronClientInvoker.invoke(); if (aeron.isClosed()) { throw new AgentTerminationException("unexpected Aeron close"); } idleStrategy.idle(); pollArchiveEvents(); } private void idle(final int workCount) { checkInterruptStatus(); aeronClientInvoker.invoke(); if (aeron.isClosed()) { throw new AgentTerminationException("unexpected Aeron close"); } idleStrategy.idle(workCount); if (0 == workCount) { pollArchiveEvents(); } } private static void checkInterruptStatus() { if (Thread.currentThread().isInterrupted()) { throw new AgentTerminationException("interrupted"); } } private void takeSnapshot(final long timestamp, final long logPosition, final ServiceAck[] serviceAcks) { final long recordingId; try (ExclusivePublication publication = aeron.addExclusivePublication( ctx.snapshotChannel(), ctx.snapshotStreamId())) { final String channel = ChannelUri.addSessionId(ctx.snapshotChannel(), publication.sessionId()); archive.startRecording(channel, ctx.snapshotStreamId(), LOCAL, true); final CountersReader counters = aeron.countersReader(); final int counterId = awaitRecordingCounter(counters, publication.sessionId()); recordingId = RecordingPos.getRecordingId(counters, counterId); snapshotState(publication, logPosition, replayLeadershipTermId); awaitRecordingComplete(recordingId, publication.position(), counters, counterId); } catch (final ArchiveException ex) { if (ex.errorCode() == ArchiveException.STORAGE_SPACE) { ctx.countedErrorHandler().onError(ex); unexpectedTermination(); } throw ex; } final long termBaseLogPosition = recordingLog.getTermEntry(replayLeadershipTermId).termBaseLogPosition; for (int serviceId = serviceAcks.length - 1; serviceId >= 0; serviceId--) { final long snapshotId = serviceAcks[serviceId].relevantId(); recordingLog.appendSnapshot( snapshotId, replayLeadershipTermId, termBaseLogPosition, logPosition, timestamp, serviceId); } recordingLog.appendSnapshot( recordingId, replayLeadershipTermId, termBaseLogPosition, logPosition, timestamp, SERVICE_ID); recordingLog.force(ctx.fileSyncLevel()); recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), Aeron.NULL_VALUE); ctx.snapshotCounter().incrementOrdered(); final long nowNs = clusterClock.timeNanos(); for (final ClusterSession session : sessionByIdMap.values()) { session.timeOfLastActivityNs(nowNs); } } private void awaitRecordingComplete( final long recordingId, final long position, final CountersReader counters, final int counterId) { idleStrategy.reset(); while (counters.getCounterValue(counterId) < position) { idle(); if (!RecordingPos.isActive(counters, counterId, recordingId)) { throw new ClusterException("recording has stopped unexpectedly: " + recordingId); } } } private int awaitRecordingCounter(final CountersReader counters, final int sessionId) { idleStrategy.reset(); int counterId = RecordingPos.findCounterIdBySession(counters, sessionId); while (CountersReader.NULL_COUNTER_ID == counterId) { idle(); counterId = RecordingPos.findCounterIdBySession(counters, sessionId); } return counterId; } private void snapshotState( final ExclusivePublication publication, final long logPosition, final long leadershipTermId) { final ConsensusModuleSnapshotTaker snapshotTaker = new ConsensusModuleSnapshotTaker( publication, idleStrategy, aeronClientInvoker); snapshotTaker.markBegin(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0, 
clusterTimeUnit, ctx.appVersion()); snapshotTaker.snapshotConsensusModuleState( nextSessionId, nextServiceSessionId, logServiceSessionId, pendingServiceMessages.size()); snapshotTaker.snapshotClusterMembers(memberId, highMemberId, activeMembers); for (final ClusterSession session : sessionByIdMap.values()) { if (session.state() == OPEN || session.state() == CLOSED) { snapshotTaker.snapshotSession(session); } } timerService.snapshot(snapshotTaker); snapshotTaker.snapshot(pendingServiceMessages); snapshotTaker.markEnd(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0, clusterTimeUnit, ctx.appVersion()); } private void clusterMemberJoined(final int memberId, final ClusterMember[] newMembers) { highMemberId = Math.max(highMemberId, memberId); final ClusterMember eventMember = ClusterMember.findMember(newMembers, memberId); if (null != eventMember) { if (null == eventMember.publication()) { ClusterMember.addConsensusPublication( eventMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron); } activeMembers = ClusterMember.addMember(activeMembers, eventMember); clusterMemberByIdMap.put(memberId, eventMember); rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)]; } } private void clusterMemberQuit(final int memberId) { activeMembers = ClusterMember.removeMember(activeMembers, memberId); clusterMemberByIdMap.remove(memberId); rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)]; } private void onUnavailableIngressImage(final Image image) { ingressAdapter.freeSessionBuffer(image.sessionId()); } private void enqueueServiceSessionMessage( final MutableDirectBuffer buffer, final int offset, final int length, final long clusterSessionId) { final int headerOffset = offset - SessionMessageHeaderDecoder.BLOCK_LENGTH; final int clusterSessionIdOffset = headerOffset + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset(); final int timestampOffset = headerOffset + SessionMessageHeaderDecoder.timestampEncodingOffset(); buffer.putLong(clusterSessionIdOffset, clusterSessionId, SessionMessageHeaderDecoder.BYTE_ORDER); buffer.putLong(timestampOffset, Long.MAX_VALUE, SessionMessageHeaderDecoder.BYTE_ORDER); if (!pendingServiceMessages.append(buffer, offset - SESSION_HEADER_LENGTH, length + SESSION_HEADER_LENGTH)) { throw new ClusterException("pending service message buffer capacity: " + pendingServiceMessages.size()); } } private boolean serviceSessionMessageAppender( final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset) { final int headerOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH; final int clusterSessionIdOffset = headerOffset + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset(); final int timestampOffset = headerOffset + SessionMessageHeaderDecoder.timestampEncodingOffset(); final long clusterSessionId = buffer.getLong(clusterSessionIdOffset, SessionMessageHeaderDecoder.BYTE_ORDER); final long appendPosition = logPublisher.appendMessage( leadershipTermId, clusterSessionId, clusterClock.time(), buffer, offset + SESSION_HEADER_LENGTH, length - SESSION_HEADER_LENGTH); if (appendPosition > 0) { ++uncommittedServiceMessages; logServiceSessionId = clusterSessionId; pendingServiceMessageHeadOffset = headOffset; buffer.putLong(timestampOffset, appendPosition, SessionMessageHeaderEncoder.BYTE_ORDER); return true; } return false; } private boolean serviceSessionMessageReset( final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset) { final int 
        timestampOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH +
            SessionMessageHeaderDecoder.timestampEncodingOffset();
        final long appendPosition = buffer.getLong(timestampOffset, SessionMessageHeaderDecoder.BYTE_ORDER);

        if (appendPosition < Long.MAX_VALUE)
        {
            buffer.putLong(timestampOffset, Long.MAX_VALUE, SessionMessageHeaderEncoder.BYTE_ORDER);
            return true;
        }

        return false;
    }

    private boolean leaderServiceSessionMessageSweeper(
        final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
    {
        final int timestampOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH +
            SessionMessageHeaderDecoder.timestampEncodingOffset();
        final long appendPosition = buffer.getLong(timestampOffset, SessionMessageHeaderDecoder.BYTE_ORDER);

        if (appendPosition <= commitPosition.getWeak())
        {
            --uncommittedServiceMessages;
            return true;
        }

        return false;
    }

    private boolean followerServiceSessionMessageSweeper(
        final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
    {
        final int clusterSessionIdOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH +
            SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset();

        return buffer.getLong(clusterSessionIdOffset, SessionMessageHeaderDecoder.BYTE_ORDER) <= logServiceSessionId;
    }

    private void onUnavailableCounter(final CountersReader counters, final long registrationId, final int counterId)
    {
        if (ConsensusModule.State.TERMINATING != state && ConsensusModule.State.QUITTING != state)
        {
            for (final long clientId : serviceClientIds)
            {
                if (registrationId == clientId)
                {
                    ctx.countedErrorHandler().onError(new ClusterException(
                        "Aeron client in service closed unexpectedly", WARN));
                    state(ConsensusModule.State.CLOSED);
                    return;
                }
            }

            if (null != appendPosition && appendPosition.registrationId() == registrationId)
            {
                appendPosition = null;
                logSubscriptionId = NULL_VALUE;

                if (null != election)
                {
                    election.handleError(
                        clusterClock.timeNanos(), new ClusterException("log recording ended unexpectedly", WARN));
                }
                else if (NULL_POSITION == terminationPosition)
                {
                    ctx.countedErrorHandler().onError(new ClusterException("log recording ended unexpectedly", WARN));
                    isElectionRequired = true;
                }
            }
        }
    }

    private void closeAndTerminate()
    {
        tryStopLogRecording();
        state(ConsensusModule.State.CLOSED);
        terminateAgent();
    }

    private void unexpectedTermination()
    {
        aeron.removeUnavailableCounterHandler(unavailableCounterHandlerRegistrationId);
        serviceProxy.terminationPosition(0, ctx.countedErrorHandler());
        tryStopLogRecording();
        state(ConsensusModule.State.CLOSED);
        terminateAgent();
    }

    private void terminateAgent()
    {
        try
        {
            ctx.terminationHook().run();
        }
        catch (final Throwable ex)
        {
            ctx.countedErrorHandler().onError(ex);
        }

        throw new ClusterTerminationException();
    }

    private void tryStopLogRecording()
    {
        appendPosition = null;

        if (NULL_VALUE != logSubscriptionId && archive.archiveProxy().publication().isConnected())
        {
            try
            {
                archive.tryStopRecording(logSubscriptionId);
            }
            catch (final Exception ex)
            {
                ctx.countedErrorHandler().onError(new ClusterException(ex, WARN));
            }

            logSubscriptionId = NULL_VALUE;
        }

        if (NULL_VALUE != logRecordingId && archive.archiveProxy().publication().isConnected())
        {
            try
            {
                archive.tryStopRecordingByIdentity(logRecordingId);
            }
            catch (final Exception ex)
            {
                ctx.countedErrorHandler().onError(new ClusterException(ex, WARN));
            }
        }
    }

    private long getLastAppendedPosition()
    {
        idleStrategy.reset();
        while (true)
        {
            final long appendPosition = archive.getStopPosition(logRecordingId);
            if (NULL_POSITION != appendPosition)
            {
                return appendPosition;
            }

            idle();
        }
    }

    private void appendDynamicJoinTermAndSnapshots()
    {
        if (!dynamicJoinSnapshots.isEmpty())
        {
            final RecordingLog.Snapshot lastSnapshot = dynamicJoinSnapshots.get(dynamicJoinSnapshots.size() - 1);

            recordingLog.appendTerm(
                logRecordingId,
                lastSnapshot.leadershipTermId,
                lastSnapshot.termBaseLogPosition,
                lastSnapshot.timestamp);

            for (int i = dynamicJoinSnapshots.size() - 1; i >= 0; i--)
            {
                final RecordingLog.Snapshot snapshot = dynamicJoinSnapshots.get(i);

                recordingLog.appendSnapshot(
                    snapshot.recordingId,
                    snapshot.leadershipTermId,
                    snapshot.termBaseLogPosition,
                    snapshot.logPosition,
                    snapshot.timestamp,
                    snapshot.serviceId);
            }

            dynamicJoinSnapshots.clear();
        }
    }

    private void connectIngress()
    {
        if (!ctx.ingressChannel().contains(ENDPOINT_PARAM_NAME))
        {
            final ChannelUri ingressUri = ChannelUri.parse(ctx.ingressChannel());
            ingressUri.put(ENDPOINT_PARAM_NAME, thisMember.ingressEndpoint());

            ingressAdapter.connect(aeron.addSubscription(
                ingressUri.toString(), ctx.ingressStreamId(), null, this::onUnavailableIngressImage));
        }
        else if (Cluster.Role.LEADER == role)
        {
            ingressAdapter.connect(aeron.addSubscription(
                ctx.ingressChannel(), ctx.ingressStreamId(), null, this::onUnavailableIngressImage));
        }
    }

    public String toString()
    {
        return "ConsensusModuleAgent{" +
            "election=" + election +
            '}';
    }
}
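The getLastAppendedPosition() method above spins on archive.getStopPosition() until the archive reports a valid stop position, applying the agent's IdleStrategy between attempts. The following is a minimal, self-contained sketch of that poll-and-idle pattern; PollUntilAvailableExample, its toy LongSupplier, and the chosen YieldingIdleStrategy are illustrative stand-ins and are not part of ConsensusModuleAgent.

import java.util.function.LongSupplier;

import org.agrona.concurrent.IdleStrategy;
import org.agrona.concurrent.YieldingIdleStrategy;

class PollUntilAvailableExample
{
    static final long NULL_POSITION = -1L;

    // Poll a query until it yields a valid value, idling between attempts so the
    // calling thread does not burn CPU while the source catches up.
    static long pollUntilAvailable(final LongSupplier query, final IdleStrategy idleStrategy)
    {
        idleStrategy.reset();
        while (true)
        {
            final long position = query.getAsLong();
            if (NULL_POSITION != position)
            {
                return position;
            }

            idleStrategy.idle();
        }
    }

    public static void main(final String[] args)
    {
        // Toy query that becomes available on the third attempt.
        final int[] attempts = { 3 };
        final long position = pollUntilAvailable(
            () -> --attempts[0] <= 0 ? 1024L : NULL_POSITION, new YieldingIdleStrategy());

        System.out.println("append position = " + position);
    }
}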
aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java
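The ConsensusModuleAgent source that follows queues pending service messages in an org.agrona.ExpandableRingBuffer and reclaims them with ExpandableRingBuffer.MessageConsumer callbacks (the *ServiceSessionMessageSweeper methods). The sketch below shows that consume-until-the-consumer-returns-false pattern in isolation, assuming each message carries a single long "append position"; it is illustrative only and not taken from the file.

import org.agrona.ExpandableRingBuffer;
import org.agrona.concurrent.UnsafeBuffer;

class RingBufferSweepExample
{
    public static void main(final String[] args)
    {
        final ExpandableRingBuffer pending = new ExpandableRingBuffer();
        final UnsafeBuffer msg = new UnsafeBuffer(new byte[Long.BYTES]);

        // Append three messages, each carrying a single long "append position".
        for (long position = 1; position <= 3; position++)
        {
            msg.putLong(0, position);
            pending.append(msg, 0, Long.BYTES);
        }

        // Sweep (consume) from the head while messages are at or below the commit
        // position; returning false from the consumer stops the sweep and leaves the
        // uncommitted tail in place, the same shape as the sweeper callbacks in the file.
        final long commitPosition = 2;
        final int[] sweptCount = { 0 };

        pending.consume(
            (buffer, offset, length, headOffset) ->
            {
                if (buffer.getLong(offset) <= commitPosition)
                {
                    sweptCount[0]++;
                    return true;
                }

                return false;
            },
            Integer.MAX_VALUE);

        System.out.println("messages swept = " + sweptCount[0]);
    }
}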
/* * Copyright 2014-2021 Real Logic Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron.cluster; import io.aeron.*; import io.aeron.archive.client.AeronArchive; import io.aeron.archive.client.ArchiveException; import io.aeron.archive.client.RecordingSignalPoller; import io.aeron.archive.codecs.*; import io.aeron.archive.status.RecordingPos; import io.aeron.cluster.client.AeronCluster; import io.aeron.cluster.client.ClusterException; import io.aeron.cluster.codecs.MessageHeaderDecoder; import io.aeron.cluster.codecs.*; import io.aeron.cluster.service.*; import io.aeron.exceptions.AeronException; import io.aeron.logbuffer.ControlledFragmentHandler; import io.aeron.security.Authenticator; import io.aeron.status.ReadableCounter; import org.agrona.*; import org.agrona.collections.*; import org.agrona.concurrent.*; import org.agrona.concurrent.status.CountersReader; import java.util.*; import java.util.concurrent.TimeUnit; import static io.aeron.Aeron.NULL_VALUE; import static io.aeron.CommonContext.*; import static io.aeron.archive.client.AeronArchive.NULL_LENGTH; import static io.aeron.archive.client.AeronArchive.NULL_POSITION; import static io.aeron.archive.client.ReplayMerge.LIVE_ADD_MAX_WINDOW; import static io.aeron.archive.codecs.SourceLocation.LOCAL; import static io.aeron.cluster.ClusterMember.quorumPosition; import static io.aeron.cluster.ClusterSession.State.*; import static io.aeron.cluster.ConsensusModule.Configuration.*; import static io.aeron.cluster.client.AeronCluster.SESSION_HEADER_LENGTH; import static io.aeron.cluster.service.ClusteredServiceContainer.Configuration.MARK_FILE_UPDATE_INTERVAL_NS; import static io.aeron.exceptions.AeronException.Category.WARN; import static java.lang.Math.min; import static org.agrona.BitUtil.findNextPositivePowerOfTwo; final class ConsensusModuleAgent implements Agent { static final long SLOW_TICK_INTERVAL_NS = TimeUnit.MILLISECONDS.toNanos(10); private static final int SERVICE_MESSAGE_LIMIT = 20; private final long sessionTimeoutNs; private final long leaderHeartbeatIntervalNs; private final long leaderHeartbeatTimeoutNs; private long unavailableCounterHandlerRegistrationId; private long nextSessionId = 1; private long nextServiceSessionId = Long.MIN_VALUE + 1; private long logServiceSessionId = Long.MIN_VALUE; private long leadershipTermId = NULL_VALUE; private long replayLeadershipTermId = NULL_VALUE; private long expectedAckPosition = 0; private long serviceAckId = 0; private long terminationPosition = NULL_POSITION; private long notifiedCommitPosition = 0; private long lastAppendPosition = 0; private long timeOfLastLogUpdateNs = 0; private long timeOfLastAppendPositionNs = 0; private long slowTickDeadlineNs = 0; private long markFileUpdateDeadlineNs = 0; private int pendingServiceMessageHeadOffset = 0; private int uncommittedServiceMessages = 0; private int memberId; private int highMemberId; private int pendingMemberRemovals = 0; private long logPublicationChannelTag; private ReadableCounter appendPosition = null; private final 
Counter commitPosition; private ConsensusModule.State state = ConsensusModule.State.INIT; private Cluster.Role role = Cluster.Role.FOLLOWER; private ClusterMember[] activeMembers; private ClusterMember[] passiveMembers = ClusterMember.EMPTY_MEMBERS; private ClusterMember leaderMember; private ClusterMember thisMember; private long[] rankedPositions; private final long[] serviceClientIds; private final ArrayDeque<ServiceAck>[] serviceAckQueues; private final Counter clusterRoleCounter; private final ClusterMarkFile markFile; private final AgentInvoker aeronClientInvoker; private final ClusterClock clusterClock; private final TimeUnit clusterTimeUnit; private final Counter moduleState; private final Counter controlToggle; private final TimerService timerService; private final ConsensusModuleAdapter consensusModuleAdapter; private final ServiceProxy serviceProxy; private final IngressAdapter ingressAdapter; private final EgressPublisher egressPublisher; private final LogPublisher logPublisher; private final LogAdapter logAdapter; private final ConsensusAdapter consensusAdapter; private final ConsensusPublisher consensusPublisher = new ConsensusPublisher(); private final Long2ObjectHashMap<ClusterSession> sessionByIdMap = new Long2ObjectHashMap<>(); private final ArrayList<ClusterSession> pendingSessions = new ArrayList<>(); private final ArrayList<ClusterSession> rejectedSessions = new ArrayList<>(); private final ArrayList<ClusterSession> redirectSessions = new ArrayList<>(); private final Int2ObjectHashMap<ClusterMember> clusterMemberByIdMap = new Int2ObjectHashMap<>(); private final Long2LongCounterMap expiredTimerCountByCorrelationIdMap = new Long2LongCounterMap(0); private final ArrayDeque<ClusterSession> uncommittedClosedSessions = new ArrayDeque<>(); private final LongArrayQueue uncommittedTimers = new LongArrayQueue(Long.MAX_VALUE); private final ExpandableRingBuffer pendingServiceMessages = new ExpandableRingBuffer(); private final ExpandableRingBuffer.MessageConsumer serviceSessionMessageAppender = this::serviceSessionMessageAppender; private final ExpandableRingBuffer.MessageConsumer leaderServiceSessionMessageSweeper = this::leaderServiceSessionMessageSweeper; private final ExpandableRingBuffer.MessageConsumer followerServiceSessionMessageSweeper = this::followerServiceSessionMessageSweeper; private final Authenticator authenticator; private final ClusterSessionProxy sessionProxy; private final Aeron aeron; private final ConsensusModule.Context ctx; private final IdleStrategy idleStrategy; private final RecordingLog recordingLog; private final ArrayList<RecordingLog.Snapshot> dynamicJoinSnapshots = new ArrayList<>(); private RecordingLog.RecoveryPlan recoveryPlan; private AeronArchive archive; private RecordingSignalPoller recordingSignalPoller; private Election election; private DynamicJoin dynamicJoin; private ClusterTermination clusterTermination; private long logSubscriptionId = NULL_VALUE; private long logRecordingId = NULL_VALUE; private long logRecordedPosition = NULL_POSITION; private String liveLogDestination; private String catchupLogDestination; private String ingressEndpoints; private boolean isElectionRequired; ConsensusModuleAgent(final ConsensusModule.Context ctx) { this.ctx = ctx; this.aeron = ctx.aeron(); this.clusterClock = ctx.clusterClock(); this.clusterTimeUnit = clusterClock.timeUnit(); this.sessionTimeoutNs = ctx.sessionTimeoutNs(); this.leaderHeartbeatIntervalNs = ctx.leaderHeartbeatIntervalNs(); this.leaderHeartbeatTimeoutNs = 
ctx.leaderHeartbeatTimeoutNs(); this.egressPublisher = ctx.egressPublisher(); this.moduleState = ctx.moduleStateCounter(); this.commitPosition = ctx.commitPositionCounter(); this.controlToggle = ctx.controlToggleCounter(); this.logPublisher = ctx.logPublisher(); this.idleStrategy = ctx.idleStrategy(); this.timerService = new TimerService( this, clusterTimeUnit, 0, findNextPositivePowerOfTwo(clusterTimeUnit.convert(ctx.wheelTickResolutionNs(), TimeUnit.NANOSECONDS)), ctx.ticksPerWheel()); this.activeMembers = ClusterMember.parse(ctx.clusterMembers()); this.sessionProxy = new ClusterSessionProxy(egressPublisher); this.memberId = ctx.clusterMemberId(); this.clusterRoleCounter = ctx.clusterNodeRoleCounter(); this.markFile = ctx.clusterMarkFile(); this.recordingLog = ctx.recordingLog(); this.serviceClientIds = new long[ctx.serviceCount()]; Arrays.fill(serviceClientIds, NULL_VALUE); this.serviceAckQueues = ServiceAck.newArray(ctx.serviceCount()); this.highMemberId = ClusterMember.highMemberId(activeMembers); aeronClientInvoker = aeron.conductorAgentInvoker(); aeronClientInvoker.invoke(); rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)]; role(Cluster.Role.FOLLOWER); ClusterMember.addClusterMemberIds(activeMembers, clusterMemberByIdMap); thisMember = ClusterMember.determineMember(activeMembers, ctx.clusterMemberId(), ctx.memberEndpoints()); leaderMember = thisMember; final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel()); if (!consensusUri.containsKey(ENDPOINT_PARAM_NAME)) { consensusUri.put(ENDPOINT_PARAM_NAME, thisMember.consensusEndpoint()); } consensusAdapter = new ConsensusAdapter( aeron.addSubscription(consensusUri.toString(), ctx.consensusStreamId()), this); ClusterMember.addConsensusPublications(activeMembers, thisMember, consensusUri, ctx.consensusStreamId(), aeron); ingressAdapter = new IngressAdapter(ctx.ingressFragmentLimit(), this); logAdapter = new LogAdapter(this, ctx.logFragmentLimit()); consensusModuleAdapter = new ConsensusModuleAdapter( aeron.addSubscription(ctx.controlChannel(), ctx.consensusModuleStreamId()), this); serviceProxy = new ServiceProxy(aeron.addPublication(ctx.controlChannel(), ctx.serviceStreamId())); authenticator = ctx.authenticatorSupplier().get(); } /** * {@inheritDoc} */ public void onClose() { if (!aeron.isClosed()) { aeron.removeUnavailableCounterHandler(unavailableCounterHandlerRegistrationId); tryStopLogRecording(); if (!ctx.ownsAeronClient()) { logPublisher.disconnect(ctx.countedErrorHandler()); logAdapter.disconnect(ctx.countedErrorHandler()); final CountedErrorHandler errorHandler = ctx.countedErrorHandler(); for (final ClusterSession session : sessionByIdMap.values()) { session.close(errorHandler); } CloseHelper.close(errorHandler, ingressAdapter); ClusterMember.closeConsensusPublications(errorHandler, activeMembers); CloseHelper.close(errorHandler, consensusAdapter); CloseHelper.close(errorHandler, serviceProxy); CloseHelper.close(errorHandler, consensusModuleAdapter); CloseHelper.close(errorHandler, archive); } state(ConsensusModule.State.CLOSED); } markFile.updateActivityTimestamp(NULL_VALUE); ctx.close(); } /** * {@inheritDoc} */ public void onStart() { archive = AeronArchive.connect(ctx.archiveContext().clone()); recordingSignalPoller = new RecordingSignalPoller( archive.controlSessionId(), archive.controlResponsePoller().subscription()); if (null == (dynamicJoin = requiresDynamicJoin())) { final long lastTermRecordingId = recordingLog.findLastTermRecordingId(); if (NULL_VALUE != 
lastTermRecordingId) { archive.tryStopRecordingByIdentity(lastTermRecordingId); } recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId); if (null != recoveryPlan.log) { logRecordingId = recoveryPlan.log.recordingId; } try (Counter ignore = addRecoveryStateCounter(recoveryPlan)) { if (!recoveryPlan.snapshots.isEmpty()) { loadSnapshot(recoveryPlan.snapshots.get(0), archive); } while (!ServiceAck.hasReached(expectedAckPosition, serviceAckId, serviceAckQueues)) { idle(consensusModuleAdapter.poll()); } captureServiceClientIds(); ++serviceAckId; } election = new Election( true, recoveryPlan.lastLeadershipTermId, commitPosition.getWeak(), recoveryPlan.appendedLogPosition, activeMembers, clusterMemberByIdMap, thisMember, consensusPublisher, ctx, this); state(ConsensusModule.State.ACTIVE); } unavailableCounterHandlerRegistrationId = aeron.addUnavailableCounterHandler(this::onUnavailableCounter); } /** * {@inheritDoc} */ public int doWork() { int workCount = 0; final long now = clusterClock.time(); final long nowNs = clusterTimeUnit.toNanos(now); if (nowNs >= slowTickDeadlineNs) { slowTickDeadlineNs = nowNs + SLOW_TICK_INTERVAL_NS; workCount += slowTickWork(clusterTimeUnit.toMillis(now), nowNs); } workCount += consensusAdapter.poll(); if (null != dynamicJoin) { workCount += dynamicJoin.doWork(nowNs); } else if (null != election) { workCount += election.doWork(nowNs); } else { workCount += consensusWork(now, nowNs); } return workCount; } /** * {@inheritDoc} */ public String roleName() { return "consensus-module_" + ctx.clusterId() + "_" + memberId; } void onSessionConnect( final long correlationId, final int responseStreamId, final int version, final String responseChannel, final byte[] encodedCredentials) { final long clusterSessionId = Cluster.Role.LEADER == role ? 
nextSessionId++ : NULL_VALUE; final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel); session.connect(aeron); final long now = clusterClock.time(); session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId); if (Cluster.Role.LEADER != role) { redirectSessions.add(session); } else { if (AeronCluster.Configuration.PROTOCOL_MAJOR_VERSION != SemanticVersion.major(version)) { final String detail = SESSION_INVALID_VERSION_MSG + " " + SemanticVersion.toString(version) + ", cluster is " + SemanticVersion.toString(AeronCluster.Configuration.PROTOCOL_SEMANTIC_VERSION); session.reject(EventCode.ERROR, detail); rejectedSessions.add(session); } else if (pendingSessions.size() + sessionByIdMap.size() >= ctx.maxConcurrentSessions()) { session.reject(EventCode.ERROR, SESSION_LIMIT_MSG); rejectedSessions.add(session); } else { authenticator.onConnectRequest(session.id(), encodedCredentials, clusterTimeUnit.toMillis(now)); pendingSessions.add(session); } } } void onSessionClose(final long leadershipTermId, final long clusterSessionId) { if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterSession session = sessionByIdMap.get(clusterSessionId); if (null != session && session.state() == OPEN) { session.closing(CloseReason.CLIENT_ACTION); session.disconnect(ctx.countedErrorHandler()); if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time())) { session.closedLogPosition(logPublisher.position()); uncommittedClosedSessions.addLast(session); sessionByIdMap.remove(clusterSessionId); session.close(ctx.countedErrorHandler()); } } } } ControlledFragmentAssembler.Action onIngressMessage( final long leadershipTermId, final long clusterSessionId, final DirectBuffer buffer, final int offset, final int length) { if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterSession session = sessionByIdMap.get(clusterSessionId); if (null != session && session.state() == OPEN) { final long now = clusterClock.time(); if (logPublisher.appendMessage(leadershipTermId, clusterSessionId, now, buffer, offset, length) > 0) { session.timeOfLastActivityNs(clusterTimeUnit.toNanos(now)); return ControlledFragmentHandler.Action.CONTINUE; } else { return ControlledFragmentHandler.Action.ABORT; } } } return ControlledFragmentHandler.Action.CONTINUE; } void onSessionKeepAlive(final long leadershipTermId, final long clusterSessionId) { if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterSession session = sessionByIdMap.get(clusterSessionId); if (null != session && session.state() == OPEN) { session.timeOfLastActivityNs(clusterTimeUnit.toNanos(clusterClock.time())); } } } void onChallengeResponse(final long correlationId, final long clusterSessionId, final byte[] encodedCredentials) { if (Cluster.Role.LEADER == role) { for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--) { final ClusterSession session = pendingSessions.get(i); if (session.id() == clusterSessionId && session.state() == CHALLENGED) { final long now = clusterClock.time(); final long nowMs = clusterTimeUnit.toMillis(now); session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId); authenticator.onChallengeResponse(clusterSessionId, encodedCredentials, nowMs); break; } } } } boolean onTimerEvent(final long correlationId) { final long appendPosition = logPublisher.appendTimer(correlationId, leadershipTermId, clusterClock.time()); if (appendPosition > 0) { 
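            // The timer event was appended to the log at appendPosition; queue the
            // (append position, correlation id) pair so uncommitted timer entries can be
            // reconciled against the commit position if leadership changes before this
            // position is committed (see restoreUncommittedEntries in prepareForNewLeadership).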
uncommittedTimers.offerLong(appendPosition); uncommittedTimers.offerLong(correlationId); return true; } return false; } void onCanvassPosition( final long logLeadershipTermId, final long logPosition, final long leadershipTermId, final int followerMemberId) { if (null != election) { election.onCanvassPosition(logLeadershipTermId, logPosition, leadershipTermId, followerMemberId); } else if (Cluster.Role.LEADER == role) { final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId); if (null != follower && logLeadershipTermId <= this.leadershipTermId) { final RecordingLog.Entry currentTermEntry = recordingLog.getTermEntry(this.leadershipTermId); final long termBaseLogPosition = currentTermEntry.termBaseLogPosition; final long timestamp = ctx.clusterClock().timeNanos(); final long nextLogLeadershipTermId; final long nextTermBaseLogPosition; final long nextLogPosition; if (logLeadershipTermId < this.leadershipTermId) { final RecordingLog.Entry nextLogEntry = recordingLog.findTermEntry(logLeadershipTermId + 1); nextLogLeadershipTermId = null != nextLogEntry ? nextLogEntry.leadershipTermId : this.leadershipTermId; nextTermBaseLogPosition = null != nextLogEntry ? nextLogEntry.termBaseLogPosition : termBaseLogPosition; nextLogPosition = null != nextLogEntry ? nextLogEntry.logPosition : NULL_POSITION; } else { nextLogLeadershipTermId = NULL_VALUE; nextTermBaseLogPosition = NULL_POSITION; nextLogPosition = NULL_POSITION; } final long appendPosition = logPublisher.position(); consensusPublisher.newLeadershipTerm( follower.publication(), logLeadershipTermId, nextLogLeadershipTermId, nextTermBaseLogPosition, nextLogPosition, this.leadershipTermId, termBaseLogPosition, appendPosition, logRecordingId, timestamp, memberId, logPublisher.sessionId(), false); } } } void onRequestVote( final long logLeadershipTermId, final long logPosition, final long candidateTermId, final int candidateId) { if (null != election) { election.onRequestVote(logLeadershipTermId, logPosition, candidateTermId, candidateId); } else if (candidateTermId > leadershipTermId && null == dynamicJoin) { ctx.countedErrorHandler().onError(new ClusterException("unexpected vote request", WARN)); enterElection(); } } void onVote( final long candidateTermId, final long logLeadershipTermId, final long logPosition, final int candidateMemberId, final int followerMemberId, final boolean vote) { if (null != election) { election.onVote( candidateTermId, logLeadershipTermId, logPosition, candidateMemberId, followerMemberId, vote); } } void onNewLeadershipTerm( final long logLeadershipTermId, final long nextLeadershipTermId, final long nextTermBaseLogPosition, final long nextLogPosition, final long leadershipTermId, final long termBaseLogPosition, final long logPosition, final long leaderRecordingId, final long timestamp, final int leaderId, final int logSessionId, final boolean isStartup) { if (null != election) { election.onNewLeadershipTerm( logLeadershipTermId, nextLeadershipTermId, nextTermBaseLogPosition, nextLogPosition, leadershipTermId, termBaseLogPosition, logPosition, leaderRecordingId, timestamp, leaderId, logSessionId, isStartup); } else if (Cluster.Role.FOLLOWER == role && leadershipTermId == this.leadershipTermId && leaderId == leaderMember.id()) { notifiedCommitPosition = Math.max(notifiedCommitPosition, logPosition); timeOfLastLogUpdateNs = clusterClock.timeNanos(); } else if (leadershipTermId > this.leadershipTermId && null == dynamicJoin) { ctx.countedErrorHandler().onError(new ClusterException("unexpected new leadership 
term", WARN)); enterElection(); } } void onAppendPosition(final long leadershipTermId, final long logPosition, final int followerMemberId) { if (null != election) { election.onAppendPosition(leadershipTermId, logPosition, followerMemberId); } else if (leadershipTermId <= this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId); if (null != follower) { follower .logPosition(logPosition) .timeOfLastAppendPositionNs(clusterClock.timeNanos()); trackCatchupCompletion(follower, leadershipTermId); } } } void onCommitPosition(final long leadershipTermId, final long logPosition, final int leaderMemberId) { if (null != election) { election.onCommitPosition(leadershipTermId, logPosition, leaderMemberId); } else if (leadershipTermId == this.leadershipTermId && leaderMemberId == leaderMember.id() && Cluster.Role.FOLLOWER == role) { notifiedCommitPosition = logPosition; timeOfLastLogUpdateNs = clusterClock.timeNanos(); } else if (leadershipTermId > this.leadershipTermId && null == dynamicJoin) { ctx.countedErrorHandler().onError(new ClusterException("unexpected commit position", WARN)); enterElection(); } } void onCatchupPosition( final long leadershipTermId, final long logPosition, final int followerMemberId, final String catchupEndpoint) { if (leadershipTermId <= this.leadershipTermId && Cluster.Role.LEADER == role) { final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId); if (null != follower && follower.catchupReplaySessionId() == NULL_VALUE) { final String channel = new ChannelUriStringBuilder() .media(CommonContext.UDP_MEDIA) .endpoint(catchupEndpoint) .sessionId(logPublisher.sessionId()) .linger(0L) .eos(Boolean.FALSE) .build(); follower.catchupReplaySessionId(archive.startReplay( logRecordingId, logPosition, Long.MAX_VALUE, channel, ctx.logStreamId())); follower.catchupReplayCorrelationId(archive.lastCorrelationId()); } } } void onStopCatchup(final long leadershipTermId, final int followerMemberId) { if (leadershipTermId == this.replayLeadershipTermId && followerMemberId == memberId) { if (null != catchupLogDestination) { logAdapter.removeDestination(catchupLogDestination); catchupLogDestination = null; } } } void onAddPassiveMember(final long correlationId, final String memberEndpoints) { if (null == election && null == dynamicJoin) { if (Cluster.Role.LEADER == role) { if (ClusterMember.notDuplicateEndpoint(passiveMembers, memberEndpoints)) { final ClusterMember newMember = ClusterMember.parseEndpoints(++highMemberId, memberEndpoints); newMember.correlationId(correlationId); passiveMembers = ClusterMember.addMember(passiveMembers, newMember); clusterMemberByIdMap.put(newMember.id(), newMember); ClusterMember.addConsensusPublication( newMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron); logPublisher.addDestination(ctx.isLogMdc(), newMember.logEndpoint()); } } else if (Cluster.Role.FOLLOWER == role) { consensusPublisher.addPassiveMember(leaderMember.publication(), correlationId, memberEndpoints); } } } void onClusterMembersChange( final long correlationId, final int leaderMemberId, final String activeMembers, final String passiveMembers) { if (null != dynamicJoin) { dynamicJoin.onClusterMembersChange(correlationId, leaderMemberId, activeMembers, passiveMembers); } } void onSnapshotRecordingQuery(final long correlationId, final int requestMemberId) { if (null == election && Cluster.Role.LEADER == role) { final ClusterMember requester = 
clusterMemberByIdMap.get(requestMemberId); if (null != requester) { consensusPublisher.snapshotRecording( requester.publication(), correlationId, recoveryPlan, ClusterMember.encodeAsString(activeMembers)); } } } void onSnapshotRecordings(final long correlationId, final SnapshotRecordingsDecoder decoder) { if (null != dynamicJoin) { dynamicJoin.onSnapshotRecordings(correlationId, decoder); } } void onJoinCluster(final long leadershipTermId, final int memberId) { if (null == election && Cluster.Role.LEADER == role) { final ClusterMember member = clusterMemberByIdMap.get(memberId); final long snapshotLeadershipTermId = recoveryPlan.snapshots.isEmpty() ? NULL_VALUE : recoveryPlan.snapshots.get(0).leadershipTermId; if (null != member && !member.hasRequestedJoin() && leadershipTermId <= snapshotLeadershipTermId) { if (null == member.publication()) { final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel()); final int streamId = ctx.consensusStreamId(); ClusterMember.addConsensusPublication(member, consensusUri, streamId, aeron); logPublisher.addDestination(ctx.isLogMdc(), member.logEndpoint()); } member.hasRequestedJoin(true); } } } void onTerminationPosition(final long leadershipTermId, final long logPosition) { if (leadershipTermId == this.leadershipTermId && Cluster.Role.FOLLOWER == role) { terminationPosition = logPosition; timeOfLastLogUpdateNs = clusterClock.timeNanos(); } } void onTerminationAck(final long leadershipTermId, final long logPosition, final int memberId) { if (leadershipTermId == this.leadershipTermId && logPosition >= terminationPosition && Cluster.Role.LEADER == role) { final ClusterMember member = clusterMemberByIdMap.get(memberId); if (null != member) { member.hasTerminated(true); if (clusterTermination.canTerminate(activeMembers, terminationPosition, clusterClock.timeNanos())) { recordingLog.commitLogPosition(leadershipTermId, terminationPosition); closeAndTerminate(); } } } } void onBackupQuery( final long correlationId, final int responseStreamId, final int version, final String responseChannel, final byte[] encodedCredentials) { if (null == election && null == dynamicJoin) { if (Cluster.Role.LEADER != role) { consensusPublisher.backupQuery( leaderMember.publication(), correlationId, responseStreamId, version, responseChannel, encodedCredentials); } else if (state == ConsensusModule.State.ACTIVE || state == ConsensusModule.State.SUSPENDED) { final ClusterSession session = new ClusterSession(NULL_VALUE, responseStreamId, responseChannel); session.markAsBackupSession(); session.connect(aeron); final long now = clusterClock.time(); session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId); if (AeronCluster.Configuration.PROTOCOL_MAJOR_VERSION != SemanticVersion.major(version)) { final String detail = SESSION_INVALID_VERSION_MSG + " " + SemanticVersion.toString(version) + ", cluster=" + SemanticVersion.toString(AeronCluster.Configuration.PROTOCOL_SEMANTIC_VERSION); session.reject(EventCode.ERROR, detail); rejectedSessions.add(session); } else if (pendingSessions.size() + sessionByIdMap.size() >= ctx.maxConcurrentSessions()) { session.reject(EventCode.ERROR, SESSION_LIMIT_MSG); rejectedSessions.add(session); } else { authenticator.onConnectRequest(session.id(), encodedCredentials, clusterTimeUnit.toMillis(now)); pendingSessions.add(session); } } } } void onRemoveMember(final int memberId, final boolean isPassive) { if (null == election && Cluster.Role.LEADER == role) { final ClusterMember member = clusterMemberByIdMap.get(memberId); if (null != 
member) { if (isPassive) { passiveMembers = ClusterMember.removeMember(passiveMembers, memberId); member.closePublication(ctx.countedErrorHandler()); logPublisher.removeDestination(ctx.isLogMdc(), member.logEndpoint()); clusterMemberByIdMap.remove(memberId); clusterMemberByIdMap.compact(); } else { final long now = clusterClock.time(); final long position = logPublisher.appendMembershipChangeEvent( leadershipTermId, now, this.memberId, activeMembers.length, ChangeType.QUIT, memberId, ClusterMember.encodeAsString(ClusterMember.removeMember(activeMembers, memberId))); if (position > 0) { timeOfLastLogUpdateNs = clusterTimeUnit.toNanos(now) - leaderHeartbeatIntervalNs; member.removalPosition(position); pendingMemberRemovals++; } } } } } void onClusterMembersQuery(final long correlationId, final boolean isExtendedRequest) { if (isExtendedRequest) { serviceProxy.clusterMembersExtendedResponse( correlationId, clusterClock.timeNanos(), leaderMember.id(), memberId, activeMembers, passiveMembers); } else { serviceProxy.clusterMembersResponse( correlationId, leaderMember.id(), ClusterMember.encodeAsString(activeMembers), ClusterMember.encodeAsString(passiveMembers)); } } void state(final ConsensusModule.State newState) { if (newState != state) { stateChange(state, newState, memberId); state = newState; if (!moduleState.isClosed()) { moduleState.set(newState.code()); } } } ConsensusModule.State state() { return state; } void stateChange(final ConsensusModule.State oldState, final ConsensusModule.State newState, final int memberId) { //System.out.println("CM State memberId=" + memberId + " " + oldState + " -> " + newState); } void role(final Cluster.Role newRole) { if (newRole != role) { roleChange(role, newRole, memberId); role = newRole; if (!clusterRoleCounter.isClosed()) { clusterRoleCounter.set(newRole.code()); } } } void roleChange(final Cluster.Role oldRole, final Cluster.Role newRole, final int memberId) { //System.out.println("CM Role memberId=" + memberId + " " + oldRole + " -> " + newRole); } Cluster.Role role() { return role; } long prepareForNewLeadership(final long logPosition) { role(Cluster.Role.FOLLOWER); CloseHelper.close(ctx.countedErrorHandler(), ingressAdapter); ClusterControl.ToggleState.deactivate(controlToggle); if (null != catchupLogDestination) { logAdapter.removeDestination(catchupLogDestination); catchupLogDestination = null; } if (null != liveLogDestination) { logAdapter.removeDestination(liveLogDestination); liveLogDestination = null; } logAdapter.disconnect(ctx.countedErrorHandler()); logPublisher.disconnect(ctx.countedErrorHandler()); if (RecordingPos.NULL_RECORDING_ID != logRecordingId) { tryStopLogRecording(); lastAppendPosition = getLastAppendedPosition(); recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId); clearSessionsAfter(logPosition); for (final ClusterSession session : sessionByIdMap.values()) { session.disconnect(ctx.countedErrorHandler()); } commitPosition.setOrdered(logPosition); restoreUncommittedEntries(logPosition); } return lastAppendPosition; } void onServiceCloseSession(final long clusterSessionId) { final ClusterSession session = sessionByIdMap.get(clusterSessionId); if (null != session) { session.closing(CloseReason.SERVICE_ACTION); if (Cluster.Role.LEADER == role && logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time())) { final String msg = CloseReason.SERVICE_ACTION.name(); egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg); 
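            // Record the position at which the close was appended and park the session on the
            // uncommitted-closed queue so the close can be rolled back if the log is truncated
            // below that position before it is committed.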
session.closedLogPosition(logPublisher.position()); uncommittedClosedSessions.addLast(session); sessionByIdMap.remove(clusterSessionId); session.close(ctx.countedErrorHandler()); } } } void onServiceMessage(final long leadershipTermId, final DirectBuffer buffer, final int offset, final int length) { if (leadershipTermId == this.leadershipTermId) { enqueueServiceSessionMessage((MutableDirectBuffer)buffer, offset, length, nextServiceSessionId++); } } void onScheduleTimer(final long correlationId, final long deadline) { if (expiredTimerCountByCorrelationIdMap.get(correlationId) == 0) { timerService.scheduleTimerForCorrelationId(correlationId, deadline); } else { expiredTimerCountByCorrelationIdMap.decrementAndGet(correlationId); } } void onCancelTimer(final long correlationId) { timerService.cancelTimerByCorrelationId(correlationId); } void onServiceAck( final long logPosition, final long timestamp, final long ackId, final long relevantId, final int serviceId) { captureServiceAck(logPosition, ackId, relevantId, serviceId); if (ServiceAck.hasReached(logPosition, serviceAckId, serviceAckQueues)) { if (ConsensusModule.State.SNAPSHOT == state) { final ServiceAck[] serviceAcks = pollServiceAcks(logPosition, serviceId); ++serviceAckId; takeSnapshot(timestamp, logPosition, serviceAcks); if (null != clusterTermination) { serviceProxy.terminationPosition(terminationPosition, ctx.countedErrorHandler()); clusterTermination.deadlineNs(clusterClock.timeNanos() + ctx.terminationTimeoutNs()); state(ConsensusModule.State.TERMINATING); } else { state(ConsensusModule.State.ACTIVE); if (Cluster.Role.LEADER == role) { ClusterControl.ToggleState.reset(controlToggle); } } } else if (ConsensusModule.State.QUITTING == state) { closeAndTerminate(); } else if (ConsensusModule.State.TERMINATING == state) { if (null == clusterTermination) { consensusPublisher.terminationAck( leaderMember.publication(), leadershipTermId, logPosition, memberId); recordingLog.commitLogPosition(leadershipTermId, logPosition); closeAndTerminate(); } else { clusterTermination.onServicesTerminated(); if (clusterTermination.canTerminate( activeMembers, terminationPosition, clusterClock.timeNanos())) { recordingLog.commitLogPosition(leadershipTermId, logPosition); closeAndTerminate(); } } } } } void onReplaySessionMessage(final long clusterSessionId, final long timestamp) { final ClusterSession clusterSession = sessionByIdMap.get(clusterSessionId); if (null == clusterSession) { logServiceSessionId = clusterSessionId; pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE); } else { clusterSession.timeOfLastActivityNs(clusterTimeUnit.toNanos(timestamp)); } } void onReplayTimerEvent(final long correlationId) { if (!timerService.cancelTimerByCorrelationId(correlationId)) { expiredTimerCountByCorrelationIdMap.getAndIncrement(correlationId); } } void onReplaySessionOpen( final long logPosition, final long correlationId, final long clusterSessionId, final long timestamp, final int responseStreamId, final String responseChannel) { final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel); session.open(logPosition); session.lastActivityNs(clusterTimeUnit.toNanos(timestamp), correlationId); sessionByIdMap.put(clusterSessionId, session); if (clusterSessionId >= nextSessionId) { nextSessionId = clusterSessionId + 1; } } void onReplaySessionClose(final long clusterSessionId, final CloseReason closeReason) { final ClusterSession clusterSession = 
sessionByIdMap.remove(clusterSessionId); if (null != clusterSession) { clusterSession.closing(closeReason); clusterSession.close(ctx.countedErrorHandler()); } } void onReplayClusterAction(final long leadershipTermId, final ClusterAction action) { if (leadershipTermId == this.replayLeadershipTermId) { if (ClusterAction.SUSPEND == action) { state(ConsensusModule.State.SUSPENDED); } else if (ClusterAction.RESUME == action) { state(ConsensusModule.State.ACTIVE); } else if (ClusterAction.SNAPSHOT == action) { state(ConsensusModule.State.SNAPSHOT); } } } void onReplayNewLeadershipTermEvent( final long leadershipTermId, final long logPosition, final long timestamp, final long termBaseLogPosition, final TimeUnit timeUnit, final int appVersion) { if (timeUnit != clusterTimeUnit) { ctx.countedErrorHandler().onError(new ClusterException( "incompatible timestamp units: " + clusterTimeUnit + " log=" + timeUnit, AeronException.Category.FATAL)); unexpectedTermination(); } if (SemanticVersion.major(ctx.appVersion()) != SemanticVersion.major(appVersion)) { ctx.countedErrorHandler().onError(new ClusterException( "incompatible version: " + SemanticVersion.toString(ctx.appVersion()) + " log=" + SemanticVersion.toString(appVersion), AeronException.Category.FATAL)); unexpectedTermination(); } leadershipTermId(leadershipTermId); if (null != election) { election.onReplayNewLeadershipTermEvent( logRecordingId, leadershipTermId, logPosition, timestamp, termBaseLogPosition); } } void onReplayMembershipChange( final long leadershipTermId, final long logPosition, final int leaderMemberId, final ChangeType changeType, final int memberId, final String clusterMembers) { if (leadershipTermId == this.replayLeadershipTermId) { if (ChangeType.JOIN == changeType) { final ClusterMember[] newMembers = ClusterMember.parse(clusterMembers); if (memberId == this.memberId) { activeMembers = newMembers; clusterMemberByIdMap.clear(); clusterMemberByIdMap.compact(); ClusterMember.addClusterMemberIds(newMembers, clusterMemberByIdMap); thisMember = ClusterMember.findMember(activeMembers, memberId); leaderMember = ClusterMember.findMember(activeMembers, leaderMemberId); ClusterMember.addConsensusPublications( newMembers, thisMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron); } else { clusterMemberJoined(memberId, newMembers); } } else if (ChangeType.QUIT == changeType) { if (memberId == this.memberId) { state(ConsensusModule.State.QUITTING); } else { clusterMemberQuit(memberId); if (leaderMemberId == memberId && null == election) { commitPosition.proposeMaxOrdered(logPosition); enterElection(); } } } if (null != election) { election.onMembershipChange(activeMembers, changeType, memberId, logPosition); } } } void onLoadSession( final long clusterSessionId, final long correlationId, final long openedPosition, final long timeOfLastActivity, final CloseReason closeReason, final int responseStreamId, final String responseChannel) { sessionByIdMap.put(clusterSessionId, new ClusterSession( clusterSessionId, correlationId, openedPosition, timeOfLastActivity, responseStreamId, responseChannel, closeReason)); if (clusterSessionId >= nextSessionId) { nextSessionId = clusterSessionId + 1; } } void onLoadPendingMessage(final DirectBuffer buffer, final int offset, final int length) { pendingServiceMessages.append(buffer, offset, length); } void onLoadConsensusModuleState( final long nextSessionId, final long nextServiceSessionId, final long logServiceSessionId, final int pendingMessageCapacity) { this.nextSessionId = 
nextSessionId; this.nextServiceSessionId = nextServiceSessionId; this.logServiceSessionId = logServiceSessionId; pendingServiceMessages.reset(pendingMessageCapacity); } void onLoadClusterMembers(final int memberId, final int highMemberId, final String members) { if (null == dynamicJoin && !ctx.clusterMembersIgnoreSnapshot()) { if (NULL_VALUE == this.memberId) { this.memberId = memberId; ctx.clusterMarkFile().memberId(memberId); } if (ClusterMember.EMPTY_MEMBERS == activeMembers) { activeMembers = ClusterMember.parse(members); this.highMemberId = Math.max(ClusterMember.highMemberId(activeMembers), highMemberId); rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)]; thisMember = clusterMemberByIdMap.get(memberId); final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel()); consensusUri.put(ENDPOINT_PARAM_NAME, thisMember.consensusEndpoint()); ClusterMember.addConsensusPublications( activeMembers, thisMember, consensusUri, ctx.consensusStreamId(), aeron); } } } int addLogPublication() { final long logPublicationTag = aeron.nextCorrelationId(); logPublicationChannelTag = aeron.nextCorrelationId(); final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel()); channelUri.put(ALIAS_PARAM_NAME, "log"); channelUri.put(TAGS_PARAM_NAME, logPublicationChannelTag + "," + logPublicationTag); if (channelUri.isUdp()) { if (!channelUri.containsKey(FLOW_CONTROL_PARAM_NAME)) { final long timeout = TimeUnit.NANOSECONDS.toSeconds(ctx.leaderHeartbeatTimeoutNs()); channelUri.put(FLOW_CONTROL_PARAM_NAME, "min,t:" + timeout + "s"); } if (ctx.isLogMdc()) { channelUri.put(MDC_CONTROL_MODE_PARAM_NAME, MDC_CONTROL_MODE_MANUAL); } channelUri.put(SPIES_SIMULATE_CONNECTION_PARAM_NAME, Boolean.toString(activeMembers.length == 1)); } if (null != recoveryPlan.log) { channelUri.initialPosition( recoveryPlan.appendedLogPosition, recoveryPlan.log.initialTermId, recoveryPlan.log.termBufferLength); channelUri.put(MTU_LENGTH_PARAM_NAME, Integer.toString(recoveryPlan.log.mtuLength)); } final String channel = channelUri.toString(); final ExclusivePublication publication = aeron.addExclusivePublication(channel, ctx.logStreamId()); if (ctx.isLogMdc()) { for (final ClusterMember member : activeMembers) { if (member.id() != memberId) { publication.asyncAddDestination("aeron:udp?endpoint=" + member.logEndpoint()); } } for (final ClusterMember member : passiveMembers) { publication.asyncAddDestination("aeron:udp?endpoint=" + member.logEndpoint()); } } logPublisher.publication(publication); return publication.sessionId(); } void joinLogAsLeader( final long leadershipTermId, final long logPosition, final int logSessionId, final boolean isStartup) { final boolean isIpc = ctx.logChannel().startsWith(IPC_CHANNEL); final String channel = (isIpc ? IPC_CHANNEL : UDP_CHANNEL) + "?tags=" + logPublicationChannelTag + "|session-id=" + logSessionId + "|alias=log"; leadershipTermId(leadershipTermId); startLogRecording(channel, ctx.logStreamId(), SourceLocation.LOCAL); createAppendPosition(logSessionId); awaitServicesReady( isIpc ? 
channel : SPY_PREFIX + channel, ctx.logStreamId(), logSessionId, logPosition, Long.MAX_VALUE, isStartup, Cluster.Role.LEADER); } void liveLogDestination(final String liveLogDestination) { this.liveLogDestination = liveLogDestination; } String liveLogDestination() { return liveLogDestination; } void catchupLogDestination(final String catchupLogDestination) { this.catchupLogDestination = catchupLogDestination; } String catchupLogDestination() { return catchupLogDestination; } void joinLogAsFollower(final Image image, final boolean isLeaderStartup) { final Subscription logSubscription = image.subscription(); final int streamId = logSubscription.streamId(); final String channel = logSubscription.channel(); startLogRecording(channel, streamId, SourceLocation.REMOTE); createAppendPosition(image.sessionId()); appendDynamicJoinTermAndSnapshots(); logAdapter.image(image); lastAppendPosition = image.joinPosition(); awaitServicesReady( channel, streamId, image.sessionId(), image.joinPosition(), Long.MAX_VALUE, isLeaderStartup, Cluster.Role.FOLLOWER); } boolean tryJoinLogAsFollower(final Image image, final boolean isLeaderStartup) { final Subscription logSubscription = image.subscription(); final int streamId = logSubscription.streamId(); final String channel = logSubscription.channel(); if (NULL_VALUE == logSubscriptionId) { startLogRecording(channel, streamId, SourceLocation.REMOTE); } if (!tryCreateAppendPosition(image.sessionId())) { return false; } appendDynamicJoinTermAndSnapshots(); logAdapter.image(image); lastAppendPosition = image.joinPosition(); awaitServicesReady( channel, streamId, image.sessionId(), image.joinPosition(), Long.MAX_VALUE, isLeaderStartup, Cluster.Role.FOLLOWER); return true; } void awaitServicesReady( final String logChannel, final int streamId, final int logSessionId, final long logPosition, final long maxLogPosition, final boolean isStartup, final Cluster.Role role) { serviceProxy.joinLog( logPosition, maxLogPosition, memberId, logSessionId, streamId, isStartup, role, logChannel); expectedAckPosition = logPosition; while (!ServiceAck.hasReached(logPosition, serviceAckId, serviceAckQueues)) { idle(consensusModuleAdapter.poll()); } ServiceAck.removeHead(serviceAckQueues); ++serviceAckId; } void leadershipTermId(final long leadershipTermId) { this.leadershipTermId = leadershipTermId; this.replayLeadershipTermId = leadershipTermId; } LogReplay newLogReplay(final long logPosition, final long appendPosition) { return new LogReplay( archive, logRecordingId, logPosition, appendPosition, logAdapter, ctx); } int replayLogPoll(final LogAdapter logAdapter, final long stopPosition) { int workCount = 0; if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state) { final int fragments = logAdapter.poll(stopPosition); final long position = logAdapter.position(); if (fragments > 0) { commitPosition.setOrdered(position); } else if (logAdapter.isImageClosed() && position < stopPosition) { throw new ClusterException("unexpected image close when replaying log: position=" + position); } workCount += fragments; } workCount += consensusModuleAdapter.poll(); return workCount; } long logRecordingId() { return logRecordingId; } void logRecordingId(final long recordingId) { if (NULL_VALUE != recordingId) { logRecordingId = recordingId; } } void truncateLogEntry(final long leadershipTermId, final long logPosition) { archive.stopAllReplays(logRecordingId); archive.truncateRecording(logRecordingId, logPosition); recordingLog.commitLogPosition(leadershipTermId, logPosition); 
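        // Replays are stopped, the recording truncated, and the agreed position committed to
        // the recording log; finally detach the log adapter at that same position.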
logAdapter.disconnect(ctx.countedErrorHandler(), logPosition); } boolean electionComplete() { final long logPosition = election.logPosition(); final long now = clusterClock.time(); final long nowNs = clusterTimeUnit.toNanos(now); if (Cluster.Role.LEADER == role) { if (!logPublisher.isConnected() || !logPublisher.appendNewLeadershipTermEvent( leadershipTermId, now, logPosition, memberId, logPublisher.sessionId(), clusterTimeUnit, ctx.appVersion())) { return false; } timeOfLastLogUpdateNs = nowNs - leaderHeartbeatIntervalNs; timerService.currentTickTime(now); ClusterControl.ToggleState.activate(controlToggle); prepareSessionsForNewTerm(election.isLeaderStartup()); } else { timeOfLastLogUpdateNs = nowNs; timeOfLastAppendPositionNs = nowNs; } recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId); notifiedCommitPosition = logPosition; commitPosition.setOrdered(logPosition); pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE); updateMemberDetails(election.leader()); election = null; connectIngress(); return true; } boolean dynamicJoinComplete() { if (0 == activeMembers.length) { activeMembers = dynamicJoin.clusterMembers(); ClusterMember.addClusterMemberIds(activeMembers, clusterMemberByIdMap); leaderMember = dynamicJoin.leader(); ClusterMember.addConsensusPublications( activeMembers, thisMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron); } if (NULL_VALUE == memberId) { memberId = dynamicJoin.memberId(); ctx.clusterMarkFile().memberId(memberId); thisMember.id(memberId); } dynamicJoin = null; election = new Election( false, leadershipTermId, commitPosition.getWeak(), recoveryPlan.appendedLogPosition, activeMembers, clusterMemberByIdMap, thisMember, consensusPublisher, ctx, this); return true; } void trackCatchupCompletion(final ClusterMember follower, final long leadershipTermId) { if (NULL_VALUE != follower.catchupReplaySessionId()) { if (follower.logPosition() >= logPublisher.position()) { if (NULL_VALUE != follower.catchupReplayCorrelationId()) { if (archive.archiveProxy().stopReplay( follower.catchupReplaySessionId(), aeron.nextCorrelationId(), archive.controlSessionId())) { follower.catchupReplayCorrelationId(NULL_VALUE); } } if (consensusPublisher.stopCatchup(follower.publication(), leadershipTermId, follower.id())) { follower.catchupReplaySessionId(NULL_VALUE); } } } } void catchupInitiated(final long nowNs) { timeOfLastAppendPositionNs = nowNs; } int catchupPoll(final long limitPosition, final long nowNs) { int workCount = 0; if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state) { final int fragments = logAdapter.poll(Math.min(appendPosition.get(), limitPosition)); workCount += fragments; if (fragments == 0 && logAdapter.image().isClosed()) { throw new ClusterException("unexpected close replaying log: position=" + logAdapter.image().position()); } } final long appendPosition = logAdapter.position(); if (appendPosition > lastAppendPosition || nowNs > (timeOfLastAppendPositionNs + leaderHeartbeatIntervalNs)) { commitPosition.proposeMaxOrdered(appendPosition); final ExclusivePublication publication = election.leader().publication(); if (consensusPublisher.appendPosition(publication, replayLeadershipTermId, appendPosition, memberId)) { lastAppendPosition = appendPosition; timeOfLastAppendPositionNs = nowNs; } } if (nowNs > (timeOfLastAppendPositionNs + leaderHeartbeatTimeoutNs) && ConsensusModule.State.ACTIVE == state) { throw new ClusterException("no 
catchup progress", WARN); } workCount += consensusModuleAdapter.poll(); return workCount; } boolean isCatchupNearLive(final long position) { final Image image = logAdapter.image(); if (null != image) { final long localPosition = image.position(); final long window = Math.min(image.termBufferLength() >> 2, LIVE_ADD_MAX_WINDOW); return localPosition >= (position - window); } return false; } void stopAllCatchups() { for (final ClusterMember member : activeMembers) { if (member.catchupReplaySessionId() != NULL_VALUE) { if (member.catchupReplayCorrelationId() != NULL_VALUE) { try { archive.stopReplay(member.catchupReplaySessionId()); } catch (final Exception ex) { ctx.countedErrorHandler().onError(new ClusterException("catchup already stopped", ex, WARN)); } } member.catchupReplaySessionId(NULL_VALUE); member.catchupReplayCorrelationId(NULL_VALUE); } } } void retrievedSnapshot(final long localRecordingId, final RecordingLog.Snapshot leaderSnapshot) { dynamicJoinSnapshots.add(new RecordingLog.Snapshot( localRecordingId, leaderSnapshot.leadershipTermId, leaderSnapshot.termBaseLogPosition, leaderSnapshot.logPosition, leaderSnapshot.timestamp, leaderSnapshot.serviceId)); } Counter loadSnapshotsForDynamicJoin() { recoveryPlan = RecordingLog.createRecoveryPlan(dynamicJoinSnapshots); final Counter recoveryStateCounter = addRecoveryStateCounter(recoveryPlan); if (!recoveryPlan.snapshots.isEmpty()) { loadSnapshot(recoveryPlan.snapshots.get(0), archive); } return recoveryStateCounter; } boolean pollForSnapshotLoadAck(final Counter recoveryStateCounter, final long nowNs) { consensusModuleAdapter.poll(); if (ServiceAck.hasReached(expectedAckPosition, serviceAckId, serviceAckQueues)) { captureServiceClientIds(); ++serviceAckId; CloseHelper.close(ctx.countedErrorHandler(), recoveryStateCounter); state(ConsensusModule.State.ACTIVE); timeOfLastLogUpdateNs = nowNs; leadershipTermId(recoveryPlan.lastLeadershipTermId); return true; } return false; } int pollArchiveEvents() { int workCount = 0; if (null != archive) { final RecordingSignalPoller poller = this.recordingSignalPoller; workCount += poller.poll(); if (poller.isPollComplete()) { final int templateId = poller.templateId(); if (ControlResponseDecoder.TEMPLATE_ID == templateId && poller.code() == ControlResponseCode.ERROR) { for (final ClusterMember member : activeMembers) { if (member.catchupReplayCorrelationId() != NULL_VALUE && member.catchupReplayCorrelationId() == poller.correlationId()) { member.catchupReplaySessionId(NULL_VALUE); member.catchupReplayCorrelationId(NULL_VALUE); ctx.countedErrorHandler().onError(new ClusterException( "catchup replay failed - " + poller.errorMessage(), WARN)); return workCount; } } final ArchiveException ex = new ArchiveException( poller.errorMessage(), (int)poller.relevantId(), poller.correlationId()); if (ex.errorCode() == ArchiveException.STORAGE_SPACE) { ctx.countedErrorHandler().onError(ex); unexpectedTermination(); } if (null != election) { election.handleError(clusterClock.timeNanos(), ex); } } else if (RecordingSignalEventDecoder.TEMPLATE_ID == templateId) { final long recordingId = poller.recordingId(); final long position = poller.recordingPosition(); final RecordingSignal signal = poller.recordingSignal(); if (RecordingSignal.STOP == signal && recordingId == logRecordingId) { this.logRecordedPosition = position; } if (null != election) { election.onRecordingSignal(poller.correlationId(), recordingId, position, signal); } if (null != dynamicJoin) { dynamicJoin.onRecordingSignal(poller.correlationId(), 
recordingId, position, signal); } } } else if (0 == workCount && !poller.subscription().isConnected()) { ctx.countedErrorHandler().onError(new ClusterException("local archive is not connected", WARN)); unexpectedTermination(); } } return workCount; } private void startLogRecording(final String channel, final int streamId, final SourceLocation sourceLocation) { try { final long logRecordingId = recordingLog.findLastTermRecordingId(); if (RecordingPos.NULL_RECORDING_ID == logRecordingId) { logSubscriptionId = archive.startRecording(channel, streamId, sourceLocation, true); } else { logSubscriptionId = archive.extendRecording(logRecordingId, channel, streamId, sourceLocation, true); } } catch (final ArchiveException ex) { if (ex.errorCode() == ArchiveException.STORAGE_SPACE) { ctx.countedErrorHandler().onError(ex); unexpectedTermination(); } throw ex; } } private void prepareSessionsForNewTerm(final boolean isStartup) { if (isStartup) { for (final ClusterSession session : sessionByIdMap.values()) { if (session.state() == OPEN) { session.closing(CloseReason.TIMEOUT); } } } else { for (final ClusterSession session : sessionByIdMap.values()) { if (session.state() == OPEN) { session.connect(aeron); } } final long nowNs = clusterClock.timeNanos(); for (final ClusterSession session : sessionByIdMap.values()) { if (session.state() == OPEN) { session.timeOfLastActivityNs(nowNs); session.hasNewLeaderEventPending(true); } } } } private void updateMemberDetails(final ClusterMember newLeader) { leaderMember = newLeader; for (final ClusterMember clusterMember : activeMembers) { clusterMember.isLeader(clusterMember.id() == leaderMember.id()); } ingressEndpoints = ClusterMember.ingressEndpoints(activeMembers); } private int slowTickWork(final long nowMs, final long nowNs) { int workCount = aeronClientInvoker.invoke(); if (aeron.isClosed()) { throw new AgentTerminationException("unexpected Aeron close"); } else if (ConsensusModule.State.CLOSED == state) { unexpectedTermination(); } else if (isElectionRequired) { enterElection(); isElectionRequired = false; } if (nowNs >= markFileUpdateDeadlineNs) { markFileUpdateDeadlineNs = nowNs + MARK_FILE_UPDATE_INTERVAL_NS; markFile.updateActivityTimestamp(nowMs); } workCount += pollArchiveEvents(); workCount += sendRedirects(redirectSessions, nowNs); workCount += sendRejections(rejectedSessions, nowNs); if (null == election) { if (Cluster.Role.LEADER == role) { workCount += checkControlToggle(nowNs); if (ConsensusModule.State.ACTIVE == state) { workCount += processPendingSessions(pendingSessions, nowMs, nowNs); workCount += checkSessions(sessionByIdMap, nowNs); workCount += processPassiveMembers(passiveMembers); if (!ClusterMember.hasActiveQuorum(activeMembers, nowNs, leaderHeartbeatTimeoutNs)) { ctx.countedErrorHandler().onError(new ClusterException("inactive follower quorum", WARN)); enterElection(); workCount += 1; } } else if (ConsensusModule.State.TERMINATING == state) { if (clusterTermination.canTerminate(activeMembers, terminationPosition, nowNs)) { recordingLog.commitLogPosition(leadershipTermId, terminationPosition); closeAndTerminate(); } } } else if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state) { if (nowNs >= (timeOfLastLogUpdateNs + leaderHeartbeatTimeoutNs) && NULL_POSITION == terminationPosition) { ctx.countedErrorHandler().onError(new ClusterException("leader heartbeat timeout", WARN)); enterElection(); workCount += 1; } } } return workCount; } private int consensusWork(final long timestamp, final long nowNs) { int 
workCount = 0; if (Cluster.Role.LEADER == role) { if (ConsensusModule.State.ACTIVE == state) { workCount += timerService.poll(timestamp); workCount += pendingServiceMessages.forEach( pendingServiceMessageHeadOffset, serviceSessionMessageAppender, SERVICE_MESSAGE_LIMIT); workCount += ingressAdapter.poll(); } workCount += updateLeaderPosition(nowNs); } else { if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state) { if (NULL_POSITION != terminationPosition && logAdapter.position() >= terminationPosition) { serviceProxy.terminationPosition(terminationPosition, ctx.countedErrorHandler()); state(ConsensusModule.State.TERMINATING); } else { final long limit = null != appendPosition ? appendPosition.get() : logRecordedPosition; final int count = logAdapter.poll(min(notifiedCommitPosition, limit)); if (0 == count && logAdapter.isImageClosed()) { ctx.countedErrorHandler().onError(new ClusterException("log disconnected from leader", WARN)); enterElection(); return 1; } commitPosition.proposeMaxOrdered(logAdapter.position()); workCount += ingressAdapter.poll(); workCount += count; } } workCount += updateFollowerPosition(nowNs); } workCount += consensusModuleAdapter.poll(); return workCount; } private int checkControlToggle(final long nowNs) { switch (ClusterControl.ToggleState.get(controlToggle)) { case SUSPEND: if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SUSPEND)) { state(ConsensusModule.State.SUSPENDED); } break; case RESUME: if (ConsensusModule.State.SUSPENDED == state && appendAction(ClusterAction.RESUME)) { state(ConsensusModule.State.ACTIVE); ClusterControl.ToggleState.reset(controlToggle); } break; case SNAPSHOT: if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT)) { state(ConsensusModule.State.SNAPSHOT); } break; case SHUTDOWN: if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT)) { final CountedErrorHandler errorHandler = ctx.countedErrorHandler(); final long position = logPublisher.position(); clusterTermination = new ClusterTermination(nowNs + ctx.terminationTimeoutNs()); clusterTermination.terminationPosition( errorHandler, consensusPublisher, activeMembers, thisMember, leadershipTermId, position); terminationPosition = position; state(ConsensusModule.State.SNAPSHOT); } break; case ABORT: if (ConsensusModule.State.ACTIVE == state) { final CountedErrorHandler errorHandler = ctx.countedErrorHandler(); final long position = logPublisher.position(); clusterTermination = new ClusterTermination(nowNs + ctx.terminationTimeoutNs()); clusterTermination.terminationPosition( errorHandler, consensusPublisher, activeMembers, thisMember, leadershipTermId, position); terminationPosition = position; serviceProxy.terminationPosition(terminationPosition, errorHandler); state(ConsensusModule.State.TERMINATING); } break; default: return 0; } return 1; } private boolean appendAction(final ClusterAction action) { return logPublisher.appendClusterAction(leadershipTermId, clusterClock.time(), action); } private int processPendingSessions( final ArrayList<ClusterSession> pendingSessions, final long nowMs, final long nowNs) { int workCount = 0; for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--) { final ClusterSession session = pendingSessions.get(i); if (nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs)) { ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--); session.close(ctx.countedErrorHandler()); ctx.timedOutClientCounter().incrementOrdered(); 
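                // The pending session exceeded sessionTimeoutNs before completing the
                // connect/authentication handshake, so it is removed, closed, and counted
                // as a timed-out client before moving on to the next pending session.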
continue; } if (session.state() == INIT || session.state() == CONNECTED) { if (session.isResponsePublicationConnected()) { session.state(CONNECTED); authenticator.onConnectedSession(sessionProxy.session(session), nowMs); } } if (session.state() == CHALLENGED) { if (session.isResponsePublicationConnected()) { authenticator.onChallengedSession(sessionProxy.session(session), nowMs); } } if (session.state() == AUTHENTICATED) { if (session.isBackupSession()) { final RecordingLog.Entry entry = recordingLog.findLastTerm(); if (null != entry && consensusPublisher.backupResponse( session, commitPosition.id(), leaderMember.id(), entry, recoveryPlan, ClusterMember.encodeAsString(activeMembers))) { ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--); session.close(ctx.countedErrorHandler()); workCount += 1; } } else if (appendSessionAndOpen(session, nowNs)) { ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--); sessionByIdMap.put(session.id(), session); workCount += 1; } } else if (session.state() == REJECTED) { ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--); rejectedSessions.add(session); } } return workCount; } private int sendRejections(final ArrayList<ClusterSession> rejectedSessions, final long nowNs) { int workCount = 0; for (int lastIndex = rejectedSessions.size() - 1, i = lastIndex; i >= 0; i--) { final ClusterSession session = rejectedSessions.get(i); final String detail = session.responseDetail(); final EventCode eventCode = session.eventCode(); if (egressPublisher.sendEvent(session, leadershipTermId, leaderMember.id(), eventCode, detail) || nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs)) { ArrayListUtil.fastUnorderedRemove(rejectedSessions, i, lastIndex--); session.close(ctx.countedErrorHandler()); workCount++; } } return workCount; } private int sendRedirects(final ArrayList<ClusterSession> redirectSessions, final long nowNs) { int workCount = 0; for (int lastIndex = redirectSessions.size() - 1, i = lastIndex; i >= 0; i--) { final ClusterSession session = redirectSessions.get(i); final EventCode eventCode = EventCode.REDIRECT; final int leaderId = leaderMember.id(); if (egressPublisher.sendEvent(session, leadershipTermId, leaderId, eventCode, ingressEndpoints) || nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs)) { ArrayListUtil.fastUnorderedRemove(redirectSessions, i, lastIndex--); session.close(ctx.countedErrorHandler()); workCount++; } } return workCount; } private int processPassiveMembers(final ClusterMember[] passiveMembers) { int workCount = 0; for (final ClusterMember member : passiveMembers) { if (member.correlationId() != NULL_VALUE) { if (consensusPublisher.clusterMemberChange( member.publication(), member.correlationId(), leaderMember.id(), ClusterMember.encodeAsString(activeMembers), ClusterMember.encodeAsString(passiveMembers))) { member.correlationId(NULL_VALUE); workCount++; } } else if (member.hasRequestedJoin() && member.logPosition() == logPublisher.position()) { final ClusterMember[] newMembers = ClusterMember.addMember(activeMembers, member); final long now = clusterClock.time(); if (logPublisher.appendMembershipChangeEvent( leadershipTermId, now, leaderMember.id(), newMembers.length, ChangeType.JOIN, member.id(), ClusterMember.encodeAsString(newMembers)) > 0) { timeOfLastLogUpdateNs = clusterTimeUnit.toNanos(now) - leaderHeartbeatIntervalNs; this.passiveMembers = ClusterMember.removeMember(this.passiveMembers, member.id()); activeMembers = newMembers; rankedPositions = new 
long[ClusterMember.quorumThreshold(activeMembers.length)]; member.hasRequestedJoin(false); workCount++; break; } } } return workCount; } private int checkSessions(final Long2ObjectHashMap<ClusterSession> sessionByIdMap, final long nowNs) { int workCount = 0; for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); ) { final ClusterSession session = i.next(); if (nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs)) { if (session.state() == OPEN) { session.closing(CloseReason.TIMEOUT); if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time())) { final String msg = session.closeReason().name(); egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg); session.closedLogPosition(logPublisher.position()); uncommittedClosedSessions.addLast(session); i.remove(); session.close(ctx.countedErrorHandler()); ctx.timedOutClientCounter().incrementOrdered(); workCount++; } } else if (session.state() == CLOSING) { if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time())) { final String msg = session.closeReason().name(); egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg); session.closedLogPosition(logPublisher.position()); uncommittedClosedSessions.addLast(session); i.remove(); session.close(ctx.countedErrorHandler()); if (session.closeReason() == CloseReason.TIMEOUT) { ctx.timedOutClientCounter().incrementOrdered(); } workCount++; } } else { i.remove(); session.close(ctx.countedErrorHandler()); workCount++; } } else if (session.hasOpenEventPending()) { workCount += sendSessionOpenEvent(session); } else if (session.hasNewLeaderEventPending()) { workCount += sendNewLeaderEvent(session); } } return workCount; } private void captureServiceAck(final long logPosition, final long ackId, final long relevantId, final int serviceId) { if (0 == ackId && NULL_VALUE != serviceClientIds[serviceId]) { throw new ClusterException( "initial ack already received from service: possible duplicate serviceId=" + serviceId); } serviceAckQueues[serviceId].offerLast(new ServiceAck(ackId, logPosition, relevantId)); } private ServiceAck[] pollServiceAcks(final long logPosition, final int serviceId) { final ServiceAck[] serviceAcks = new ServiceAck[serviceAckQueues.length]; for (int id = 0, length = serviceAckQueues.length; id < length; id++) { final ServiceAck serviceAck = serviceAckQueues[id].pollFirst(); if (null == serviceAck || serviceAck.logPosition() != logPosition) { throw new ClusterException( "invalid ack for serviceId=" + serviceId + " logPosition=" + logPosition + " " + serviceAck); } serviceAcks[id] = serviceAck; } return serviceAcks; } private int sendNewLeaderEvent(final ClusterSession session) { if (egressPublisher.newLeader(session, leadershipTermId, leaderMember.id(), ingressEndpoints)) { session.hasNewLeaderEventPending(false); return 1; } return 0; } private int sendSessionOpenEvent(final ClusterSession session) { if (egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.OK, "")) { session.hasOpenEventPending(false); return 1; } return 0; } private boolean appendSessionAndOpen(final ClusterSession session, final long nowNs) { final long resultingPosition = logPublisher.appendSessionOpen(session, leadershipTermId, clusterClock.time()); if (resultingPosition > 0) { session.open(resultingPosition); session.timeOfLastActivityNs(nowNs); return true; } return false; } private void createAppendPosition(final int logSessionId) { final CountersReader 
counters = aeron.countersReader(); final int counterId = awaitRecordingCounter(counters, logSessionId); long registrationId; while (0 == (registrationId = counters.getCounterRegistrationId(counterId))) { idle(); } logRecordingId = RecordingPos.getRecordingId(counters, counterId); appendPosition = new ReadableCounter(counters, registrationId, counterId); logRecordedPosition = NULL_POSITION; } private boolean tryCreateAppendPosition(final int logSessionId) { final CountersReader counters = aeron.countersReader(); final int counterId = RecordingPos.findCounterIdBySession(counters, logSessionId); if (CountersReader.NULL_COUNTER_ID == counterId) { return false; } final long registrationId = counters.getCounterRegistrationId(counterId); if (0 == registrationId) { return false; } logRecordingId = RecordingPos.getRecordingId(counters, counterId); appendPosition = new ReadableCounter(counters, registrationId, counterId); logRecordedPosition = NULL_POSITION; return true; } private void loadSnapshot(final RecordingLog.Snapshot snapshot, final AeronArchive archive) { final String channel = ctx.replayChannel(); final int streamId = ctx.replayStreamId(); final int sessionId = (int)archive.startReplay(snapshot.recordingId, 0, NULL_LENGTH, channel, streamId); final String replaySubscriptionChannel = ChannelUri.addSessionId(channel, sessionId); try (Subscription subscription = aeron.addSubscription(replaySubscriptionChannel, streamId)) { final Image image = awaitImage(sessionId, subscription); final ConsensusModuleSnapshotLoader snapshotLoader = new ConsensusModuleSnapshotLoader(image, this); while (true) { final int fragments = snapshotLoader.poll(); if (fragments == 0) { if (snapshotLoader.isDone()) { break; } if (image.isClosed()) { throw new ClusterException("snapshot ended unexpectedly"); } } idle(fragments); } final int appVersion = snapshotLoader.appVersion(); if (SemanticVersion.major(ctx.appVersion()) != SemanticVersion.major(appVersion)) { throw new ClusterException( "incompatible version: " + SemanticVersion.toString(ctx.appVersion()) + " snapshot=" + SemanticVersion.toString(appVersion)); } final TimeUnit timeUnit = snapshotLoader.timeUnit(); if (timeUnit != clusterTimeUnit) { throw new ClusterException("incompatible time unit: " + clusterTimeUnit + " snapshot=" + timeUnit); } pendingServiceMessages.forEach(this::serviceSessionMessageReset, Integer.MAX_VALUE); } timerService.currentTickTime(clusterClock.time()); leadershipTermId(snapshot.leadershipTermId); commitPosition.setOrdered(snapshot.logPosition); expectedAckPosition = snapshot.logPosition; } private Image awaitImage(final int sessionId, final Subscription subscription) { idleStrategy.reset(); Image image; while ((image = subscription.imageBySessionId(sessionId)) == null) { idle(); } return image; } private Counter addRecoveryStateCounter(final RecordingLog.RecoveryPlan plan) { final int snapshotsCount = plan.snapshots.size(); if (snapshotsCount > 0) { final long[] serviceSnapshotRecordingIds = new long[snapshotsCount - 1]; final RecordingLog.Snapshot snapshot = plan.snapshots.get(0); for (int i = 1; i < snapshotsCount; i++) { final RecordingLog.Snapshot serviceSnapshot = plan.snapshots.get(i); serviceSnapshotRecordingIds[serviceSnapshot.serviceId] = serviceSnapshot.recordingId; } return RecoveryState.allocate( aeron, snapshot.leadershipTermId, snapshot.logPosition, snapshot.timestamp, ctx.clusterId(), serviceSnapshotRecordingIds); } return RecoveryState.allocate(aeron, leadershipTermId, 0, 0, ctx.clusterId()); } private DynamicJoin 
requiresDynamicJoin() { if (0 == activeMembers.length && null != ctx.clusterConsensusEndpoints()) { return new DynamicJoin(ctx.clusterConsensusEndpoints(), archive, consensusPublisher, ctx, this); } return null; } private void captureServiceClientIds() { for (int i = 0, length = serviceClientIds.length; i < length; i++) { final ServiceAck serviceAck = serviceAckQueues[i].pollFirst(); serviceClientIds[i] = Objects.requireNonNull(serviceAck).relevantId(); } } private void handleMemberRemovals(final long commitPosition) { ClusterMember[] members = activeMembers; for (final ClusterMember member : activeMembers) { if (member.hasRequestedRemove() && member.removalPosition() <= commitPosition) { if (member.id() == memberId) { state(ConsensusModule.State.QUITTING); } members = ClusterMember.removeMember(members, member.id()); clusterMemberByIdMap.remove(member.id()); clusterMemberByIdMap.compact(); member.closePublication(ctx.countedErrorHandler()); logPublisher.removeDestination(ctx.isLogMdc(), member.logEndpoint()); pendingMemberRemovals--; } } activeMembers = members; rankedPositions = new long[ClusterMember.quorumThreshold(members.length)]; } private int updateLeaderPosition(final long nowNs) { if (null != appendPosition) { return updateLeaderPosition(nowNs, appendPosition.get()); } return 0; } int updateLeaderPosition(final long nowNs, final long position) { thisMember.logPosition(position).timeOfLastAppendPositionNs(nowNs); final long commitPosition = min(quorumPosition(activeMembers, rankedPositions), position); if (commitPosition > this.commitPosition.getWeak() || nowNs >= (timeOfLastLogUpdateNs + leaderHeartbeatIntervalNs)) { for (final ClusterMember member : activeMembers) { if (member.id() != memberId) { consensusPublisher.commitPosition( member.publication(), leadershipTermId, commitPosition, memberId); } } this.commitPosition.setOrdered(commitPosition); timeOfLastLogUpdateNs = nowNs; clearUncommittedEntriesTo(commitPosition); if (pendingMemberRemovals > 0) { handleMemberRemovals(commitPosition); } return 1; } return 0; } LogReplication newLogReplication( final String leaderArchiveEndpoint, final long leaderRecordingId, final long stopPosition, final long nowNs) { return new LogReplication( archive, leaderRecordingId, logRecordingId, stopPosition, leaderArchiveEndpoint, ctx.replicationChannel(), ctx.leaderHeartbeatTimeoutNs(), ctx.leaderHeartbeatIntervalNs(), nowNs); } private int updateFollowerPosition(final long nowNs) { final long recordedPosition = null != appendPosition ? 
appendPosition.get() : logRecordedPosition; final long position = Math.max(recordedPosition, lastAppendPosition); if ((recordedPosition > lastAppendPosition || nowNs >= (timeOfLastAppendPositionNs + leaderHeartbeatIntervalNs)) && consensusPublisher.appendPosition(leaderMember.publication(), leadershipTermId, position, memberId)) { lastAppendPosition = position; timeOfLastAppendPositionNs = nowNs; return 1; } return 0; } private void clearSessionsAfter(final long logPosition) { for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); ) { final ClusterSession session = i.next(); if (session.openedLogPosition() > logPosition) { i.remove(); egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, "election"); session.close(ctx.countedErrorHandler()); } } for (final ClusterSession session : pendingSessions) { egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, "election"); session.close(ctx.countedErrorHandler()); } pendingSessions.clear(); } private void clearUncommittedEntriesTo(final long commitPosition) { if (uncommittedServiceMessages > 0) { pendingServiceMessageHeadOffset -= pendingServiceMessages.consume( leaderServiceSessionMessageSweeper, Integer.MAX_VALUE); } while (uncommittedTimers.peekLong() <= commitPosition) { uncommittedTimers.pollLong(); uncommittedTimers.pollLong(); } while (true) { final ClusterSession clusterSession = uncommittedClosedSessions.peekFirst(); if (null == clusterSession || clusterSession.closedLogPosition() > commitPosition) { break; } uncommittedClosedSessions.pollFirst(); } } private void restoreUncommittedEntries(final long commitPosition) { for (final LongArrayQueue.LongIterator i = uncommittedTimers.iterator(); i.hasNext(); ) { final long appendPosition = i.nextValue(); final long correlationId = i.nextValue(); if (appendPosition > commitPosition) { timerService.scheduleTimerForCorrelationId(correlationId, timerService.currentTickTime()); } } uncommittedTimers.clear(); pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE); pendingServiceMessageHeadOffset = 0; if (uncommittedServiceMessages > 0) { pendingServiceMessages.consume(leaderServiceSessionMessageSweeper, Integer.MAX_VALUE); pendingServiceMessages.forEach(this::serviceSessionMessageReset, Integer.MAX_VALUE); uncommittedServiceMessages = 0; } ClusterSession session; while (null != (session = uncommittedClosedSessions.pollFirst())) { if (session.closedLogPosition() > commitPosition) { session.closedLogPosition(NULL_POSITION); session.state(CLOSING); sessionByIdMap.put(session.id(), session); } } } private void enterElection() { role(Cluster.Role.FOLLOWER); election = new Election( false, leadershipTermId, commitPosition.getWeak(), null != appendPosition ? 
appendPosition.get() : recoveryPlan.appendedLogPosition, activeMembers, clusterMemberByIdMap, thisMember, consensusPublisher, ctx, this); } private void idle() { checkInterruptStatus(); aeronClientInvoker.invoke(); if (aeron.isClosed()) { throw new AgentTerminationException("unexpected Aeron close"); } idleStrategy.idle(); pollArchiveEvents(); } private void idle(final int workCount) { checkInterruptStatus(); aeronClientInvoker.invoke(); if (aeron.isClosed()) { throw new AgentTerminationException("unexpected Aeron close"); } idleStrategy.idle(workCount); if (0 == workCount) { pollArchiveEvents(); } } private static void checkInterruptStatus() { if (Thread.currentThread().isInterrupted()) { throw new AgentTerminationException("interrupted"); } } private void takeSnapshot(final long timestamp, final long logPosition, final ServiceAck[] serviceAcks) { final long recordingId; try (ExclusivePublication publication = aeron.addExclusivePublication( ctx.snapshotChannel(), ctx.snapshotStreamId())) { final String channel = ChannelUri.addSessionId(ctx.snapshotChannel(), publication.sessionId()); archive.startRecording(channel, ctx.snapshotStreamId(), LOCAL, true); final CountersReader counters = aeron.countersReader(); final int counterId = awaitRecordingCounter(counters, publication.sessionId()); recordingId = RecordingPos.getRecordingId(counters, counterId); snapshotState(publication, logPosition, replayLeadershipTermId); awaitRecordingComplete(recordingId, publication.position(), counters, counterId); } catch (final ArchiveException ex) { if (ex.errorCode() == ArchiveException.STORAGE_SPACE) { ctx.countedErrorHandler().onError(ex); unexpectedTermination(); } throw ex; } final long termBaseLogPosition = recordingLog.getTermEntry(replayLeadershipTermId).termBaseLogPosition; for (int serviceId = serviceAcks.length - 1; serviceId >= 0; serviceId--) { final long snapshotId = serviceAcks[serviceId].relevantId(); recordingLog.appendSnapshot( snapshotId, replayLeadershipTermId, termBaseLogPosition, logPosition, timestamp, serviceId); } recordingLog.appendSnapshot( recordingId, replayLeadershipTermId, termBaseLogPosition, logPosition, timestamp, SERVICE_ID); recordingLog.force(ctx.fileSyncLevel()); recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), Aeron.NULL_VALUE); ctx.snapshotCounter().incrementOrdered(); final long nowNs = clusterClock.timeNanos(); for (final ClusterSession session : sessionByIdMap.values()) { session.timeOfLastActivityNs(nowNs); } } private void awaitRecordingComplete( final long recordingId, final long position, final CountersReader counters, final int counterId) { idleStrategy.reset(); while (counters.getCounterValue(counterId) < position) { idle(); if (!RecordingPos.isActive(counters, counterId, recordingId)) { throw new ClusterException("recording has stopped unexpectedly: " + recordingId); } } } private int awaitRecordingCounter(final CountersReader counters, final int sessionId) { idleStrategy.reset(); int counterId = RecordingPos.findCounterIdBySession(counters, sessionId); while (CountersReader.NULL_COUNTER_ID == counterId) { idle(); counterId = RecordingPos.findCounterIdBySession(counters, sessionId); } return counterId; } private void snapshotState( final ExclusivePublication publication, final long logPosition, final long leadershipTermId) { final ConsensusModuleSnapshotTaker snapshotTaker = new ConsensusModuleSnapshotTaker( publication, idleStrategy, aeronClientInvoker); snapshotTaker.markBegin(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0, 
clusterTimeUnit, ctx.appVersion()); snapshotTaker.snapshotConsensusModuleState( nextSessionId, nextServiceSessionId, logServiceSessionId, pendingServiceMessages.size()); snapshotTaker.snapshotClusterMembers(memberId, highMemberId, activeMembers); for (final ClusterSession session : sessionByIdMap.values()) { if (session.state() == OPEN || session.state() == CLOSED) { snapshotTaker.snapshotSession(session); } } timerService.snapshot(snapshotTaker); snapshotTaker.snapshot(pendingServiceMessages); snapshotTaker.markEnd(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0, clusterTimeUnit, ctx.appVersion()); } private void clusterMemberJoined(final int memberId, final ClusterMember[] newMembers) { highMemberId = Math.max(highMemberId, memberId); final ClusterMember eventMember = ClusterMember.findMember(newMembers, memberId); if (null != eventMember) { if (null == eventMember.publication()) { ClusterMember.addConsensusPublication( eventMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron); } activeMembers = ClusterMember.addMember(activeMembers, eventMember); clusterMemberByIdMap.put(memberId, eventMember); rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)]; } } private void clusterMemberQuit(final int memberId) { activeMembers = ClusterMember.removeMember(activeMembers, memberId); clusterMemberByIdMap.remove(memberId); rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)]; } private void onUnavailableIngressImage(final Image image) { ingressAdapter.freeSessionBuffer(image.sessionId()); } private void enqueueServiceSessionMessage( final MutableDirectBuffer buffer, final int offset, final int length, final long clusterSessionId) { final int headerOffset = offset - SessionMessageHeaderDecoder.BLOCK_LENGTH; final int clusterSessionIdOffset = headerOffset + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset(); final int timestampOffset = headerOffset + SessionMessageHeaderDecoder.timestampEncodingOffset(); buffer.putLong(clusterSessionIdOffset, clusterSessionId, SessionMessageHeaderDecoder.BYTE_ORDER); buffer.putLong(timestampOffset, Long.MAX_VALUE, SessionMessageHeaderDecoder.BYTE_ORDER); if (!pendingServiceMessages.append(buffer, offset - SESSION_HEADER_LENGTH, length + SESSION_HEADER_LENGTH)) { throw new ClusterException("pending service message buffer capacity: " + pendingServiceMessages.size()); } } private boolean serviceSessionMessageAppender( final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset) { final int headerOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH; final int clusterSessionIdOffset = headerOffset + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset(); final int timestampOffset = headerOffset + SessionMessageHeaderDecoder.timestampEncodingOffset(); final long clusterSessionId = buffer.getLong(clusterSessionIdOffset, SessionMessageHeaderDecoder.BYTE_ORDER); final long appendPosition = logPublisher.appendMessage( leadershipTermId, clusterSessionId, clusterClock.time(), buffer, offset + SESSION_HEADER_LENGTH, length - SESSION_HEADER_LENGTH); if (appendPosition > 0) { ++uncommittedServiceMessages; logServiceSessionId = clusterSessionId; pendingServiceMessageHeadOffset = headOffset; buffer.putLong(timestampOffset, appendPosition, SessionMessageHeaderEncoder.BYTE_ORDER); return true; } return false; } private boolean serviceSessionMessageReset( final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset) { final int 
timestampOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH + SessionMessageHeaderDecoder.timestampEncodingOffset(); final long appendPosition = buffer.getLong(timestampOffset, SessionMessageHeaderDecoder.BYTE_ORDER); if (appendPosition < Long.MAX_VALUE) { buffer.putLong(timestampOffset, Long.MAX_VALUE, SessionMessageHeaderEncoder.BYTE_ORDER); return true; } return false; } private boolean leaderServiceSessionMessageSweeper( final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset) { final int timestampOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH + SessionMessageHeaderDecoder.timestampEncodingOffset(); final long appendPosition = buffer.getLong(timestampOffset, SessionMessageHeaderDecoder.BYTE_ORDER); if (appendPosition <= commitPosition.getWeak()) { --uncommittedServiceMessages; return true; } return false; } private boolean followerServiceSessionMessageSweeper( final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset) { final int clusterSessionIdOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset(); return buffer.getLong(clusterSessionIdOffset, SessionMessageHeaderDecoder.BYTE_ORDER) <= logServiceSessionId; } private void onUnavailableCounter(final CountersReader counters, final long registrationId, final int counterId) { if (ConsensusModule.State.TERMINATING != state && ConsensusModule.State.QUITTING != state) { for (final long clientId : serviceClientIds) { if (registrationId == clientId) { ctx.countedErrorHandler().onError(new ClusterException( "Aeron client in service closed unexpectedly", WARN)); state(ConsensusModule.State.CLOSED); return; } } if (null != appendPosition && appendPosition.registrationId() == registrationId) { appendPosition = null; logSubscriptionId = NULL_VALUE; if (null != election) { election.handleError( clusterClock.timeNanos(), new ClusterException("log recording ended unexpectedly", WARN)); } else if (NULL_POSITION == terminationPosition) { ctx.countedErrorHandler().onError(new ClusterException("log recording ended unexpectedly", WARN)); isElectionRequired = true; } } } } private void closeAndTerminate() { tryStopLogRecording(); state(ConsensusModule.State.CLOSED); terminateAgent(); } private void unexpectedTermination() { aeron.removeUnavailableCounterHandler(unavailableCounterHandlerRegistrationId); serviceProxy.terminationPosition(0, ctx.countedErrorHandler()); tryStopLogRecording(); state(ConsensusModule.State.CLOSED); terminateAgent(); } private void terminateAgent() { try { ctx.terminationHook().run(); } catch (final Throwable ex) { ctx.countedErrorHandler().onError(ex); } throw new ClusterTerminationException(); } private void tryStopLogRecording() { appendPosition = null; if (NULL_VALUE != logSubscriptionId && archive.archiveProxy().publication().isConnected()) { try { archive.tryStopRecording(logSubscriptionId); } catch (final Exception ex) { ctx.countedErrorHandler().onError(new ClusterException(ex, WARN)); } logSubscriptionId = NULL_VALUE; } if (NULL_VALUE != logRecordingId && archive.archiveProxy().publication().isConnected()) { try { archive.tryStopRecordingByIdentity(logRecordingId); } catch (final Exception ex) { ctx.countedErrorHandler().onError(new ClusterException(ex, WARN)); } } } private long getLastAppendedPosition() { idleStrategy.reset(); while (true) { final long appendPosition = archive.getStopPosition(logRecordingId); if (NULL_POSITION != appendPosition) { return appendPosition; } idle(); } } 
private void appendDynamicJoinTermAndSnapshots() { if (!dynamicJoinSnapshots.isEmpty()) { final RecordingLog.Snapshot lastSnapshot = dynamicJoinSnapshots.get(dynamicJoinSnapshots.size() - 1); recordingLog.appendTerm( logRecordingId, lastSnapshot.leadershipTermId, lastSnapshot.termBaseLogPosition, lastSnapshot.timestamp); for (int i = dynamicJoinSnapshots.size() - 1; i >= 0; i--) { final RecordingLog.Snapshot snapshot = dynamicJoinSnapshots.get(i); recordingLog.appendSnapshot( snapshot.recordingId, snapshot.leadershipTermId, snapshot.termBaseLogPosition, snapshot.logPosition, snapshot.timestamp, snapshot.serviceId); } dynamicJoinSnapshots.clear(); } } private void connectIngress() { if (!ctx.ingressChannel().contains(ENDPOINT_PARAM_NAME)) { final ChannelUri ingressUri = ChannelUri.parse(ctx.ingressChannel()); ingressUri.put(ENDPOINT_PARAM_NAME, thisMember.ingressEndpoint()); ingressAdapter.connect(aeron.addSubscription( ingressUri.toString(), ctx.ingressStreamId(), null, this::onUnavailableIngressImage)); } else if (Cluster.Role.LEADER == role) { ingressAdapter.connect(aeron.addSubscription( ctx.ingressChannel(), ctx.ingressStreamId(), null, this::onUnavailableIngressImage)); } } public String toString() { return "ConsensusModuleAgent{" + "election=" + election + '}'; } }
[Java] Don't overwrite leadershipTermId set when snapshot loaded for dynamic join.
aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java
[Java] Don't overwrite leadershipTermId set when snapshot loaded for dynamic join.
Java
apache-2.0
7d870d07cbb8c621c6eda0abd8ce0c4bac242e2d
0
amir-zeldes/ANNIS,amir-zeldes/ANNIS,korpling/ANNIS,amir-zeldes/ANNIS,zangsir/ANNIS,zangsir/ANNIS,zangsir/ANNIS,korpling/ANNIS,korpling/ANNIS,amir-zeldes/ANNIS,zangsir/ANNIS,zangsir/ANNIS,korpling/ANNIS,amir-zeldes/ANNIS,amir-zeldes/ANNIS,korpling/ANNIS,zangsir/ANNIS
/* * Copyright 2011 Corpuslinguistic working group Humboldt University Berlin. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package annis.gui.controlpanel; import annis.gui.AnnisUI; import annis.gui.components.HelpButton; import static annis.gui.controlpanel.SearchOptionsPanel.NULL_SEGMENTATION_VALUE; import annis.gui.objects.QueryUIState; import annis.libgui.Background; import annis.libgui.Helper; import annis.service.objects.CorpusConfig; import annis.service.objects.CorpusConfigMap; import annis.service.objects.OrderType; import annis.service.objects.SegmentationList; import com.google.common.collect.ImmutableList; import com.google.common.escape.Escaper; import com.google.common.net.UrlEscapers; import com.google.gwt.thirdparty.guava.common.collect.Lists; import com.sun.jersey.api.client.UniformInterfaceException; import com.sun.jersey.api.client.WebResource; import com.vaadin.data.util.BeanItemContainer; import com.vaadin.ui.AbstractSelect; import com.vaadin.ui.ComboBox; import com.vaadin.ui.FormLayout; import com.vaadin.ui.Notification; import com.vaadin.ui.ProgressBar; import com.vaadin.ui.UI; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author thomas * @author Benjamin Weißenfels <b.pixeldrama@gmail.com> */ public class SearchOptionsPanel extends FormLayout { public static final String NULL_SEGMENTATION_VALUE = "tokens (default)"; public static final String KEY_DEFAULT_CONTEXT_SEGMENTATION = "default-context-segmentation"; public static final String KEY_DEFAULT_BASE_TEXT_SEGMENTATION = "default-base-text-segmentation"; public static final String KEY_MAX_CONTEXT_LEFT = "max-context-left"; public static final String KEY_MAX_CONTEXT_RIGHT = "max-context-right"; public static final String KEY_CONTEXT_STEPS = "context-steps"; public static final String KEY_DEFAULT_CONTEXT = "default-context"; public static final String KEY_RESULT_PER_PAGE = "results-per-page"; public static final String DEFAULT_CONFIG = "default-config"; private static final Logger log = LoggerFactory.getLogger( SearchOptionsPanel.class); private final static Escaper urlPathEscape = UrlEscapers. urlPathSegmentEscaper(); /** * Holds all available corpus configuarations, including the defautl * configeruation. 
* * The default configuration is available with the key "default-config" */ private CorpusConfigMap corpusConfigurations; private final ComboBox cbLeftContext; private final ComboBox cbRightContext; private final ComboBox cbResultsPerPage; private final ComboBox cbSegmentation; private final HelpButton segmentationHelp; private final ComboBox cbOrder; // TODO: make this configurable private static final List<Integer> PREDEFINED_PAGE_SIZES = ImmutableList.of( 1, 2, 5, 10, 20, 25 ); public static final List<Integer> PREDEFINED_CONTEXTS = ImmutableList.of( 0, 1, 2, 5, 10, 20 ); private boolean optionsManuallyChanged = false; private final ProgressBar pbLoadConfig; /** * Caches all calculated corpus configurations. Note, also multiple selection * are stored. The keys for this kind of selection are generated by * {@link #buildKey()}. */ private Map<String, CorpusConfig> lastSelection; public SearchOptionsPanel() { setWidth("100%"); setHeight("-1px"); // init the config cache lastSelection = new HashMap<>(); pbLoadConfig = new ProgressBar(); pbLoadConfig.setIndeterminate(true); pbLoadConfig.setCaption("Loading search options..."); addComponent(pbLoadConfig); cbLeftContext = new ComboBox("Left Context"); cbRightContext = new ComboBox("Right Context"); cbResultsPerPage = new ComboBox("Results Per Page"); cbLeftContext.setNullSelectionAllowed(false); cbRightContext.setNullSelectionAllowed(false); cbResultsPerPage.setNullSelectionAllowed(false); // cbLeftContext.setNewItemsAllowed(true); cbRightContext.setNewItemsAllowed(true); cbResultsPerPage.setNewItemsAllowed(true); // cbLeftContext.setTextInputAllowed(true); cbRightContext.setTextInputAllowed(true); cbResultsPerPage.setTextInputAllowed(true); cbLeftContext.setImmediate(true); cbRightContext.setImmediate(true); cbResultsPerPage.setImmediate(true); // cbLeftContext.addValidator(new IntegerRangeValidator("must be a number", // Integer.MIN_VALUE, Integer.MAX_VALUE)); // cbRightContext.addValidator(new IntegerRangeValidator("must be a number", // Integer.MIN_VALUE, Integer.MAX_VALUE)); // cbResultsPerPage.addValidator(new IntegerRangeValidator("must be a number", // Integer.MIN_VALUE, Integer.MAX_VALUE)); cbSegmentation = new ComboBox("Show context in"); cbSegmentation.setTextInputAllowed( false); cbSegmentation.setNullSelectionAllowed( true); cbSegmentation.setDescription( "If corpora with multiple " + "context definitions are selected, a list of available context units will be " + "displayed. By default context is calculated in ‘tokens’ " + "(e.g. 5 minimal units to the left and right of a search result). " + "Some corpora might offer further context definitions, e.g. 
in " + "syllables, word forms belonging to different speakers, normalized or " + "diplomatic segmentations of a manuscript, etc."); segmentationHelp = new HelpButton(cbSegmentation); cbOrder = new ComboBox("Order"); cbOrder.setNewItemsAllowed(false); cbOrder.setNullSelectionAllowed(false); cbOrder.setImmediate(true); cbLeftContext.setVisible(false); cbRightContext.setVisible(false); cbResultsPerPage.setVisible(false); cbOrder.setVisible(false); segmentationHelp.setVisible(false); addComponent(cbLeftContext); addComponent(cbRightContext); addComponent(segmentationHelp); addComponent(cbResultsPerPage); addComponent(cbOrder); } @Override public void attach() { super.attach(); Background.run(new CorpusConfigUpdater(getUI())); if (getUI() instanceof AnnisUI) { AnnisUI ui = (AnnisUI) getUI(); QueryUIState state = ui.getQueryState(); cbLeftContext.setPropertyDataSource(state.getLeftContext()); cbRightContext.setPropertyDataSource(state.getRightContext()); cbResultsPerPage.setPropertyDataSource(state.getLimit()); cbSegmentation.setPropertyDataSource(state.getBaseText()); BeanItemContainer<OrderType> orderContainer = new BeanItemContainer<>(OrderType.class, Lists.newArrayList(OrderType.values())); // Unset the property data source first, otherwise the setting of // the container data source will set the property value to null cbOrder.setPropertyDataSource(null); cbOrder.setContainerDataSource(orderContainer); cbOrder.setPropertyDataSource(state.getOrder()); } } public void updateSearchPanelConfigurationInBackground( final Set<String> corpora, final UI ui) { Background.run(new Runnable() { @Override public void run() { final List<String> segNames = getSegmentationNamesFromService(corpora); ui.access(new Runnable() { @Override public void run() { // check if a configuration is already calculated String key = buildKey(corpora); if (!lastSelection.containsKey(key)) { lastSelection.put(key, generateConfig(corpora)); } // get values from configuration Integer maxLeftCtx = Integer.parseInt(lastSelection.get(key). getConfig( KEY_MAX_CONTEXT_LEFT)); Integer maxRightCtx = Integer.parseInt(lastSelection.get(key). getConfig( KEY_MAX_CONTEXT_RIGHT)); Integer defaultCtx = Integer.parseInt(lastSelection.get(key). getConfig( KEY_DEFAULT_CONTEXT)); Integer ctxSteps = Integer.parseInt(lastSelection.get(key). getConfig( KEY_CONTEXT_STEPS)); Integer resultsPerPage = Integer.parseInt(lastSelection.get(key). 
getConfig( KEY_RESULT_PER_PAGE)); String segment = lastSelection.get(key).getConfig( KEY_DEFAULT_CONTEXT_SEGMENTATION); int selectedLeftCtx = defaultCtx; int selectedRightCtx = defaultCtx; if (optionsManuallyChanged) { // check if we can re-use the old values Integer oldValueLeft = (Integer) cbLeftContext.getValue(); Integer oldValueRight = (Integer) cbRightContext.getValue(); if (oldValueLeft != null && oldValueLeft >= 0 && oldValueLeft <= maxLeftCtx) { selectedLeftCtx = oldValueLeft; } if (oldValueRight != null && oldValueRight >= 0 && oldValueRight <= maxRightCtx) { selectedRightCtx = oldValueRight; } String oldSegment = (String) cbSegmentation.getValue(); if (oldSegment == null || segNames.contains(oldSegment)) { segment = oldSegment; } Integer oldResultsPerPage = (Integer) cbResultsPerPage.getValue(); if (oldResultsPerPage != null) { resultsPerPage = oldResultsPerPage; } // require another explicit manual change if values should be // re-used on next corpus selection change optionsManuallyChanged = false; } // update the left and right context updateContext(cbLeftContext, maxLeftCtx, ctxSteps, selectedLeftCtx, false); updateContext(cbRightContext, maxRightCtx, ctxSteps, selectedRightCtx, false); updateResultsPerPage(resultsPerPage, false); updateSegmentations(segment, segNames); } }); } }); } private static List<String> getSegmentationNamesFromService( Set<String> corpora) { List<String> segNames = new ArrayList<>(); WebResource service = Helper.getAnnisWebResource(); if (service != null) { for (String corpus : corpora) { try { SegmentationList segList = service.path("query").path("corpora").path(urlPathEscape.escape( corpus)) .path("segmentation-names") .get(SegmentationList.class); segNames.addAll(segList.getSegmentatioNames()); } catch (UniformInterfaceException ex) { if (ex.getResponse().getStatus() == 403) { log.debug( "Did not have access rights to query segmentation names for corpus", ex); } else { log.warn("Could not query segmentation names for corpus", ex); } } } } return segNames; } private void updateSegmentations(String segment, List<String> segNames) { cbSegmentation.removeAllItems(); cbSegmentation.setNullSelectionItemId(NULL_SEGMENTATION_VALUE); cbSegmentation.addItem(NULL_SEGMENTATION_VALUE); if ("tok".equalsIgnoreCase(segment)) { cbSegmentation.setValue(NULL_SEGMENTATION_VALUE); } else if (segment != null) { cbSegmentation.addItem(segment); cbSegmentation.setValue(segment); } if (segNames != null && !segNames.isEmpty()) { for (String s : segNames) { if (!s.equalsIgnoreCase(segment)) { cbSegmentation.addItem(s); } } } } /** * If all values of a specific corpus property have the same value, this value * is returned, otherwise the value from the default config is choosen. * * @param key The property key. * @param corpora Specifies the selected corpora. * @return A value defined in the copurs.properties file or in the * admin-service.properties */ private String theGreatestCommonDenominator(String key, Set<String> corpora) { int value = -1; for (String corpus : corpora) { CorpusConfig c = null; try { if (corpus.equals(Helper.DEFAULT_CONFIG)) { continue; } if (corpusConfigurations.get(corpus) == null) { c = corpusConfigurations.get(DEFAULT_CONFIG); } else { c = corpusConfigurations.get(corpus); } // do nothing if not even default config is set if (c == null) { continue; } if (!c.getConfig().containsKey(key)) { value = Integer.parseInt( corpusConfigurations.get(Helper.DEFAULT_CONFIG).getConfig(). 
getProperty(key)); break; } int tmp = Integer.parseInt(c.getConfig().getProperty(key)); if (value < 0) { value = tmp; } if (value != tmp) { value = Integer.parseInt( corpusConfigurations.get(Helper.DEFAULT_CONFIG).getConfig(). getProperty(key)); } } catch (NumberFormatException ex) { log.error( "Cannot parse the string to an integer for key {} in corpus {} config", key, corpus, ex); } } return String.valueOf(value); } /** * Builds a config for selection of one or multiple corpora. * * @param corpora Specifies the combination of corpora, for which the config * is calculated. * @return A new config which takes into account the segmentation of all * selected corpora. */ private CorpusConfig generateConfig(Set<String> corpora) { corpusConfigurations = Helper.getCorpusConfigs(); CorpusConfig corpusConfig = new CorpusConfig(); // calculate the left and right context. String leftCtx = theGreatestCommonDenominator(KEY_MAX_CONTEXT_LEFT, corpora); String rightCtx = theGreatestCommonDenominator(KEY_MAX_CONTEXT_RIGHT, corpora); corpusConfig.setConfig(KEY_MAX_CONTEXT_LEFT, leftCtx); corpusConfig.setConfig(KEY_MAX_CONTEXT_RIGHT, rightCtx); // calculate the default-context corpusConfig.setConfig(KEY_CONTEXT_STEPS, theGreatestCommonDenominator( KEY_CONTEXT_STEPS, corpora)); corpusConfig.setConfig(KEY_DEFAULT_CONTEXT, theGreatestCommonDenominator( KEY_DEFAULT_CONTEXT, corpora)); // get the results per page corpusConfig.setConfig(KEY_RESULT_PER_PAGE, theGreatestCommonDenominator( KEY_RESULT_PER_PAGE, corpora)); corpusConfig.setConfig(KEY_DEFAULT_CONTEXT_SEGMENTATION, checkSegments( KEY_DEFAULT_CONTEXT_SEGMENTATION, corpora)); corpusConfig.setConfig(KEY_DEFAULT_BASE_TEXT_SEGMENTATION, checkSegments( KEY_DEFAULT_BASE_TEXT_SEGMENTATION, corpora)); return corpusConfig; } /** * Checks if all selected corpora have the same default segmentation layer. * If not, the tok layer is taken, because every corpus has this one. * * @param key the key for the segmentation config, must be * {@link #KEY_DEFAULT_BASE_TEXT_SEGMENTATION} or * {@link #KEY_DEFAULT_CONTEXT_SEGMENTATION}. * @param corpora the corpora which have to be checked. * @return "tok" or a segment which is defined in all corpora. */ private String checkSegments(String key, Set<String> corpora) { String segmentation = null; for (String corpus : corpora) { CorpusConfig c = null; if (corpusConfigurations.containsConfig(corpus)) { c = corpusConfigurations.get(corpus); } else { c = corpusConfigurations.get(DEFAULT_CONFIG); } // do nothing if not even default config is set if (c == null) { continue; } String tmpSegment = c.getConfig(key); /** * If no segment is set in the corpus config, always use the tok segment. */ if (tmpSegment == null) { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); } if (segmentation == null) { segmentation = tmpSegment; continue; } if (!segmentation.equals(tmpSegment)) // return the default config { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); } } if (segmentation == null) { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); } else { return segmentation; } } /** * Updates the results per page combobox. * * @param resultsPerPage The value, which is added to the combobox. * @param keepCustomValues If this flag is true, custom values are kept.
* Custom in the sense that the values are not calculated with * {@link #generateConfig(java.util.Set)} * */ private void updateResultsPerPage(Integer resultsPerPage, boolean keepCustomValues) { Set<Integer> tmpResultsPerPage = new TreeSet<>(); if (keepCustomValues) { Collection<?> itemIds = cbResultsPerPage.getItemIds(); Iterator<?> iterator = itemIds.iterator(); while (iterator.hasNext()) { Object next = iterator.next(); tmpResultsPerPage.add((Integer) next); } } else { for (Integer i : PREDEFINED_PAGE_SIZES) { tmpResultsPerPage.add(i); } } tmpResultsPerPage.add(resultsPerPage); cbResultsPerPage.removeAllItems(); for (Integer i : tmpResultsPerPage) { cbResultsPerPage.addItem(i); } cbResultsPerPage.setValue(resultsPerPage); // update results per page } /** * Updates context combo boxes. * * @param c the combo box, which is updated. * @param maxCtx the largest context value up to which context steps are calculated. * @param ctxSteps the step range. * @param defaultCtx the value the combobox is set to. * @param keepCustomValues If this is true all custom values are kept. */ private void updateContext(ComboBox c, int maxCtx, int ctxSteps, int defaultCtx, boolean keepCustomValues) { /** * The sorting via index container is much too complex for me, so I sort the * items first and put them afterwards into the combo boxes. */ SortedSet<Integer> steps = new TreeSet<>(); if (keepCustomValues) { Collection<?> itemIds = c.getItemIds(); Iterator<?> iterator = itemIds.iterator(); while (iterator.hasNext()) { Object next = iterator.next(); steps.add((Integer) next); } } else { for (Integer i : PREDEFINED_CONTEXTS) { if (i < maxCtx) { steps.add(i); } } for (int step = ctxSteps; step < maxCtx; step += ctxSteps) { steps.add(step); } } steps.add(maxCtx); steps.add(defaultCtx); c.removeAllItems(); for (Integer i : steps) { c.addItem(i); } c.setValue(defaultCtx); } /** * Builds a key for {@link #lastSelection} of multiple corpus selections. * * @param corpusNames A list of corpus names, for which the key is generated. * @return A String which is a concatenation of all corpus names, sorted by * their names.
*/ private static String buildKey(Set<String> corpusNames) { SortedSet<String> names = new TreeSet<>(corpusNames); StringBuilder key = new StringBuilder(); for (String name : names) { key.append(name); } return key.toString(); } public void setOptionsManuallyChanged(boolean optionsManuallyChanged) { this.optionsManuallyChanged = optionsManuallyChanged; } private class CustomResultSize implements AbstractSelect.NewItemHandler { ComboBox c; int resultPerPage; CustomResultSize(ComboBox c, int resultPerPage) { this.c = c; this.resultPerPage = resultPerPage; } @Override public void addNewItem(String resultPerPage) { if (!c.containsId(resultPerPage)) { try { int i = Integer.parseInt((String) resultPerPage); if (i < 1) { throw new IllegalArgumentException( "result number has to be a positive number greater or equal than 1"); } updateResultsPerPage(i, true); } catch (NumberFormatException ex) { Notification.show("invalid result per page input", "Please enter valid numbers [0-9]", Notification.Type.WARNING_MESSAGE); } catch (IllegalArgumentException ex) { Notification.show("invalid result per page input", ex.getMessage(), Notification.Type.WARNING_MESSAGE); } } } } private class CorpusConfigUpdater implements Runnable { private final UI ui; public CorpusConfigUpdater(UI ui) { this.ui = ui; } @Override public void run() { final CorpusConfigMap newCorpusConfigurations = Helper.getCorpusConfigs(); // update GUI ui.access(new Runnable() { @Override public void run() { pbLoadConfig.setVisible(false); cbLeftContext.setVisible(true); cbRightContext.setVisible(true); cbResultsPerPage.setVisible(true); cbOrder.setVisible(true); segmentationHelp.setVisible(true); corpusConfigurations = newCorpusConfigurations; if (corpusConfigurations == null || corpusConfigurations.get(DEFAULT_CONFIG) == null || corpusConfigurations.get(DEFAULT_CONFIG).isEmpty()) { CorpusConfig corpusConfig = new CorpusConfig(); corpusConfig.setConfig(KEY_MAX_CONTEXT_LEFT, "5"); corpusConfig.setConfig(KEY_MAX_CONTEXT_RIGHT, "5"); corpusConfig.setConfig(KEY_CONTEXT_STEPS, "5"); corpusConfig.setConfig(KEY_RESULT_PER_PAGE, "10"); corpusConfig.setConfig(KEY_DEFAULT_CONTEXT, "5"); corpusConfig.setConfig(KEY_DEFAULT_CONTEXT_SEGMENTATION, "tok"); corpusConfig.setConfig(KEY_DEFAULT_BASE_TEXT_SEGMENTATION, "tok"); corpusConfigurations = new CorpusConfigMap(); corpusConfigurations.put(DEFAULT_CONFIG, corpusConfig); } Integer resultsPerPage = Integer.parseInt(corpusConfigurations.get( DEFAULT_CONFIG).getConfig(KEY_RESULT_PER_PAGE)); Integer leftCtx = Integer.parseInt(corpusConfigurations. get(DEFAULT_CONFIG). getConfig(KEY_MAX_CONTEXT_LEFT)); Integer rightCtx = Integer.parseInt( corpusConfigurations.get(DEFAULT_CONFIG). getConfig(KEY_MAX_CONTEXT_RIGHT)); Integer defaultCtx = Integer.parseInt(corpusConfigurations.get( DEFAULT_CONFIG).getConfig(KEY_DEFAULT_CONTEXT)); Integer ctxSteps = Integer.parseInt( corpusConfigurations.get(DEFAULT_CONFIG). 
getConfig(KEY_CONTEXT_STEPS)); String segment = corpusConfigurations.get(DEFAULT_CONFIG).getConfig( KEY_DEFAULT_CONTEXT_SEGMENTATION); updateContext(cbLeftContext, leftCtx, ctxSteps, defaultCtx, false); updateContext(cbRightContext, rightCtx, ctxSteps, defaultCtx, false); updateResultsPerPage(resultsPerPage, false); updateSegmentations(segment, null); cbLeftContext.setNewItemHandler(new CustomContext(cbLeftContext, leftCtx, ctxSteps)); cbRightContext.setNewItemHandler(new CustomContext(cbRightContext, leftCtx, ctxSteps)); cbResultsPerPage.setNewItemHandler(new CustomResultSize( cbResultsPerPage, resultsPerPage)); } }); } } private class CustomContext implements AbstractSelect.NewItemHandler { ComboBox c; int leftCtx; int ctxSteps; CustomContext(ComboBox c, int leftCtx, int ctxSteps) { this.c = c; this.leftCtx = leftCtx; this.ctxSteps = ctxSteps; } @Override public void addNewItem(String context) { if (!c.containsId(context)) { try { int i = Integer.parseInt((String) context); if (i < 0) { throw new IllegalArgumentException( "context has to be a positive number or 0"); } if (i > leftCtx) { throw new IllegalArgumentException( "The context is greater than the max value defined in the corpus property file."); } updateContext(c, leftCtx, ctxSteps, i, true); } catch (NumberFormatException ex) { Notification.show("invalid context input", "Please enter valid numbers [0-9]", Notification.Type.WARNING_MESSAGE); } catch (IllegalArgumentException ex) { Notification.show("invalid context input", ex.getMessage(), Notification.Type.WARNING_MESSAGE); } } } } }
annis-gui/src/main/java/annis/gui/controlpanel/SearchOptionsPanel.java
/* * Copyright 2011 Corpuslinguistic working group Humboldt University Berlin. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package annis.gui.controlpanel; import annis.gui.AnnisUI; import annis.gui.components.HelpButton; import static annis.gui.controlpanel.SearchOptionsPanel.NULL_SEGMENTATION_VALUE; import annis.gui.objects.QueryUIState; import annis.libgui.Background; import annis.libgui.Helper; import annis.service.objects.CorpusConfig; import annis.service.objects.CorpusConfigMap; import annis.service.objects.OrderType; import annis.service.objects.SegmentationList; import com.google.common.collect.ImmutableList; import com.google.common.escape.Escaper; import com.google.common.net.UrlEscapers; import com.google.gwt.thirdparty.guava.common.collect.Lists; import com.sun.jersey.api.client.UniformInterfaceException; import com.sun.jersey.api.client.WebResource; import com.vaadin.data.util.BeanItemContainer; import com.vaadin.ui.AbstractSelect; import com.vaadin.ui.ComboBox; import com.vaadin.ui.FormLayout; import com.vaadin.ui.Notification; import com.vaadin.ui.ProgressBar; import com.vaadin.ui.UI; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author thomas * @author Benjamin Weißenfels <b.pixeldrama@gmail.com> */ public class SearchOptionsPanel extends FormLayout { public static final String NULL_SEGMENTATION_VALUE = "tokens (default)"; public static final String KEY_DEFAULT_CONTEXT_SEGMENTATION = "default-context-segmentation"; public static final String KEY_DEFAULT_BASE_TEXT_SEGMENTATION = "default-base-text-segmentation"; public static final String KEY_MAX_CONTEXT_LEFT = "max-context-left"; public static final String KEY_MAX_CONTEXT_RIGHT = "max-context-right"; public static final String KEY_CONTEXT_STEPS = "context-steps"; public static final String KEY_DEFAULT_CONTEXT = "default-context"; public static final String KEY_RESULT_PER_PAGE = "results-per-page"; public static final String DEFAULT_CONFIG = "default-config"; private static final Logger log = LoggerFactory.getLogger( SearchOptionsPanel.class); private final static Escaper urlPathEscape = UrlEscapers. urlPathSegmentEscaper(); /** * Holds all available corpus configuarations, including the defautl * configeruation. 
* * The default configuration is available with the key "default-config" */ private CorpusConfigMap corpusConfigurations; private final ComboBox cbLeftContext; private final ComboBox cbRightContext; private final ComboBox cbResultsPerPage; private final ComboBox cbSegmentation; private final HelpButton segmentationHelp; private final ComboBox cbOrder; // TODO: make this configurable private static final List<Integer> PREDEFINED_PAGE_SIZES = ImmutableList.of( 1, 2, 5, 10, 20, 25 ); public static final List<Integer> PREDEFINED_CONTEXTS = ImmutableList.of( 0, 1, 2, 5, 10, 20 ); private boolean optionsManuallyChanged = false; private final ProgressBar pbLoadConfig; /** * Caches all calculated corpus configurations. Note, also multiple selection * are stored. The keys for this kind of selection are generated by * {@link #buildKey()}. */ private Map<String, CorpusConfig> lastSelection; public SearchOptionsPanel() { setWidth("100%"); setHeight("-1px"); // init the config cache lastSelection = new HashMap<>(); pbLoadConfig = new ProgressBar(); pbLoadConfig.setIndeterminate(true); pbLoadConfig.setCaption("Loading search options..."); addComponent(pbLoadConfig); cbLeftContext = new ComboBox("Left Context"); cbRightContext = new ComboBox("Right Context"); cbResultsPerPage = new ComboBox("Results Per Page"); cbLeftContext.setNullSelectionAllowed(false); cbRightContext.setNullSelectionAllowed(false); cbResultsPerPage.setNullSelectionAllowed(false); // cbLeftContext.setNewItemsAllowed(true); cbRightContext.setNewItemsAllowed(true); cbResultsPerPage.setNewItemsAllowed(true); // cbLeftContext.setTextInputAllowed(true); cbRightContext.setTextInputAllowed(true); cbResultsPerPage.setTextInputAllowed(true); cbLeftContext.setImmediate(true); cbRightContext.setImmediate(true); cbResultsPerPage.setImmediate(true); // cbLeftContext.addValidator(new IntegerRangeValidator("must be a number", // Integer.MIN_VALUE, Integer.MAX_VALUE)); // cbRightContext.addValidator(new IntegerRangeValidator("must be a number", // Integer.MIN_VALUE, Integer.MAX_VALUE)); // cbResultsPerPage.addValidator(new IntegerRangeValidator("must be a number", // Integer.MIN_VALUE, Integer.MAX_VALUE)); cbSegmentation = new ComboBox("Show context in"); cbSegmentation.setTextInputAllowed( false); cbSegmentation.setNullSelectionAllowed( true); cbSegmentation.setDescription( "If corpora with multiple " + "context definitions are selected, a list of available context units will be " + "displayed. By default context is calculated in ‘tokens’ " + "(e.g. 5 minimal units to the left and right of a search result). " + "Some corpora might offer further context definitions, e.g. 
in " + "syllables, word forms belonging to different speakers, normalized or " + "diplomatic segmentations of a manuscript, etc."); segmentationHelp = new HelpButton(cbSegmentation); cbOrder = new ComboBox("Order"); cbOrder.setNewItemsAllowed(false); cbOrder.setNullSelectionAllowed(false); cbOrder.setImmediate(true); cbLeftContext.setVisible(false); cbRightContext.setVisible(false); cbResultsPerPage.setVisible(false); cbOrder.setVisible(false); segmentationHelp.setVisible(false); addComponent(cbLeftContext); addComponent(cbRightContext); addComponent(segmentationHelp); addComponent(cbResultsPerPage); addComponent(cbOrder); } @Override public void attach() { super.attach(); Background.run(new CorpusConfigUpdater(getUI())); if (getUI() instanceof AnnisUI) { AnnisUI ui = (AnnisUI) getUI(); QueryUIState state = ui.getQueryState(); cbLeftContext.setPropertyDataSource(state.getLeftContext()); cbRightContext.setPropertyDataSource(state.getRightContext()); cbResultsPerPage.setPropertyDataSource(state.getLimit()); cbSegmentation.setPropertyDataSource(state.getBaseText()); BeanItemContainer<OrderType> orderContainer = new BeanItemContainer<>(OrderType.class, Lists.newArrayList(OrderType.values())); cbOrder.setContainerDataSource(orderContainer); cbOrder.setPropertyDataSource(state.getOrder()); } } public void updateSearchPanelConfigurationInBackground( final Set<String> corpora, final UI ui) { Background.run(new Runnable() { @Override public void run() { final List<String> segNames = getSegmentationNamesFromService(corpora); ui.access(new Runnable() { @Override public void run() { // check if a configuration is already calculated String key = buildKey(corpora); if (!lastSelection.containsKey(key)) { lastSelection.put(key, generateConfig(corpora)); } // get values from configuration Integer maxLeftCtx = Integer.parseInt(lastSelection.get(key). getConfig( KEY_MAX_CONTEXT_LEFT)); Integer maxRightCtx = Integer.parseInt(lastSelection.get(key). getConfig( KEY_MAX_CONTEXT_RIGHT)); Integer defaultCtx = Integer.parseInt(lastSelection.get(key). getConfig( KEY_DEFAULT_CONTEXT)); Integer ctxSteps = Integer.parseInt(lastSelection.get(key). getConfig( KEY_CONTEXT_STEPS)); Integer resultsPerPage = Integer.parseInt(lastSelection.get(key). 
getConfig( KEY_RESULT_PER_PAGE)); String segment = lastSelection.get(key).getConfig( KEY_DEFAULT_CONTEXT_SEGMENTATION); int selectedLeftCtx = defaultCtx; int selectedRightCtx = defaultCtx; if (optionsManuallyChanged) { // check if we can re-use the old values Integer oldValueLeft = (Integer) cbLeftContext.getValue(); Integer oldValueRight = (Integer) cbRightContext.getValue(); if (oldValueLeft != null && oldValueLeft >= 0 && oldValueLeft <= maxLeftCtx) { selectedLeftCtx = oldValueLeft; } if (oldValueRight != null && oldValueRight >= 0 && oldValueRight <= maxRightCtx) { selectedRightCtx = oldValueRight; } String oldSegment = (String) cbSegmentation.getValue(); if (oldSegment == null || segNames.contains(oldSegment)) { segment = oldSegment; } Integer oldResultsPerPage = (Integer) cbResultsPerPage.getValue(); if (oldResultsPerPage != null) { resultsPerPage = oldResultsPerPage; } // require another explicit manual change if values should be // re-used on next corpus selection change optionsManuallyChanged = false; } // update the left and right context updateContext(cbLeftContext, maxLeftCtx, ctxSteps, selectedLeftCtx, false); updateContext(cbRightContext, maxRightCtx, ctxSteps, selectedRightCtx, false); updateResultsPerPage(resultsPerPage, false); updateSegmentations(segment, segNames); } }); } }); } private static List<String> getSegmentationNamesFromService( Set<String> corpora) { List<String> segNames = new ArrayList<>(); WebResource service = Helper.getAnnisWebResource(); if (service != null) { for (String corpus : corpora) { try { SegmentationList segList = service.path("query").path("corpora").path(urlPathEscape.escape( corpus)) .path("segmentation-names") .get(SegmentationList.class); segNames.addAll(segList.getSegmentatioNames()); } catch (UniformInterfaceException ex) { if (ex.getResponse().getStatus() == 403) { log.debug( "Did not have access rights to query segmentation names for corpus", ex); } else { log.warn("Could not query segmentation names for corpus", ex); } } } } return segNames; } private void updateSegmentations(String segment, List<String> segNames) { cbSegmentation.removeAllItems(); cbSegmentation.setNullSelectionItemId(NULL_SEGMENTATION_VALUE); cbSegmentation.addItem(NULL_SEGMENTATION_VALUE); if ("tok".equalsIgnoreCase(segment)) { cbSegmentation.setValue(NULL_SEGMENTATION_VALUE); } else if (segment != null) { cbSegmentation.addItem(segment); cbSegmentation.setValue(segment); } if (segNames != null && !segNames.isEmpty()) { for (String s : segNames) { if (!s.equalsIgnoreCase(segment)) { cbSegmentation.addItem(s); } } } } /** * If all values of a specific corpus property have the same value, this value * is returned, otherwise the value from the default config is choosen. * * @param key The property key. * @param corpora Specifies the selected corpora. * @return A value defined in the copurs.properties file or in the * admin-service.properties */ private String theGreatestCommonDenominator(String key, Set<String> corpora) { int value = -1; for (String corpus : corpora) { CorpusConfig c = null; try { if (corpus.equals(Helper.DEFAULT_CONFIG)) { continue; } if (corpusConfigurations.get(corpus) == null) { c = corpusConfigurations.get(DEFAULT_CONFIG); } else { c = corpusConfigurations.get(corpus); } // do nothing if not even default config is set if (c == null) { continue; } if (!c.getConfig().containsKey(key)) { value = Integer.parseInt( corpusConfigurations.get(Helper.DEFAULT_CONFIG).getConfig(). 
getProperty(key)); break; } int tmp = Integer.parseInt(c.getConfig().getProperty(key)); if (value < 0) { value = tmp; } if (value != tmp) { value = Integer.parseInt( corpusConfigurations.get(Helper.DEFAULT_CONFIG).getConfig(). getProperty(key)); } } catch (NumberFormatException ex) { log.error( "Cannot parse the string to an integer for key {} in corpus {} config", key, corpus, ex); } } return String.valueOf(value); } /** * Builds a config for selection of one or muliple corpora. * * @param corpora Specifies the combination of corpora, for which the config * is calculated. * @return A new config which takes into account the segementation of all * selected corpora. */ private CorpusConfig generateConfig(Set<String> corpora) { corpusConfigurations = Helper.getCorpusConfigs(); CorpusConfig corpusConfig = new CorpusConfig(); // calculate the left and right context. String leftCtx = theGreatestCommonDenominator(KEY_MAX_CONTEXT_LEFT, corpora); String rightCtx = theGreatestCommonDenominator(KEY_MAX_CONTEXT_RIGHT, corpora); corpusConfig.setConfig(KEY_MAX_CONTEXT_LEFT, leftCtx); corpusConfig.setConfig(KEY_MAX_CONTEXT_RIGHT, rightCtx); // calculate the default-context corpusConfig.setConfig(KEY_CONTEXT_STEPS, theGreatestCommonDenominator( KEY_CONTEXT_STEPS, corpora)); corpusConfig.setConfig(KEY_DEFAULT_CONTEXT, theGreatestCommonDenominator( KEY_DEFAULT_CONTEXT, corpora)); // get the results per page corpusConfig.setConfig(KEY_RESULT_PER_PAGE, theGreatestCommonDenominator( KEY_RESULT_PER_PAGE, corpora)); corpusConfig.setConfig(KEY_DEFAULT_CONTEXT_SEGMENTATION, checkSegments( KEY_DEFAULT_CONTEXT_SEGMENTATION, corpora)); corpusConfig.setConfig(KEY_DEFAULT_BASE_TEXT_SEGMENTATION, checkSegments( KEY_DEFAULT_BASE_TEXT_SEGMENTATION, corpora)); return corpusConfig; } /** * Checks, if all selected corpora have the same default segmentation layer. * If not the tok layer is taken, because every corpus has this one. * * @param key the key for the segementation config, must be * {@link #KEY_DEFAULT_BASE_TEXT_SEGMENTATION} or * {@link #KEY_DEFAULT_CONTEXT_SEGMENTATION}. * @param corpora the corpora which has to be checked. * @return "tok" or a segment which is defined in all corpora. */ private String checkSegments(String key, Set<String> corpora) { String segmentation = null; for (String corpus : corpora) { CorpusConfig c = null; if (corpusConfigurations.containsConfig(corpus)) { c = corpusConfigurations.get(corpus); } else { c = corpusConfigurations.get(DEFAULT_CONFIG); } // do nothing if not even default config is set if (c == null) { continue; } String tmpSegment = c.getConfig(key); /** * If no segment is set in the corpus config use always the tok segment. */ if (tmpSegment == null) { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); } if (segmentation == null) { segmentation = tmpSegment; continue; } if (!segmentation.equals(tmpSegment)) // return the default config { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); } } if (segmentation == null) { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); } else { return segmentation; } } /** * Updates the results per page combobox. * * @param resultsPerPage The value, which is added to the combobox. * @param keepCustomValues If this flag is true, custom values are kept. 
* Custom in a sense, that the values are not calculated with * {@link #generateConfig(java.util.Set)} * */ private void updateResultsPerPage(Integer resultsPerPage, boolean keepCustomValues) { Set<Integer> tmpResultsPerPage = new TreeSet<>(); if (keepCustomValues) { Collection<?> itemIds = cbResultsPerPage.getItemIds(); Iterator<?> iterator = itemIds.iterator(); while (iterator.hasNext()) { Object next = iterator.next(); tmpResultsPerPage.add((Integer) next); } } else { for (Integer i : PREDEFINED_PAGE_SIZES) { tmpResultsPerPage.add(i); } } tmpResultsPerPage.add(resultsPerPage); cbResultsPerPage.removeAllItems(); for (Integer i : tmpResultsPerPage) { cbResultsPerPage.addItem(i); } cbResultsPerPage.setValue(resultsPerPage); // /update result per page } /** * Updates context combo boxes. * * @param c the combo box, which is updated. * @param maxCtx the larges context values until context steps are calculated. * @param ctxSteps the step range. * @param defaultCtx the value the combobox is set to. * @param keepCustomValues If this is true all custom values are kept. */ private void updateContext(ComboBox c, int maxCtx, int ctxSteps, int defaultCtx, boolean keepCustomValues) { /** * The sorting via index container is much to complex for me, so I sort the * items first and put them afterwards into the combo boxes. */ SortedSet<Integer> steps = new TreeSet<>(); if (keepCustomValues) { Collection<?> itemIds = c.getItemIds(); Iterator<?> iterator = itemIds.iterator(); while (iterator.hasNext()) { Object next = iterator.next(); steps.add((Integer) next); } } else { for (Integer i : PREDEFINED_CONTEXTS) { if (i < maxCtx) { steps.add(i); } } for (int step = ctxSteps; step < maxCtx; step += ctxSteps) { steps.add(step); } } steps.add(maxCtx); steps.add(defaultCtx); c.removeAllItems(); for (Integer i : steps) { c.addItem(i); } c.setValue(defaultCtx); } /** * Builds a Key for {@link #lastSelection} of multiple corpus selections. * * @param corpusNames A List of corpusnames, for which the key is generated. * @return A String which is a concatenation of all corpus names, sorted by * their names. 
*/ private static String buildKey(Set<String> corpusNames) { SortedSet<String> names = new TreeSet<>(corpusNames); StringBuilder key = new StringBuilder(); for (String name : names) { key.append(name); } return key.toString(); } public void setOptionsManuallyChanged(boolean optionsManuallyChanged) { this.optionsManuallyChanged = optionsManuallyChanged; } private class CustomResultSize implements AbstractSelect.NewItemHandler { ComboBox c; int resultPerPage; CustomResultSize(ComboBox c, int resultPerPage) { this.c = c; this.resultPerPage = resultPerPage; } @Override public void addNewItem(String resultPerPage) { if (!c.containsId(resultPerPage)) { try { int i = Integer.parseInt((String) resultPerPage); if (i < 1) { throw new IllegalArgumentException( "result number has to be a positive number greater or equal than 1"); } updateResultsPerPage(i, true); } catch (NumberFormatException ex) { Notification.show("invalid result per page input", "Please enter valid numbers [0-9]", Notification.Type.WARNING_MESSAGE); } catch (IllegalArgumentException ex) { Notification.show("invalid result per page input", ex.getMessage(), Notification.Type.WARNING_MESSAGE); } } } } private class CorpusConfigUpdater implements Runnable { private final UI ui; public CorpusConfigUpdater(UI ui) { this.ui = ui; } @Override public void run() { final CorpusConfigMap newCorpusConfigurations = Helper.getCorpusConfigs(); // update GUI ui.access(new Runnable() { @Override public void run() { pbLoadConfig.setVisible(false); cbLeftContext.setVisible(true); cbRightContext.setVisible(true); cbResultsPerPage.setVisible(true); cbOrder.setVisible(true); segmentationHelp.setVisible(true); corpusConfigurations = newCorpusConfigurations; if (corpusConfigurations == null || corpusConfigurations.get(DEFAULT_CONFIG) == null || corpusConfigurations.get(DEFAULT_CONFIG).isEmpty()) { CorpusConfig corpusConfig = new CorpusConfig(); corpusConfig.setConfig(KEY_MAX_CONTEXT_LEFT, "5"); corpusConfig.setConfig(KEY_MAX_CONTEXT_RIGHT, "5"); corpusConfig.setConfig(KEY_CONTEXT_STEPS, "5"); corpusConfig.setConfig(KEY_RESULT_PER_PAGE, "10"); corpusConfig.setConfig(KEY_DEFAULT_CONTEXT, "5"); corpusConfig.setConfig(KEY_DEFAULT_CONTEXT_SEGMENTATION, "tok"); corpusConfig.setConfig(KEY_DEFAULT_BASE_TEXT_SEGMENTATION, "tok"); corpusConfigurations = new CorpusConfigMap(); corpusConfigurations.put(DEFAULT_CONFIG, corpusConfig); } Integer resultsPerPage = Integer.parseInt(corpusConfigurations.get( DEFAULT_CONFIG).getConfig(KEY_RESULT_PER_PAGE)); Integer leftCtx = Integer.parseInt(corpusConfigurations. get(DEFAULT_CONFIG). getConfig(KEY_MAX_CONTEXT_LEFT)); Integer rightCtx = Integer.parseInt( corpusConfigurations.get(DEFAULT_CONFIG). getConfig(KEY_MAX_CONTEXT_RIGHT)); Integer defaultCtx = Integer.parseInt(corpusConfigurations.get( DEFAULT_CONFIG).getConfig(KEY_DEFAULT_CONTEXT)); Integer ctxSteps = Integer.parseInt( corpusConfigurations.get(DEFAULT_CONFIG). 
getConfig(KEY_CONTEXT_STEPS)); String segment = corpusConfigurations.get(DEFAULT_CONFIG).getConfig( KEY_DEFAULT_CONTEXT_SEGMENTATION); updateContext(cbLeftContext, leftCtx, ctxSteps, defaultCtx, false); updateContext(cbRightContext, rightCtx, ctxSteps, defaultCtx, false); updateResultsPerPage(resultsPerPage, false); updateSegmentations(segment, null); cbLeftContext.setNewItemHandler(new CustomContext(cbLeftContext, leftCtx, ctxSteps)); cbRightContext.setNewItemHandler(new CustomContext(cbRightContext, leftCtx, ctxSteps)); cbResultsPerPage.setNewItemHandler(new CustomResultSize( cbResultsPerPage, resultsPerPage)); } }); } } private class CustomContext implements AbstractSelect.NewItemHandler { ComboBox c; int leftCtx; int ctxSteps; CustomContext(ComboBox c, int leftCtx, int ctxSteps) { this.c = c; this.leftCtx = leftCtx; this.ctxSteps = ctxSteps; } @Override public void addNewItem(String context) { if (!c.containsId(context)) { try { int i = Integer.parseInt((String) context); if (i < 0) { throw new IllegalArgumentException( "context has to be a positive number or 0"); } if (i > leftCtx) { throw new IllegalArgumentException( "The context is greater than, than the max value defined in the corpus property file."); } updateContext(c, leftCtx, ctxSteps, i, true); } catch (NumberFormatException ex) { Notification.show("invalid context input", "Please enter valid numbers [0-9]", Notification.Type.WARNING_MESSAGE); } catch (IllegalArgumentException ex) { Notification.show("invalid context input", ex.getMessage(), Notification.Type.WARNING_MESSAGE); } } } } }
fix #446 (order field empty when search view is re-attached)
annis-gui/src/main/java/annis/gui/controlpanel/SearchOptionsPanel.java
fix #446 (order field empty when search view is re-attached)
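The SearchOptionsPanel above caches merged corpus configurations under a key produced by buildKey(), i.e. the sorted concatenation of the selected corpus names. The following stand-alone sketch (class name and corpus names are invented for illustration, not part of ANNIS) shows why sorting makes the cache key independent of the selection order:

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

// Illustration only: mirrors the buildKey() idea from SearchOptionsPanel.
public class CorpusKeyDemo {

    // Sort the corpus names first so that {"pcc2", "GUM"} and {"GUM", "pcc2"}
    // resolve to the same cached configuration entry.
    static String buildKey(Set<String> corpusNames) {
        SortedSet<String> names = new TreeSet<>(corpusNames);
        StringBuilder key = new StringBuilder();
        for (String name : names) {
            key.append(name);
        }
        return key.toString();
    }

    public static void main(String[] args) {
        Set<String> a = new LinkedHashSet<>(Arrays.asList("pcc2", "GUM"));
        Set<String> b = new LinkedHashSet<>(Arrays.asList("GUM", "pcc2"));
        System.out.println(buildKey(a).equals(buildKey(b))); // true
    }
}

One design caveat worth noting: plain concatenation can in principle collide (for example "ab" + "c" and "a" + "bc" yield the same key), so inserting a separator character that cannot occur in corpus names would make the key unambiguous.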
Java
apache-2.0
87f70249d134ae8bf740b40bc728cb1a6eec0d0c
0
bridje/bridje-framework,bridje/bridje-framework,touwolf/bridje-framework,bridje/bridje-framework,touwolf/bridje-framework
/* * Copyright 2016 Bridje Framework. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bridje.jdbc.impl; import java.io.PrintWriter; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.ArrayList; import java.util.Deque; import java.util.List; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.logging.Level; import java.util.logging.Logger; import javax.sql.DataSource; import org.bridje.jdbc.config.DataSourceConfig; class DataSourceImpl implements DataSource { private static final Logger LOG = Logger.getLogger(DataSourceImpl.class.getName()); private final Deque<ConnectionImpl> freeConnections = new ConcurrentLinkedDeque<>(); private final Deque<ConnectionImpl> usedConnections = new ConcurrentLinkedDeque<>(); private final DataSourceConfig config; private PrintWriter logWriter; private int loginTimeout; private boolean closed; private long lastCheck; public DataSourceImpl(DataSourceConfig config) { this.config = config; this.lastCheck = System.currentTimeMillis(); } @Override public Connection getConnection() throws SQLException { if(closed) { throw new SQLException("The DataSource is close."); } Connection result = getFreeConnection(); if(result != null) { checkIdleConnections(); return result; } result = getNewConnection(); if(result != null) { return result; } return waitFreeConnection(); } @Override public Connection getConnection(String username, String password) throws SQLException { if(closed) { throw new SQLException("The DataSource is close."); } return createNewConnection(username, password); } private synchronized Connection getFreeConnection() throws SQLException { if(!freeConnections.isEmpty()) { ConnectionImpl nextConnection = freeConnections.poll(); if(needToReconnect(nextConnection)) { if(!nextConnection.isValid(10)) { nextConnection.realClose(); nextConnection = createNewConnection(); } } nextConnection.open(); usedConnections.add(nextConnection); LOG.log(Level.FINE, "Current free connections in {0}: {1}", new Object[]{ config.getName(), freeConnections.size() }); return nextConnection; } return null; } private synchronized Connection getNewConnection() throws SQLException { if(usedConnections.size() < config.getMaxConnections()) { ConnectionImpl newConnection = createNewConnection(); newConnection.open(); usedConnections.add(newConnection); LOG.log(Level.FINE, "Current used connections in {0}: {1}", new Object[]{ config.getName(), usedConnections.size() }); return newConnection; } return null; } private synchronized Connection waitFreeConnection() throws SQLException { Connection cnn = null; while(cnn == null) { try { wait(20 * 1000); } catch (InterruptedException ex) { LOG.log(Level.SEVERE, ex.getMessage(), ex); } cnn = getFreeConnection(); } return cnn; } @Override public PrintWriter getLogWriter() throws SQLException { return logWriter; } @Override public void setLogWriter(PrintWriter out) throws SQLException { logWriter = out; } @Override public void 
setLoginTimeout(int seconds) throws SQLException { loginTimeout = seconds; } @Override public int getLoginTimeout() throws SQLException { return loginTimeout; } @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException { throw new SQLFeatureNotSupportedException(); } @Override public <T> T unwrap(Class<T> iface) throws SQLException { return null; } @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { return false; } private ConnectionImpl createNewConnection() throws SQLException { LOG.log(Level.INFO, "Creating new connection for {0}.", config.getName()); try { Class.forName(config.getDriver()); } catch (ClassNotFoundException ex) { throw new SQLException(ex.getMessage(), ex); } Connection internalConnection = DriverManager.getConnection(config.getUrl(), config.getUser(), config.getPassword()); return new ConnectionImpl(internalConnection, this); } private Connection createNewConnection(String user, String password) throws SQLException { try { Class.forName(config.getDriver()); } catch (ClassNotFoundException ex) { throw new SQLException(ex.getMessage(), ex); } return DriverManager.getConnection(config.getUrl(), user, password); } protected synchronized void connectionClosed(ConnectionImpl closedConnection) { usedConnections.remove(closedConnection); freeConnections.add(closedConnection); LOG.log(Level.FINE, "Current free connections in {0}: {1}", new Object[]{ config.getName(), freeConnections.size() }); notify(); } protected synchronized void close() throws SQLException { closed = true; while(!usedConnections.isEmpty()) { try { wait(); } catch (InterruptedException e) { LOG.log(Level.FINE, e.getMessage(), e); } } for (ConnectionImpl freeConnection : freeConnections) { freeConnection.realClose(); } } private void checkIdleConnections() { long idleTime = config.getIdleTime() * 1000; if( (System.currentTimeMillis() - lastCheck) > idleTime ) { synchronized(this) { lastCheck = System.currentTimeMillis(); List<ConnectionImpl> toRemove = new ArrayList<>(); for (ConnectionImpl freeConnection : freeConnections) { if( (System.currentTimeMillis() - freeConnection.getLastUse()) > idleTime ) { try { int totalConnections = freeConnections.size() + usedConnections.size(); if(totalConnections - toRemove.size() <= config.getMinConnections()) { break; } freeConnection.realClose(); toRemove.add(freeConnection); } catch (SQLException ex) { LOG.log(Level.SEVERE, ex.getMessage(), ex); } } } LOG.log(Level.FINE, "Removing {0} connections for {1}.", new Object[]{ toRemove.size(), config.getName() }); freeConnections.removeAll(toRemove); LOG.log(Level.FINE, "Current total connections in {0}: {1}", new Object[]{ config.getName(), freeConnections.size() + usedConnections.size() }); } } } private boolean needToReconnect(ConnectionImpl connection) { long reTime = config.getReconnectTime() * 1000; long timePass = System.currentTimeMillis() - connection.getLastUse(); return reTime < timePass; } }
bridje-jdbc/src/main/java/org/bridje/jdbc/impl/DataSourceImpl.java
/* * Copyright 2016 Bridje Framework. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bridje.jdbc.impl; import java.io.PrintWriter; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.ArrayList; import java.util.Deque; import java.util.List; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.logging.Level; import java.util.logging.Logger; import javax.sql.DataSource; import org.bridje.jdbc.config.DataSourceConfig; class DataSourceImpl implements DataSource { private static final Logger LOG = Logger.getLogger(DataSourceImpl.class.getName()); private final Deque<ConnectionImpl> freeConnections = new ConcurrentLinkedDeque<>(); private final Deque<ConnectionImpl> usedConnections = new ConcurrentLinkedDeque<>(); private final DataSourceConfig config; private PrintWriter logWriter; private int loginTimeout; private boolean closed; private long lastCheck; public DataSourceImpl(DataSourceConfig config) { this.config = config; this.lastCheck = System.currentTimeMillis(); } @Override public Connection getConnection() throws SQLException { if(closed) { throw new SQLException("The DataSource is close."); } Connection result = getFreeConnection(); if(result != null) { checkIdleConnections(); return result; } result = getNewConnection(); if(result != null) { return result; } return waitFreeConnection(); } @Override public Connection getConnection(String username, String password) throws SQLException { if(closed) { throw new SQLException("The DataSource is close."); } return createNewConnection(username, password); } private synchronized Connection getFreeConnection() throws SQLException { if(!freeConnections.isEmpty()) { ConnectionImpl nextConnection = freeConnections.poll(); if(needToReconnect(nextConnection)) { if(!nextConnection.isValid(10)) { nextConnection.realClose(); nextConnection = createNewConnection(); } } nextConnection.open(); usedConnections.add(nextConnection); LOG.log(Level.FINE, "Current free connections in {0}: {1}", new Object[]{ config.getName(), freeConnections.size() }); return nextConnection; } return null; } private synchronized Connection getNewConnection() throws SQLException { if(usedConnections.size() < config.getMaxConnections()) { ConnectionImpl newConnection = createNewConnection(); newConnection.open(); usedConnections.add(newConnection); LOG.log(Level.FINE, "Current used connections in {0}: {1}", new Object[]{ config.getName(), usedConnections.size() }); return newConnection; } return null; } private synchronized Connection waitFreeConnection() throws SQLException { Connection cnn = null; while(cnn == null) { try { wait(); } catch (InterruptedException ex) { LOG.log(Level.SEVERE, ex.getMessage(), ex); } cnn = getFreeConnection(); } return cnn; } @Override public PrintWriter getLogWriter() throws SQLException { return logWriter; } @Override public void setLogWriter(PrintWriter out) throws SQLException { logWriter = out; } @Override public void 
setLoginTimeout(int seconds) throws SQLException { loginTimeout = seconds; } @Override public int getLoginTimeout() throws SQLException { return loginTimeout; } @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException { throw new SQLFeatureNotSupportedException(); } @Override public <T> T unwrap(Class<T> iface) throws SQLException { return null; } @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { return false; } private ConnectionImpl createNewConnection() throws SQLException { LOG.log(Level.INFO, "Creating new connection for {0}.", config.getName()); try { Class.forName(config.getDriver()); } catch (ClassNotFoundException ex) { throw new SQLException(ex.getMessage(), ex); } Connection internalConnection = DriverManager.getConnection(config.getUrl(), config.getUser(), config.getPassword()); return new ConnectionImpl(internalConnection, this); } private Connection createNewConnection(String user, String password) throws SQLException { try { Class.forName(config.getDriver()); } catch (ClassNotFoundException ex) { throw new SQLException(ex.getMessage(), ex); } return DriverManager.getConnection(config.getUrl(), user, password); } protected synchronized void connectionClosed(ConnectionImpl closedConnection) { usedConnections.remove(closedConnection); freeConnections.add(closedConnection); LOG.log(Level.FINE, "Current free connections in {0}: {1}", new Object[]{ config.getName(), freeConnections.size() }); notify(); } protected synchronized void close() throws SQLException { closed = true; while(!usedConnections.isEmpty()) { try { wait(); } catch (InterruptedException e) { LOG.log(Level.FINE, e.getMessage(), e); } } for (ConnectionImpl freeConnection : freeConnections) { freeConnection.realClose(); } } private void checkIdleConnections() { long idleTime = config.getIdleTime() * 1000; if( (System.currentTimeMillis() - lastCheck) > idleTime ) { synchronized(this) { lastCheck = System.currentTimeMillis(); List<ConnectionImpl> toRemove = new ArrayList<>(); for (ConnectionImpl freeConnection : freeConnections) { if( (System.currentTimeMillis() - freeConnection.getLastUse()) > idleTime ) { try { int totalConnections = freeConnections.size() + usedConnections.size(); if(totalConnections - toRemove.size() <= config.getMinConnections()) { break; } freeConnection.realClose(); toRemove.add(freeConnection); } catch (SQLException ex) { LOG.log(Level.SEVERE, ex.getMessage(), ex); } } } LOG.log(Level.FINE, "Removing {0} connections for {1}.", new Object[]{ toRemove.size(), config.getName() }); freeConnections.removeAll(toRemove); LOG.log(Level.FINE, "Current total connections in {0}: {1}", new Object[]{ config.getName(), freeConnections.size() + usedConnections.size() }); } } } private boolean needToReconnect(ConnectionImpl connection) { long reTime = config.getReconnectTime() * 1000; long timePass = System.currentTimeMillis() - connection.getLastUse(); return reTime < timePass; } }
Fixing connection timeout.
bridje-jdbc/src/main/java/org/bridje/jdbc/impl/DataSourceImpl.java
Fixing connection timeout.
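The fix above bounds the wait in waitFreeConnection() (wait(20 * 1000) instead of an indefinite wait()), so a thread that misses a notify() re-checks the pool at least every 20 seconds. A generic sketch of that pattern, assuming nothing about the Bridje API beyond what is shown above (class and method names here are invented):

import java.sql.SQLException;
import java.util.ArrayDeque;
import java.util.Deque;

// Minimal illustration of a bounded wait for a pooled resource.
public class BoundedPoolWait<T> {

    private final Deque<T> free = new ArrayDeque<>();

    public synchronized void release(T resource) {
        free.add(resource);
        notify(); // wake one waiter, as connectionClosed() does
    }

    public synchronized T acquire(long waitMillis) throws SQLException {
        while (free.isEmpty()) {
            try {
                // Bounded wait: even if a notify() is missed, the loop
                // re-checks the pool after at most waitMillis milliseconds.
                wait(waitMillis);
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt();
                throw new SQLException("Interrupted while waiting for a free connection", ex);
            }
        }
        return free.poll();
    }
}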
Java
apache-2.0
1082a6b5667ed4399e089b040948ab43e50f838a
0
plucena/lojamvn,plucena/lojamvn,plucena/lojamvn
package net.mybluemix.dao; import java.util.List; import javax.persistence.TypedQuery; import javax.ws.rs.GET; import net.mybluemix.entity.Fornecedor; import net.mybluemix.entity.MateriaPrima; public class FornecedorDAO extends BaseDAO{ public FornecedorDAO() { super(Fornecedor.class); // TODO Auto-generated constructor stub } public List<Fornecedor> findAll(){ return this.createQuery("Select a From Fornecedor a", Fornecedor.class); } public Fornecedor find(String cnpj) { TypedQuery<Fornecedor> query = manager.createQuery( "SELECT c FROM Fornecedor c WHERE c.cnpj = :cnpj", Fornecedor.class); return query.setParameter("cnpj", cnpj).getSingleResult(); } }
src/main/java/net/mybluemix/dao/FornecedorDAO.java
package net.mybluemix.dao; import java.util.List; import javax.persistence.TypedQuery; import javax.ws.rs.GET; import net.mybluemix.entity.Fornecedor; import net.mybluemix.entity.MateriaPrima; public class FornecedorDAO extends BaseDAO{ public FornecedorDAO() { super(Fornecedor.class); // TODO Auto-generated constructor stub } public List<Fornecedor> findAll(){ return this.createQuery("Select a From Fornecedor a", Fornecedor.class); } public Fornecedor find(String cnpj) { TypedQuery<Fornecedor> query = manager.createQuery( "SELECT c FROM Fornecedor c WHERE c.cnoj = :cnpj", Fornecedor.class); return query.setParameter("cnpj", cnpj).getSingleResult(); } }
Fornecedor DAO fixes
src/main/java/net/mybluemix/dao/FornecedorDAO.java
Fornecedor DAO fixes
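The corrected query above still uses getSingleResult(), which throws javax.persistence.NoResultException when no Fornecedor matches the given CNPJ. If callers prefer not to handle that exception, a variant such as the following (the helper class and method names are hypothetical, not part of the project) returns an Optional instead:

import java.util.Optional;

import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.TypedQuery;

import net.mybluemix.entity.Fornecedor;

// Hypothetical alternative to FornecedorDAO.find(String).
public class FornecedorLookup {

    private final EntityManager manager;

    public FornecedorLookup(EntityManager manager) {
        this.manager = manager;
    }

    public Optional<Fornecedor> findByCnpj(String cnpj) {
        TypedQuery<Fornecedor> query = manager.createQuery(
                "SELECT c FROM Fornecedor c WHERE c.cnpj = :cnpj", Fornecedor.class);
        try {
            return Optional.of(query.getSingleResult());
        } catch (NoResultException ex) {
            return Optional.empty();
        }
    }
}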
Java
apache-2.0
edb5174a387532ddfcb49255c89e61b3f3871266
0
vobruba-martin/closure-compiler,monetate/closure-compiler,ChadKillingsworth/closure-compiler,nawawi/closure-compiler,nawawi/closure-compiler,ChadKillingsworth/closure-compiler,monetate/closure-compiler,Yannic/closure-compiler,google/closure-compiler,Yannic/closure-compiler,vobruba-martin/closure-compiler,google/closure-compiler,vobruba-martin/closure-compiler,ChadKillingsworth/closure-compiler,monetate/closure-compiler,google/closure-compiler,ChadKillingsworth/closure-compiler,nawawi/closure-compiler,google/closure-compiler,Yannic/closure-compiler,monetate/closure-compiler,vobruba-martin/closure-compiler,Yannic/closure-compiler,nawawi/closure-compiler
/* * Copyright 2020 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Strings.lenientFormat; import com.google.debugging.sourcemap.Base64VLQ; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; /** * Instrument production coverage for javascript. This type of coverage will instrument different * levels of source code such as function and branch instrumentation. This instrumentation differs * from the previous implementations ({@link CoverageInstrumentationCallback} and {@link * BranchCoverageInstrumentationCallback}) in that it is properly optimized and obfuscated so that * it can be run on client browsers with the goal of better detecting dead code. The callback will * instrument with a function call which is provided in the source code as opposed to an array. */ final class ProductionCoverageInstrumentationCallback extends NodeTraversal.AbstractPostOrderCallback { // TODO(psokol): Make this dynamic so that instrumentation does not rely on hardcoded files private static final String INSTRUMENT_CODE_FUNCTION_NAME = "instrumentCode"; private static final String INSTRUMENT_CODE_FILE_NAME = "InstrumentCode.js"; /** * The compiler runs an earlier pass that combines all modules and renames them appropriately. * This constant represents what the INSTRUMENT_CODE_FILE_NAME module will be renamed to by the * compiler and this will be used to make the correct call to INSTRUMENT_CODE_FUNCTION_NAME. */ private static final String MODULE_RENAMING = "module$exports$instrument$code"; /** * INSTRUMENT_CODE_FILE_NAME will contain an instance of the instrumentCode class and that * instance name is stored in this constant. */ private static final String INSTRUMENT_CODE_INSTANCE = "instrumentCodeInstance"; private final AbstractCompiler compiler; private final ParameterMapping parameterMapping; boolean visitedInstrumentCodeFile = false; /** * Stores the name of the current function that encapsulates the node being instrumented */ private String cachedFunctionName = "Anonymous"; public ProductionCoverageInstrumentationCallback( AbstractCompiler compiler) { this.compiler = compiler; this.parameterMapping = new ParameterMapping(); } @Override public void visit(NodeTraversal traversal, Node node, Node parent) { String fileName = traversal.getSourceName(); String sourceFileName = node.getSourceFileName(); // If origin of node is not from sourceFile, do not instrument. This typically occurs when // polyfill code is injected into the sourceFile AST and this check avoids instrumenting it. We // avoid instrumentation as this callback does not distinguish between sourceFile code and // injected code and can result in an error. 
if (!Objects.equals(fileName, sourceFileName)) { return; } // If Source File INSTRUMENT_CODE_FILE_NAME has not yet been visited, do not instrument as // the instrument function has not yet been defined and any call made to it will result in an // error in the compiled JS code. if (!visitedInstrumentCodeFile || sourceFileName.endsWith(INSTRUMENT_CODE_FILE_NAME)) { if (sourceFileName.endsWith(INSTRUMENT_CODE_FILE_NAME)) { visitedInstrumentCodeFile = true; } return; } if (node.isFunction()) { cachedFunctionName = NodeUtil.getBestLValueName(NodeUtil.getBestLValue(node)); instrumentCode(traversal, node.getLastChild(), cachedFunctionName); } } /** * Iterate over all collected block nodes within a Script node and add a new child to the front of * each block node which is the instrumentation Node * * @param traversal The node traversal context which maintains information such as fileName being * traversed * @param block The block node to be instrumented instrumented * @param fnName The function name that encapsulates the current node block being instrumented */ private void instrumentCode(NodeTraversal traversal, Node block, String fnName) { block.addChildToFront( newInstrumentationNode(traversal, block, fnName)); compiler.reportChangeToEnclosingScope(block); } /** * Create a function call to the Instrument Code function with properly encoded parameters. The * instrumented function call will be of the following form: MODULE_RENAMING.INSTRUMENT_CODE_INSTANCE.INSTRUMENT_CODE_FUNCTION_NAME(param1, * param2). This with the given constants evaluates to: module$exports$instrument$code.instrumentCodeInstance.instrumentCode(encodedParam, * lineNum); * * @param traversal The context of the current traversal. * @param node The block node to be instrumented. * @param fnName The function name that the node exists within. * @return The newly constructed function call node. */ private Node newInstrumentationNode(NodeTraversal traversal, Node node, String fnName) { String type = "Type.FUNCTION"; String encodedParam = parameterMapping.getEncodedParam(traversal.getSourceName(), fnName, type); Node innerProp = IR .getprop(IR.name(MODULE_RENAMING), IR.string(INSTRUMENT_CODE_INSTANCE)); Node outerProp = IR.getprop(innerProp, IR.string(INSTRUMENT_CODE_FUNCTION_NAME)); Node functionCall = IR.call(outerProp, IR.string(encodedParam), IR.number(node.getLineno())); Node exprNode = IR.exprResult(functionCall); return exprNode.useSourceInfoIfMissingFromForTree(node); } /** * A class the maintains a mapping of unique identifiers to parameter values. It also generates * unique identifiers by creating a counter starting form 0 and increments this value when * assigning a new unique identifier. 
*/ private static final class ParameterMapping { private final List<String> uniqueIdentifier; private final List<String> paramValue; private long nextUniqueIdentifier; ParameterMapping() { nextUniqueIdentifier = 0; uniqueIdentifier = new ArrayList<>(); paramValue = new ArrayList<>(); } public String getEncodedParam(String fileName, String functionName, String type) { String combinedParam = lenientFormat("%s %s %s", fileName, functionName, type); long uniqueIdentifier = getUniqueIdentifier(combinedParam); if (uniqueIdentifier > Integer.MAX_VALUE) { throw new ArithmeticException( "Unique Identifier exceeds value of Integer.MAX_VALUE, could not encode with Base 64 VLQ"); } StringBuilder sb = new StringBuilder(); try { Base64VLQ.encode(sb, Math.toIntExact(uniqueIdentifier)); } catch (IOException e) { throw new AssertionError("Encountered unexpected IOException error"); } String result = sb.toString(); addParamMapping(result, combinedParam); return result; } private long getUniqueIdentifier(String param) { nextUniqueIdentifier++; return nextUniqueIdentifier; } private void addParamMapping(String identifier, String param) { uniqueIdentifier.add(identifier); paramValue.add(param); } } }
src/com/google/javascript/jscomp/ProductionCoverageInstrumentationCallback.java
/* * Copyright 2020 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Strings.lenientFormat; import com.google.debugging.sourcemap.Base64VLQ; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; /** * Instrument production coverage for javascript. This type of coverage will instrument different * levels of source code such as function and branch instrumentation. This instrumentation differs * from the previous implementations ({@link CoverageInstrumentationCallback} and {@link * BranchCoverageInstrumentationCallback}) in that it is properly optimized and obfuscated so that * it can be run on client browsers with the goal of better detecting dead code. The callback will * instrument with a function call which is provided in the source code as opposed to an array. */ final class ProductionCoverageInstrumentationCallback extends NodeTraversal.AbstractPostOrderCallback { // TODO(psokol): Make this dynamic so that instrumentation does not rely on hardcoded files private static final String INSTRUMENT_CODE_FUNCTION_NAME = "instrumentCode"; private static final String INSTRUMENT_CODE_FILE_NAME = "InstrumentCode.js"; /** * The compiler runs an earlier pass that combines all modules and renames them appropriately. * This constant represents what the INSTRUMENT_CODE_FILE_NAME module will be renamed to by the * compiler and this will be used to make the correct call to INSTRUMENT_CODE_FUNCTION_NAME. */ private static final String MODULE_RENAMING = "module$exports$instrument$code"; /** * INSTRUMENT_CODE_FILE_NAME will contain an instance of the instrumentCode class and that * instance name is stored in this constant. */ private static final String INSTRUMENT_CODE_INSTANCE = "instrumentCodeInstance"; private final AbstractCompiler compiler; private final ParameterMapping parameterMapping; boolean visitedInstrumentCodeFile = false; private String functionName = "Anonymous"; public ProductionCoverageInstrumentationCallback( AbstractCompiler compiler) { this.compiler = compiler; this.parameterMapping = new ParameterMapping(); } @Override public void visit(NodeTraversal traversal, Node node, Node parent) { String fileName = traversal.getSourceName(); String sourceFileName = node.getSourceFileName(); // If origin of node is not from sourceFile, do not instrument. This typically occurs when // polyfill code is injected into the sourceFile AST and this check avoids instrumenting it. We // avoid instrumentation as this callback does not distinguish between sourceFile code and // injected code and can result in an error. if (!Objects.equals(fileName, sourceFileName)) { return; } // If Source File INSTRUMENT_CODE_FILE_NAME has not yet been visited, do not instrument as // the instrument function has not yet been defined and any call made to it will result in an // error in the compiled JS code. 
if (!visitedInstrumentCodeFile || sourceFileName.endsWith(INSTRUMENT_CODE_FILE_NAME)) { if (sourceFileName.endsWith(INSTRUMENT_CODE_FILE_NAME)) { visitedInstrumentCodeFile = true; } return; } if (node.isFunction()) { functionName = NodeUtil.getBestLValueName(NodeUtil.getBestLValue(node)); instrumentCode(traversal, node.getLastChild(), functionName); } } /** * Iterate over all collected block nodes within a Script node and add a new child to the front of * each block node which is the instrumentation Node * * @param traversal The node traversal context which maintains information such as fileName being * traversed * @param block The block node to be instrumented instrumented * @param fnName The function name that encapsulates the current node block being instrumented */ private void instrumentCode(NodeTraversal traversal, Node block, String fnName) { block.addChildToFront( newInstrumentationNode(traversal, block, fnName)); compiler.reportChangeToEnclosingScope(block); } /** * Create a function call to the Instrument Code function with properly encoded parameters. The * instrumented function call will be of the following form: MODULE_RENAMING.INSTRUMENT_CODE_INSTANCE.INSTRUMENT_CODE_FUNCTION_NAME(param1, * param2). This with the given constants evaluates to: module$exports$instrument$code.instrumentCodeInstance.instrumentCode(encodedParam, * lineNum); * * @param traversal The context of the current traversal. * @param node The block node to be instrumented. * @param fnName The function name that the node exists within. * @return The newly constructed function call node. */ private Node newInstrumentationNode(NodeTraversal traversal, Node node, String fnName) { String type = "Type.FUNCTION"; String encodedParam = parameterMapping.getEncodedParam(traversal.getSourceName(), fnName, type); Node innerProp = IR .getprop(IR.name(MODULE_RENAMING), IR.string(INSTRUMENT_CODE_INSTANCE)); Node outerProp = IR.getprop(innerProp, IR.string(INSTRUMENT_CODE_FUNCTION_NAME)); Node functionCall = IR.call(outerProp, IR.string(encodedParam), IR.number(node.getLineno())); Node exprNode = IR.exprResult(functionCall); return exprNode.useSourceInfoIfMissingFromForTree(node); } /** * A class the maintains a mapping of unique identifiers to parameter values. It also generates * unique identifiers by creating a counter starting form 0 and increments this value when * assigning a new unique identifier. 
*/ private static final class ParameterMapping { private final List<String> uniqueIdentifier; private final List<String> paramValue; private long nextUniqueIdentifier; ParameterMapping() { nextUniqueIdentifier = 0; uniqueIdentifier = new ArrayList<>(); paramValue = new ArrayList<>(); } public String getEncodedParam(String fileName, String functionName, String type) { String combinedParam = lenientFormat("%s %s %s", fileName, functionName, type); long uniqueIdentifier = getUniqueIdentifier(combinedParam); if (uniqueIdentifier > Integer.MAX_VALUE) { throw new ArithmeticException( "Unique Identifier exceeds value of Integer.MAX_VALUE, could not encode with Base 64 VLQ"); } StringBuilder sb = new StringBuilder(); try { Base64VLQ.encode(sb, Math.toIntExact(uniqueIdentifier)); } catch (IOException e) { throw new AssertionError("Encountered unexpected IOException error"); } String result = sb.toString(); addParamMapping(result, combinedParam); return result; } private long getUniqueIdentifier(String param) { nextUniqueIdentifier++; return nextUniqueIdentifier; } private void addParamMapping(String identifier, String param) { uniqueIdentifier.add(identifier); paramValue.add(param); } } }
Added comment to better disclose purpose of class parameter
src/com/google/javascript/jscomp/ProductionCoverageInstrumentationCallback.java
Added comment to better disclose purpose of class parameter
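The ParameterMapping inner class above assigns each instrumented location an incrementing identifier and Base64-VLQ-encodes it, so the injected instrumentCode() call carries a short token rather than the full "fileName functionName type" string. A stand-alone sketch of just the mapping step (this is not compiler code, and the VLQ encoding is deliberately left out):

import java.util.LinkedHashMap;
import java.util.Map;

// Illustration of mapping parameter strings to small integer ids.
public class ParameterMappingSketch {

    private final Map<String, Integer> idsByParam = new LinkedHashMap<>();

    public int idFor(String fileName, String functionName, String type) {
        String combined = fileName + " " + functionName + " " + type;
        Integer id = idsByParam.get(combined);
        if (id == null) {
            id = idsByParam.size(); // next free identifier
            idsByParam.put(combined, id);
        }
        return id;
    }

    public static void main(String[] args) {
        ParameterMappingSketch mapping = new ParameterMappingSketch();
        System.out.println(mapping.idFor("foo.js", "bar", "Type.FUNCTION")); // 0
        System.out.println(mapping.idFor("foo.js", "baz", "Type.FUNCTION")); // 1
        System.out.println(mapping.idFor("foo.js", "bar", "Type.FUNCTION")); // 0 again
    }
}

Note that the sketch de-duplicates repeated parameter strings, whereas the callback above always increments nextUniqueIdentifier; either choice works as long as the mapping from identifier to parameter string is recorded.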
Java
apache-2.0
29c5e906bf7222723274eba8dd188889fb6137d2
0
davinkevin/Podcast-Server,davinkevin/Podcast-Server,davinkevin/Podcast-Server,davinkevin/Podcast-Server,davinkevin/Podcast-Server
package lan.dk.podcastserver.controller.api; import javaslang.collection.HashSet; import javaslang.collection.List; import javaslang.collection.Set; import lan.dk.podcastserver.business.TagBusiness; import lan.dk.podcastserver.entity.Tag; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import java.util.UUID; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.*; /** * Created by kevin on 15/09/15 for Podcast Server */ @RunWith(MockitoJUnitRunner.class) public class TagControllerTest { private @Mock TagBusiness tagBusiness; private @InjectMocks TagController tagController; @Test public void should_find_tag_by_id() { /* Given */ UUID id = UUID.randomUUID(); Tag value = new Tag(); when(tagBusiness.findOne(id)).thenReturn(value); /* When */ Tag tagById = tagController.findById(id); /* Then */ assertThat(tagById).isSameAs(value); verify(tagBusiness, only()).findOne(id); } @Test public void should_find_all_tag() { /* Given */ List<Tag> tags = List.empty(); when(tagBusiness.findAll()).thenReturn(tags); /* When */ List<Tag> tagList = tagController.findAll(); /* Then */ assertThat(tagList).isSameAs(tags); verify(tagBusiness, only()).findAll(); } @Test public void should_find_by_name() { /* Given */ String name = "Foo"; Set<Tag> tags = HashSet.empty(); when(tagBusiness.findByNameLike(name)).thenReturn(tags); /* When */ Set<Tag> tagsFoundByName = tagController.findByNameLike(name); /* Then */ assertThat(tagsFoundByName).isSameAs(tags); verify(tagBusiness, only()).findByNameLike(name); } }
Backend/src/test/java/lan/dk/podcastserver/controller/api/TagControllerTest.java
package lan.dk.podcastserver.controller.api; import javaslang.collection.HashSet; import javaslang.collection.List; import javaslang.collection.Set; import lan.dk.podcastserver.business.TagBusiness; import lan.dk.podcastserver.entity.Tag; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import java.util.UUID; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.*; /** * Created by kevin on 15/09/15 for Podcast Server */ @RunWith(MockitoJUnitRunner.class) public class TagControllerTest { @Mock TagBusiness tagBusiness; @InjectMocks TagController tagController; @Test public void should_find_tag_by_id() { /* Given */ UUID id = UUID.randomUUID(); Tag value = new Tag(); when(tagBusiness.findOne(eq(id))).thenReturn(value); /* When */ Tag tagById = tagController.findById(id); /* Then */ assertThat(tagById).isSameAs(value); verify(tagBusiness, only()).findOne(eq(id)); } @Test public void should_find_all_tag() { /* Given */ List<Tag> tags = List.empty(); when(tagBusiness.findAll()).thenReturn(tags); /* When */ List<Tag> tagList = tagController.findAll(); /* Then */ assertThat(tagList).isSameAs(tags); verify(tagBusiness, only()).findAll(); } @Test public void should_find_by_name() { /* Given */ String name = "Foo"; Set<Tag> tags = HashSet.empty(); when(tagBusiness.findByNameLike(eq(name))).thenReturn(tags); /* When */ Set<Tag> tagsFoundByName = tagController.findByNameLike(name); /* Then */ assertThat(tagsFoundByName).isSameAs(tags); verify(tagBusiness, only()).findByNameLike(eq(name)); } }
refactor(tags): simplify code execution with mockito
Backend/src/test/java/lan/dk/podcastserver/controller/api/TagControllerTest.java
refactor(tags): simplify code execution with mockito
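The refactoring above drops the eq(...) wrappers because Mockito compares plain argument values with equals() by default; eq() only becomes necessary once any other matcher (such as any()) is used in the same call, since raw values and matchers must not be mixed. A tiny self-contained illustration (the Repository interface is hypothetical):

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.UUID;

public class MatcherStyleDemo {

    interface Repository {
        String findOne(UUID id);
    }

    public static void main(String[] args) {
        Repository repo = mock(Repository.class);
        UUID id = UUID.randomUUID();

        // Equivalent to when(repo.findOne(eq(id))).thenReturn("tag")
        when(repo.findOne(id)).thenReturn("tag");

        repo.findOne(id);
        verify(repo).findOne(id); // plain value, compared with equals()
    }
}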
Java
apache-2.0
dc07df358897fc1fdd0f8ab11008b75818c45016
0
resmo/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,argv0/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,argv0/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,argv0/cloudstack,cinderella/incubator-cloudstack,resmo/cloudstack,argv0/cloudstack,wido/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,cinderella/incubator-cloudstack,argv0/cloudstack,jcshen007/cloudstack,wido/cloudstack,resmo/cloudstack,mufaddalq/cloudstack-datera-driver,cinderella/incubator-cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,resmo/cloudstack,argv0/cloudstack,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,resmo/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,cinderella/incubator-cloudstack,mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,cinderella/incubator-cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack
/** * Copyright (C) 2010 Cloud.com, Inc. All rights reserved. * * This software is licensed under the GNU General Public License v3 or later. * * It is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package com.cloud.api.commands; import java.util.ArrayList; import java.util.List; import java.util.Set; import org.apache.log4j.Logger; import com.cloud.api.ApiConstants; import com.cloud.api.BaseListCmd; import com.cloud.api.IdentityMapper; import com.cloud.api.Implementation; import com.cloud.api.Parameter; import com.cloud.api.response.ListResponse; import com.cloud.api.response.TemplateResponse; import com.cloud.async.AsyncJob; import com.cloud.template.VirtualMachineTemplate.TemplateFilter; import com.cloud.user.Account; import com.cloud.user.UserContext; import com.cloud.utils.Pair; @Implementation(description="Lists all available ISO files.", responseObject=TemplateResponse.class) public class ListIsosCmd extends BaseListCmd { public static final Logger s_logger = Logger.getLogger(ListIsosCmd.class.getName()); private static final String s_name = "listisosresponse"; ///////////////////////////////////////////////////// //////////////// API parameters ///////////////////// ///////////////////////////////////////////////////// @Parameter(name=ApiConstants.ACCOUNT, type=CommandType.STRING, description="the account of the ISO file. Must be used with the domainId parameter.") private String accountName; @Parameter(name=ApiConstants.BOOTABLE, type=CommandType.BOOLEAN, description="true if the ISO is bootable, false otherwise") private Boolean bootable; @IdentityMapper(entityTableName="domain") @Parameter(name=ApiConstants.DOMAIN_ID, type=CommandType.LONG, description="lists all available ISO files by ID of a domain. If used with the account parameter, lists all available ISO files for the account in the ID of a domain.") private Long domainId; @Parameter(name=ApiConstants.HYPERVISOR, type=CommandType.STRING, description="the hypervisor for which to restrict the search") private String hypervisor; @IdentityMapper(entityTableName="vm_template") @Parameter(name=ApiConstants.ID, type=CommandType.LONG, description="list all isos by id") private Long id; @Parameter(name=ApiConstants.IS_PUBLIC, type=CommandType.BOOLEAN, description="true if the ISO is publicly available to all users, false otherwise.") private Boolean publicIso; @Parameter(name=ApiConstants.IS_READY, type=CommandType.BOOLEAN, description="true if this ISO is ready to be deployed") private Boolean ready; @Parameter(name=ApiConstants.ISO_FILTER, type=CommandType.STRING, description="possible values are \"featured\", \"self\", \"self-executable\",\"executable\", and \"community\". " + "* featured-ISOs that are featured and are publicself-ISOs that have been registered/created by the owner. " + "* selfexecutable-ISOs that have been registered/created by the owner that can be used to deploy a new VM. 
" + "* executable-all ISOs that can be used to deploy a new VM " + "* community-ISOs that are public.") private String isoFilter = TemplateFilter.selfexecutable.toString(); @Parameter(name=ApiConstants.NAME, type=CommandType.STRING, description="list all isos by name") private String isoName; @IdentityMapper(entityTableName="data_center") @Parameter(name=ApiConstants.ZONE_ID, type=CommandType.LONG, description="the ID of the zone") private Long zoneId; @IdentityMapper(entityTableName="projects") @Parameter(name=ApiConstants.PROJECT_ID, type=CommandType.LONG, description="list isos by project") private Long projectId; ///////////////////////////////////////////////////// /////////////////// Accessors /////////////////////// ///////////////////////////////////////////////////// public String getAccountName() { return accountName; } public Boolean isBootable() { return bootable; } public Long getDomainId() { return domainId; } public String getHypervisor() { return hypervisor; } public Long getId() { return id; } public Boolean isPublic() { return publicIso; } public Boolean isReady() { return ready; } public String getIsoFilter() { return isoFilter; } public String getIsoName() { return isoName; } public Long getZoneId() { return zoneId; } public Long getProjectId() { return projectId; } public boolean listInReadyState() { Account account = UserContext.current().getCaller(); // It is account specific if account is admin type and domainId and accountName are not null boolean isAccountSpecific = (account == null || isAdmin(account.getType())) && (getAccountName() != null) && (getDomainId() != null); // Show only those that are downloaded. TemplateFilter templateFilter = TemplateFilter.valueOf(getIsoFilter()); boolean onlyReady = (templateFilter == TemplateFilter.featured) || (templateFilter == TemplateFilter.selfexecutable) || (templateFilter == TemplateFilter.sharedexecutable) || (templateFilter == TemplateFilter.executable && isAccountSpecific) || (templateFilter == TemplateFilter.community); if (!onlyReady) { if (isReady() != null && isReady().booleanValue() != onlyReady) { onlyReady = isReady().booleanValue(); } } return onlyReady; } ///////////////////////////////////////////////////// /////////////// API Implementation/////////////////// ///////////////////////////////////////////////////// @Override public String getCommandName() { return s_name; } public AsyncJob.Type getInstanceType() { return AsyncJob.Type.Iso; } @Override public void execute(){ Set<Pair<Long, Long>> isoZonePairSet = _mgr.listIsos(this); ListResponse<TemplateResponse> response = new ListResponse<TemplateResponse>(); List<TemplateResponse> templateResponses = new ArrayList<TemplateResponse>(); for (Pair<Long, Long> iso : isoZonePairSet) { List<TemplateResponse> responses = new ArrayList<TemplateResponse>(); responses = _responseGenerator.createIsoResponses(iso.first(), iso.second(), listInReadyState()); templateResponses.addAll(responses); } response.setResponses(templateResponses); response.setResponseName(getCommandName()); this.setResponseObject(response); } }
api/src/com/cloud/api/commands/ListIsosCmd.java
/** * Copyright (C) 2010 Cloud.com, Inc. All rights reserved. * * This software is licensed under the GNU General Public License v3 or later. * * It is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package com.cloud.api.commands; import java.util.ArrayList; import java.util.List; import java.util.Set; import org.apache.log4j.Logger; import com.cloud.api.ApiConstants; import com.cloud.api.BaseListCmd; import com.cloud.api.IdentityMapper; import com.cloud.api.Implementation; import com.cloud.api.Parameter; import com.cloud.api.response.ListResponse; import com.cloud.api.response.TemplateResponse; import com.cloud.async.AsyncJob; import com.cloud.template.VirtualMachineTemplate.TemplateFilter; import com.cloud.user.Account; import com.cloud.user.UserContext; import com.cloud.utils.Pair; @Implementation(description="Lists all available ISO files.", responseObject=TemplateResponse.class) public class ListIsosCmd extends BaseListCmd { public static final Logger s_logger = Logger.getLogger(ListIsosCmd.class.getName()); private static final String s_name = "listisosresponse"; ///////////////////////////////////////////////////// //////////////// API parameters ///////////////////// ///////////////////////////////////////////////////// @Parameter(name=ApiConstants.ACCOUNT, type=CommandType.STRING, description="the account of the ISO file. Must be used with the domainId parameter.") private String accountName; @Parameter(name=ApiConstants.BOOTABLE, type=CommandType.BOOLEAN, description="true if the ISO is bootable, false otherwise") private Boolean bootable; @IdentityMapper(entityTableName="domain") @Parameter(name=ApiConstants.DOMAIN_ID, type=CommandType.LONG, description="lists all available ISO files by ID of a domain. If used with the account parameter, lists all available ISO files for the account in the ID of a domain.") private Long domainId; @Parameter(name=ApiConstants.HYPERVISOR, type=CommandType.STRING, description="the hypervisor for which to restrict the search") private String hypervisor; @IdentityMapper(entityTableName="vm_template") @Parameter(name=ApiConstants.ID, type=CommandType.LONG, description="list all isos by id") private Long id; @Parameter(name=ApiConstants.IS_PUBLIC, type=CommandType.BOOLEAN, description="true if the ISO is publicly available to all users, false otherwise.") private Boolean publicIso; @Parameter(name=ApiConstants.IS_READY, type=CommandType.BOOLEAN, description="true if this ISO is ready to be deployed") private Boolean ready; @Parameter(name=ApiConstants.ISO_FILTER, type=CommandType.STRING, description="possible values are \"featured\", \"self\", \"self-executable\",\"executable\", and \"community\". " + "* featured-ISOs that are featured and are publicself-ISOs that have been registered/created by the owner. " + "* selfexecutable-ISOs that have been registered/created by the owner that can be used to deploy a new VM. 
" + "* executable-all ISOs that can be used to deploy a new VM " + "* community-ISOs that are public.") private String isoFilter = TemplateFilter.selfexecutable.toString(); @Parameter(name=ApiConstants.NAME, type=CommandType.STRING, description="list all isos by name") private String isoName; @IdentityMapper(entityTableName="data_center") @Parameter(name=ApiConstants.ZONE_ID, type=CommandType.LONG, description="the ID of the zone") private Long zoneId; @IdentityMapper(entityTableName="projects") @Parameter(name=ApiConstants.PROJECT_ID, type=CommandType.LONG, description="list isos by project") private Long projectId; ///////////////////////////////////////////////////// /////////////////// Accessors /////////////////////// ///////////////////////////////////////////////////// public String getAccountName() { return accountName; } public Boolean isBootable() { return bootable; } public Long getDomainId() { return domainId; } public String getHypervisor() { return hypervisor; } public Long getId() { return id; } public Boolean isPublic() { return publicIso; } public Boolean isReady() { return ready; } public String getIsoFilter() { return isoFilter; } public String getIsoName() { return isoName; } public Long getZoneId() { return zoneId; } public Long getProjectId() { return projectId; } public boolean listInReadyState() { Account account = UserContext.current().getCaller(); // It is account specific if account is admin type and domainId and accountName are not null boolean isAccountSpecific = (account == null || isAdmin(account.getType())) && (getAccountName() != null) && (getDomainId() != null); // Show only those that are downloaded. TemplateFilter templateFilter = TemplateFilter.valueOf(getIsoFilter()); boolean onlyReady = (templateFilter == TemplateFilter.featured) || (templateFilter == TemplateFilter.selfexecutable) || (templateFilter == TemplateFilter.sharedexecutable) || (templateFilter == TemplateFilter.executable && isAccountSpecific) || (templateFilter == TemplateFilter.community); return onlyReady; } ///////////////////////////////////////////////////// /////////////// API Implementation/////////////////// ///////////////////////////////////////////////////// @Override public String getCommandName() { return s_name; } public AsyncJob.Type getInstanceType() { return AsyncJob.Type.Iso; } @Override public void execute(){ Set<Pair<Long, Long>> isoZonePairSet = _mgr.listIsos(this); ListResponse<TemplateResponse> response = new ListResponse<TemplateResponse>(); List<TemplateResponse> templateResponses = new ArrayList<TemplateResponse>(); for (Pair<Long, Long> iso : isoZonePairSet) { List<TemplateResponse> responses = new ArrayList<TemplateResponse>(); responses = _responseGenerator.createIsoResponses(iso.first(), iso.second(), listInReadyState()); templateResponses.addAll(responses); } response.setResponses(templateResponses); response.setResponseName(getCommandName()); this.setResponseObject(response); } }
bug 12068: fixed listIsos by "isReady" flag; status 12068: resolved fixed
api/src/com/cloud/api/commands/ListIsosCmd.java
bug 12068: fixed listIsos by "isReady" flag; status 12068: resolved fixed
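For this record, the only functional change between new_contents and old_contents is the block appended to listInReadyState(): when the template filter alone does not force ready-only listing, the caller-supplied isReady parameter is now honoured instead of being ignored. The class below is a minimal, self-contained sketch of that logic, not CloudStack code; the class name, the standalone parameters and the main method are illustrative, and the enum values and field names simply mirror what is visible in ListIsosCmd above.

// Hypothetical sketch isolating the listInReadyState() fix from this commit.
public final class ListInReadyStateSketch {

    // Mirrors the TemplateFilter values referenced by ListIsosCmd.
    enum TemplateFilter { featured, self, selfexecutable, sharedexecutable, executable, community }

    // templateFilter, isAccountSpecific and isReady stand in for the command's parsed API parameters.
    static boolean listInReadyState(TemplateFilter templateFilter,
                                    boolean isAccountSpecific,
                                    Boolean isReady) {
        boolean onlyReady = templateFilter == TemplateFilter.featured
                || templateFilter == TemplateFilter.selfexecutable
                || templateFilter == TemplateFilter.sharedexecutable
                || (templateFilter == TemplateFilter.executable && isAccountSpecific)
                || templateFilter == TemplateFilter.community;

        // The fix: if the filter did not already force ready-only listing,
        // fall back to the explicit isReady flag supplied by the caller.
        if (!onlyReady) {
            if (isReady != null && isReady.booleanValue() != onlyReady) {
                onlyReady = isReady.booleanValue();
            }
        }
        return onlyReady;
    }

    public static void main(String[] args) {
        // Before the fix the second call would also have returned false,
        // because the isReady parameter was never consulted.
        System.out.println(listInReadyState(TemplateFilter.executable, false, null)); // false
        System.out.println(listInReadyState(TemplateFilter.executable, false, true)); // true
    }
}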
Java
apache-2.0
a87e8393bdab0f643d2fcf0dda95d7663bf0640b
0
digipost/signature-api-client-java
package no.digipost.signature.client; import no.digipost.signature.client.asice.ASiCEConfiguration; import no.digipost.signature.client.asice.DocumentBundleProcessor; import no.digipost.signature.client.asice.DumpDocumentBundleToDisk; import no.digipost.signature.client.core.Sender; import no.digipost.signature.client.core.SignatureJob; import no.digipost.signature.client.core.exceptions.KeyException; import no.digipost.signature.client.core.internal.http.AddRequestHeaderFilter; import no.digipost.signature.client.core.internal.http.EnterpriseCertificateTrustStrategy; import no.digipost.signature.client.core.internal.http.HttpIntegrationConfiguration; import no.digipost.signature.client.core.internal.security.ProvidesCertificateResourcePaths; import no.digipost.signature.client.core.internal.security.TrustStoreLoader; import no.digipost.signature.client.core.internal.xml.JaxbMessageReaderWriterProvider; import no.digipost.signature.client.security.KeyStoreConfig; import org.apache.commons.lang3.StringUtils; import org.apache.http.conn.ssl.TrustStrategy; import org.apache.http.ssl.SSLContexts; import org.glassfish.jersey.client.ClientConfig; import org.glassfish.jersey.client.ClientProperties; import org.glassfish.jersey.logging.LoggingFeature; import org.glassfish.jersey.media.multipart.MultiPartFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.net.ssl.SSLContext; import javax.ws.rs.core.Configurable; import javax.ws.rs.core.Configuration; import javax.ws.rs.core.HttpHeaders; import java.io.InputStream; import java.net.URI; import java.nio.file.Path; import java.security.KeyManagementException; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.UnrecoverableKeyException; import java.time.Clock; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.function.Consumer; import static java.util.Arrays.asList; import static javax.ws.rs.core.HttpHeaders.USER_AGENT; import static no.digipost.signature.client.Certificates.TEST; import static no.digipost.signature.client.ClientMetadata.VERSION; public final class ClientConfiguration implements ProvidesCertificateResourcePaths, HttpIntegrationConfiguration, ASiCEConfiguration { private static final Logger LOG = LoggerFactory.getLogger(ClientConfiguration.class); private static final String JAVA_DESCRIPTION = System.getProperty("java.vendor", "unknown Java") + ", " + System.getProperty("java.version", "unknown version"); /** * The {@link HttpHeaders#USER_AGENT User-Agent} header which will be included in all requests. You may include a custom part * using {@link Builder#includeInUserAgent(String)}. */ public static final String MANDATORY_USER_AGENT = "posten-signature-api-client-java/" + VERSION + " (" + JAVA_DESCRIPTION + ")"; /** * {@value #HTTP_REQUEST_RESPONSE_LOGGER_NAME} is the name of the logger which will log the HTTP requests and responses, * if enabled with {@link ClientConfiguration.Builder#enableRequestAndResponseLogging()}. */ public static final String HTTP_REQUEST_RESPONSE_LOGGER_NAME = "no.digipost.signature.client.http.requestresponse"; /** * Socket timeout is used for both requests and, if any, * underlying layered sockets (typically for * secure sockets). The default value is {@value #DEFAULT_SOCKET_TIMEOUT_MS} ms. */ public static final int DEFAULT_SOCKET_TIMEOUT_MS = 10_000; /** * The default connect timeout for requests: {@value #DEFAULT_CONNECT_TIMEOUT_MS} ms. 
*/ public static final int DEFAULT_CONNECT_TIMEOUT_MS = 10_000; private final Configurable<? extends Configuration> jaxrsConfig; private final boolean preInitializeHttpClient; private final KeyStoreConfig keyStoreConfig; private final Iterable<String> certificatePaths; private final Optional<Sender> sender; private final URI signatureServiceRoot; private final Iterable<DocumentBundleProcessor> documentBundleProcessors; private final TrustStrategy serverTrustStrategy; private final Clock clock; private ClientConfiguration( KeyStoreConfig keyStoreConfig, Configurable<? extends Configuration> jaxrsConfig, Optional<Sender> sender, URI serviceRoot, Iterable<String> certificatePaths, Iterable<DocumentBundleProcessor> documentBundleProcessors, TrustStrategy serverTrustStrategy, boolean preInitializeHttpClient, Clock clock) { this.jaxrsConfig = jaxrsConfig; this.preInitializeHttpClient = preInitializeHttpClient; this.keyStoreConfig = keyStoreConfig; this.certificatePaths = certificatePaths; this.sender = sender; this.signatureServiceRoot = serviceRoot; this.documentBundleProcessors = documentBundleProcessors; this.serverTrustStrategy = serverTrustStrategy; this.clock = clock; } @Override public KeyStoreConfig getKeyStoreConfig() { return keyStoreConfig; } @Override public Optional<Sender> getGlobalSender() { return sender; } @Override public Iterable<DocumentBundleProcessor> getDocumentBundleProcessors() { return documentBundleProcessors; } @Override public Clock getClock() { return clock; } @Override public URI getServiceRoot() { return signatureServiceRoot; } @Override public Iterable<String> getCertificatePaths() { return certificatePaths; } /** * Get the JAX-RS {@link Configuration} based on the current state of this {@link ClientConfiguration}. * * @return the JAX-RS {@link Configuration} */ @Override public Configuration getJaxrsConfiguration() { return jaxrsConfig.getConfiguration(); } @Override public boolean preInitializeClient() { return preInitializeHttpClient; } @Override public SSLContext getSSLContext() { try { return SSLContexts.custom() .loadKeyMaterial(keyStoreConfig.keyStore, keyStoreConfig.privatekeyPassword.toCharArray(), (aliases, socket) -> keyStoreConfig.alias) .loadTrustMaterial(TrustStoreLoader.build(this), serverTrustStrategy) .build(); } catch (NoSuchAlgorithmException | KeyManagementException | KeyStoreException | UnrecoverableKeyException e) { if (e instanceof UnrecoverableKeyException && "Given final block not properly padded".equals(e.getMessage())) { throw new KeyException( "Unable to load key from keystore, because " + e.getClass().getSimpleName() + ": '" + e.getMessage() + "'. Possible causes:\n" + "* Wrong password for private key (the password for the keystore and the private key may not be the same)\n" + "* Multiple private keys in the keystore with different passwords (private keys in the same key store must have the same password)", e); } else { throw new KeyException("Unable to create the SSLContext, because " + e.getClass().getSimpleName() + ": '" + e.getMessage() + "'", e); } } } /** * Build a new {@link ClientConfiguration}. */ public static Builder builder(KeyStoreConfig keystore) { return new Builder(keystore); } public static class Builder { private final Configurable<? 
extends Configuration> jaxrsConfig; private final KeyStoreConfig keyStoreConfig; private boolean preInitializeHttpClient = true; private int socketTimeoutMs = DEFAULT_SOCKET_TIMEOUT_MS; private int connectTimeoutMs = DEFAULT_CONNECT_TIMEOUT_MS; private Optional<String> customUserAgentPart = Optional.empty(); private URI serviceRoot = ServiceUri.PRODUCTION.uri; private Optional<Sender> globalSender = Optional.empty(); private Iterable<String> certificatePaths = Certificates.PRODUCTION.certificatePaths; private TrustStrategy serverCertificateTrustStrategy = new EnterpriseCertificateTrustStrategy("984661185"); // Posten Norge AS organization number private Optional<LoggingFeature> loggingFeature = Optional.empty(); private List<DocumentBundleProcessor> documentBundleProcessors = new ArrayList<>(); private Clock clock = Clock.systemDefaultZone(); private Builder(KeyStoreConfig keyStoreConfig) { this.keyStoreConfig = keyStoreConfig; this.jaxrsConfig = new ClientConfig(); } /** * Set the service URI to one of the predefined environments. */ public Builder serviceUri(ServiceUri environment) { return serviceUri(environment.uri); } /** * Override the service endpoint URI to a custom environment. */ public Builder serviceUri(URI uri) { this.serviceRoot = uri; return this; } /** * Override the * {@link ClientConfiguration#DEFAULT_SOCKET_TIMEOUT_MS default socket timeout value}. */ public Builder socketTimeoutMillis(int millis) { this.socketTimeoutMs = millis; return this; } /** * Override the * {@link ClientConfiguration#DEFAULT_CONNECT_TIMEOUT_MS default connect timeout value}. */ public Builder connectTimeoutMillis(int millis) { this.connectTimeoutMs = millis; return this; } public Builder trustStore(Certificates certificates) { if (certificates == TEST) { LOG.warn("Using test certificates in trust store. This should never be done for production environments."); } return trustStore(certificates.certificatePaths); } /** * Override the trust store configuration to load DER-encoded certificates from the given folder(s). * * @see java.security.cert.CertificateFactory#generateCertificate(InputStream) */ public Builder trustStore(String ... certificatePaths) { return trustStore(asList(certificatePaths)); } /** * Override the trust store configuration to load DER-encoded certificates from the given folder(s). * * @see java.security.cert.CertificateFactory#generateCertificate(InputStream) */ public Builder trustStore(Iterable<String> certificatePaths) { this.certificatePaths = certificatePaths; return this; } /** * Set the sender used globally for every signature job. * <p> * Use {@link no.digipost.signature.client.portal.PortalJob.Builder#withSender(Sender)} or {@link no.digipost.signature.client.direct.DirectJob.Builder#withSender(Sender)} * if you need to specify different senders per signature job (typically when acting as a broker on * behalf of multiple other organizations) */ public Builder globalSender(Sender sender) { this.globalSender = Optional.of(sender); return this; } /** * Customize the {@link HttpHeaders#USER_AGENT User-Agent} header value to include the * given string. * * @param userAgentCustomPart The custom part to include in the User-Agent HTTP header. */ public Builder includeInUserAgent(String userAgentCustomPart) { customUserAgentPart = Optional.of(userAgentCustomPart).filter(StringUtils::isNoneBlank); return this; } /** * Makes the client log the sent requests and received responses to the logger named * {@link ClientConfiguration#HTTP_REQUEST_RESPONSE_LOGGER_NAME}. 
*/ public Builder enableRequestAndResponseLogging() { loggingFeature = Optional.of(new LoggingFeature(java.util.logging.Logger.getLogger(HTTP_REQUEST_RESPONSE_LOGGER_NAME), 16 * 1024)); return this; } /** * Have the library dump the generated document bundle zip files to disk before they are * sent to the service to create signature jobs. * <p> * The files will be given names on the format * <pre>{@code timestamp-[reference_from_job-]asice.zip}</pre> * The <em>timestamp</em> part may use a clock of your choosing, make sure to override the system clock with * {@link #usingClock(Clock)} before calling this method if that is desired. * <p> * The <em>reference_from_job</em> part is only included if the job is given such a reference using * {@link no.digipost.signature.client.direct.DirectJob.Builder#withReference(UUID) DirectJob.Builder.withReference(..)} or {@link no.digipost.signature.client.portal.PortalJob.Builder#withReference(UUID) PortalJob.Builder.withReference(..)}. * * @param directory the directory to dump to. This directory must already exist, or * creating new signature jobs will fail. Miserably. */ public Builder enableDocumentBundleDiskDump(Path directory) { return addDocumentBundleProcessor(new DumpDocumentBundleToDisk(directory, clock)); } /** * Add a {@link DocumentBundleProcessor} which will be passed the generated zipped document bundle * together with the {@link SignatureJob job} it was created for. The processor is not responsible for closing * the stream, as this is handled by the library itself. * * <h2>A note on performance</h2> * The processor is free to do what it want with the passed stream, but bear in mind that the time * used by a processor adds to the processing time to create signature jobs. * * @param processor the {@link DocumentBundleProcessor} which will be passed the generated zipped document bundle * together with the {@link SignatureJob job} it was created for. */ public Builder addDocumentBundleProcessor(DocumentBundleProcessor processor) { documentBundleProcessors.add(processor); return this; } /** * This methods allows for custom configuration of JAX-RS (i.e. Jersey) if anything is * needed that is not already supported by the {@link ClientConfiguration.Builder}. * This method should not be used to configure anything that is already directly supported by the * {@code ClientConfiguration.Builder} API. * <p> * If you still need to use this method, consider requesting first-class support for your requirement * on the library's <a href="https://github.com/digipost/signature-api-client-java/issues">web site on GitHub</a>. * * @param customizer The operations to do on the JAX-RS {@link Configurable}, e.g. * {@link Configurable#register(Object) registering components}. */ public Builder customizeJaxRs(Consumer<? super Configurable<? extends Configuration>> customizer) { customizer.accept(jaxrsConfig); return this; } /** * Override which organization number which is expected from the server's certificate. * By default, this is the organization number of Posten Norge AS, and should <em>not</em> * be overridden unless you have a specific need such as doing testing against your own * stubbed implementation of the Posten signering API. 
* * @param serverOrganizationNumber the organization number expected in the server's enterprise certificate */ public Builder serverOrganizationNumber(String serverOrganizationNumber) { return serverCertificateTrustStrategy(new EnterpriseCertificateTrustStrategy(serverOrganizationNumber)); } /** * Override the strategy used for validating the server's certificate. This method is mainly * intended for tests if you need to override (or even disable) the default * validation that the server identifies itself as "Posten Norge AS". * * Calling this method for a production deployment is probably <em>not</em> what you intend to do! * * @param strategy the strategy for validating the server's certificate */ public Builder serverCertificateTrustStrategy(TrustStrategy strategy) { LOG.warn( "Overriding server certificate TrustStrategy! This should NOT be done for any production deployment, " + "or any integration with Posten Norge"); this.serverCertificateTrustStrategy = strategy; return this; } /** * Allows for overriding which {@link Clock} is used to convert between Java and XML, * may be useful for e.g. automated tests. * <p> * Uses {@link Clock#systemDefaultZone() the best available system clock} if not specified. */ public Builder usingClock(Clock clock) { this.clock = clock; return this; } /** * Disable the pre-initialization step of the internal HTTP client (Jersey Client) when * instantiating the Signature API Client. * * @see org.glassfish.jersey.client.JerseyClient#preInitialize() */ public Builder disablePreInitializingHttpClient() { this.preInitializeHttpClient = false; return this; } public ClientConfiguration build() { jaxrsConfig.property(ClientProperties.READ_TIMEOUT, socketTimeoutMs); jaxrsConfig.property(ClientProperties.CONNECT_TIMEOUT, connectTimeoutMs); jaxrsConfig.register(MultiPartFeature.class); jaxrsConfig.register(JaxbMessageReaderWriterProvider.class); jaxrsConfig.register(new AddRequestHeaderFilter(USER_AGENT, createUserAgentString())); this.loggingFeature.ifPresent(jaxrsConfig::register); return new ClientConfiguration( keyStoreConfig, jaxrsConfig, globalSender, serviceRoot, certificatePaths, documentBundleProcessors, serverCertificateTrustStrategy, preInitializeHttpClient, clock); } String createUserAgentString() { return MANDATORY_USER_AGENT + customUserAgentPart.map(ua -> String.format(" (%s)", ua)).orElse(""); } } }
src/main/java/no/digipost/signature/client/ClientConfiguration.java
package no.digipost.signature.client; import no.digipost.signature.client.asice.ASiCEConfiguration; import no.digipost.signature.client.asice.DocumentBundleProcessor; import no.digipost.signature.client.asice.DumpDocumentBundleToDisk; import no.digipost.signature.client.core.Sender; import no.digipost.signature.client.core.SignatureJob; import no.digipost.signature.client.core.exceptions.KeyException; import no.digipost.signature.client.core.internal.http.AddRequestHeaderFilter; import no.digipost.signature.client.core.internal.http.EnterpriseCertificateTrustStrategy; import no.digipost.signature.client.core.internal.http.HttpIntegrationConfiguration; import no.digipost.signature.client.core.internal.security.ProvidesCertificateResourcePaths; import no.digipost.signature.client.core.internal.security.TrustStoreLoader; import no.digipost.signature.client.core.internal.xml.JaxbMessageReaderWriterProvider; import no.digipost.signature.client.security.KeyStoreConfig; import org.apache.commons.lang3.StringUtils; import org.apache.http.conn.ssl.TrustStrategy; import org.apache.http.ssl.PrivateKeyDetails; import org.apache.http.ssl.PrivateKeyStrategy; import org.apache.http.ssl.SSLContexts; import org.glassfish.jersey.client.ClientConfig; import org.glassfish.jersey.client.ClientProperties; import org.glassfish.jersey.logging.LoggingFeature; import org.glassfish.jersey.media.multipart.MultiPartFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.net.ssl.SSLContext; import javax.ws.rs.core.Configurable; import javax.ws.rs.core.Configuration; import javax.ws.rs.core.HttpHeaders; import java.io.InputStream; import java.net.Socket; import java.net.URI; import java.nio.file.Path; import java.security.KeyManagementException; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.UnrecoverableKeyException; import java.time.Clock; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.function.Consumer; import static java.util.Arrays.asList; import static javax.ws.rs.core.HttpHeaders.USER_AGENT; import static no.digipost.signature.client.Certificates.TEST; import static no.digipost.signature.client.ClientMetadata.VERSION; public final class ClientConfiguration implements ProvidesCertificateResourcePaths, HttpIntegrationConfiguration, ASiCEConfiguration { private static final Logger LOG = LoggerFactory.getLogger(ClientConfiguration.class); private static final String JAVA_DESCRIPTION = System.getProperty("java.vendor", "unknown Java") + ", " + System.getProperty("java.version", "unknown version"); /** * The {@link HttpHeaders#USER_AGENT User-Agent} header which will be included in all requests. You may include a custom part * using {@link Builder#includeInUserAgent(String)}. */ public static final String MANDATORY_USER_AGENT = "posten-signature-api-client-java/" + VERSION + " (" + JAVA_DESCRIPTION + ")"; /** * {@value #HTTP_REQUEST_RESPONSE_LOGGER_NAME} is the name of the logger which will log the HTTP requests and responses, * if enabled with {@link ClientConfiguration.Builder#enableRequestAndResponseLogging()}. */ public static final String HTTP_REQUEST_RESPONSE_LOGGER_NAME = "no.digipost.signature.client.http.requestresponse"; /** * Socket timeout is used for both requests and, if any, * underlying layered sockets (typically for * secure sockets). The default value is {@value #DEFAULT_SOCKET_TIMEOUT_MS} ms. 
*/ public static final int DEFAULT_SOCKET_TIMEOUT_MS = 10_000; /** * The default connect timeout for requests: {@value #DEFAULT_CONNECT_TIMEOUT_MS} ms. */ public static final int DEFAULT_CONNECT_TIMEOUT_MS = 10_000; private final Configurable<? extends Configuration> jaxrsConfig; private final boolean preInitializeHttpClient; private final KeyStoreConfig keyStoreConfig; private final Iterable<String> certificatePaths; private final Optional<Sender> sender; private final URI signatureServiceRoot; private final Iterable<DocumentBundleProcessor> documentBundleProcessors; private final TrustStrategy serverTrustStrategy; private final Clock clock; private ClientConfiguration( KeyStoreConfig keyStoreConfig, Configurable<? extends Configuration> jaxrsConfig, Optional<Sender> sender, URI serviceRoot, Iterable<String> certificatePaths, Iterable<DocumentBundleProcessor> documentBundleProcessors, TrustStrategy serverTrustStrategy, boolean preInitializeHttpClient, Clock clock) { this.jaxrsConfig = jaxrsConfig; this.preInitializeHttpClient = preInitializeHttpClient; this.keyStoreConfig = keyStoreConfig; this.certificatePaths = certificatePaths; this.sender = sender; this.signatureServiceRoot = serviceRoot; this.documentBundleProcessors = documentBundleProcessors; this.serverTrustStrategy = serverTrustStrategy; this.clock = clock; } @Override public KeyStoreConfig getKeyStoreConfig() { return keyStoreConfig; } @Override public Optional<Sender> getGlobalSender() { return sender; } @Override public Iterable<DocumentBundleProcessor> getDocumentBundleProcessors() { return documentBundleProcessors; } @Override public Clock getClock() { return clock; } @Override public URI getServiceRoot() { return signatureServiceRoot; } @Override public Iterable<String> getCertificatePaths() { return certificatePaths; } /** * Get the JAX-RS {@link Configuration} based on the current state of this {@link ClientConfiguration}. * * @return the JAX-RS {@link Configuration} */ @Override public Configuration getJaxrsConfiguration() { return jaxrsConfig.getConfiguration(); } @Override public boolean preInitializeClient() { return preInitializeHttpClient; } @Override public SSLContext getSSLContext() { try { return SSLContexts.custom() .loadKeyMaterial(keyStoreConfig.keyStore, keyStoreConfig.privatekeyPassword.toCharArray(), new PrivateKeyStrategy() { @Override public String chooseAlias(Map<String, PrivateKeyDetails> aliases, Socket socket) { return keyStoreConfig.alias; } }) .loadTrustMaterial(TrustStoreLoader.build(this), serverTrustStrategy) .build(); } catch (NoSuchAlgorithmException | KeyManagementException | KeyStoreException | UnrecoverableKeyException e) { if (e instanceof UnrecoverableKeyException && "Given final block not properly padded".equals(e.getMessage())) { throw new KeyException( "Unable to load key from keystore, because " + e.getClass().getSimpleName() + ": '" + e.getMessage() + "'. Possible causes:\n" + "* Wrong password for private key (the password for the keystore and the private key may not be the same)\n" + "* Multiple private keys in the keystore with different passwords (private keys in the same key store must have the same password)", e); } else { throw new KeyException("Unable to create the SSLContext, because " + e.getClass().getSimpleName() + ": '" + e.getMessage() + "'", e); } } } /** * Build a new {@link ClientConfiguration}. */ public static Builder builder(KeyStoreConfig keystore) { return new Builder(keystore); } public static class Builder { private final Configurable<? 
extends Configuration> jaxrsConfig; private final KeyStoreConfig keyStoreConfig; private boolean preInitializeHttpClient = true; private int socketTimeoutMs = DEFAULT_SOCKET_TIMEOUT_MS; private int connectTimeoutMs = DEFAULT_CONNECT_TIMEOUT_MS; private Optional<String> customUserAgentPart = Optional.empty(); private URI serviceRoot = ServiceUri.PRODUCTION.uri; private Optional<Sender> globalSender = Optional.empty(); private Iterable<String> certificatePaths = Certificates.PRODUCTION.certificatePaths; private TrustStrategy serverCertificateTrustStrategy = new EnterpriseCertificateTrustStrategy("984661185"); // Posten Norge AS organization number private Optional<LoggingFeature> loggingFeature = Optional.empty(); private List<DocumentBundleProcessor> documentBundleProcessors = new ArrayList<>(); private Clock clock = Clock.systemDefaultZone(); private Builder(KeyStoreConfig keyStoreConfig) { this.keyStoreConfig = keyStoreConfig; this.jaxrsConfig = new ClientConfig(); } /** * Set the service URI to one of the predefined environments. */ public Builder serviceUri(ServiceUri environment) { return serviceUri(environment.uri); } /** * Override the service endpoint URI to a custom environment. */ public Builder serviceUri(URI uri) { this.serviceRoot = uri; return this; } /** * Override the * {@link ClientConfiguration#DEFAULT_SOCKET_TIMEOUT_MS default socket timeout value}. */ public Builder socketTimeoutMillis(int millis) { this.socketTimeoutMs = millis; return this; } /** * Override the * {@link ClientConfiguration#DEFAULT_CONNECT_TIMEOUT_MS default connect timeout value}. */ public Builder connectTimeoutMillis(int millis) { this.connectTimeoutMs = millis; return this; } public Builder trustStore(Certificates certificates) { if (certificates == TEST) { LOG.warn("Using test certificates in trust store. This should never be done for production environments."); } return trustStore(certificates.certificatePaths); } /** * Override the trust store configuration to load DER-encoded certificates from the given folder(s). * * @see java.security.cert.CertificateFactory#generateCertificate(InputStream) */ public Builder trustStore(String ... certificatePaths) { return trustStore(asList(certificatePaths)); } /** * Override the trust store configuration to load DER-encoded certificates from the given folder(s). * * @see java.security.cert.CertificateFactory#generateCertificate(InputStream) */ public Builder trustStore(Iterable<String> certificatePaths) { this.certificatePaths = certificatePaths; return this; } /** * Set the sender used globally for every signature job. * <p> * Use {@link no.digipost.signature.client.portal.PortalJob.Builder#withSender(Sender)} or {@link no.digipost.signature.client.direct.DirectJob.Builder#withSender(Sender)} * if you need to specify different senders per signature job (typically when acting as a broker on * behalf of multiple other organizations) */ public Builder globalSender(Sender sender) { this.globalSender = Optional.of(sender); return this; } /** * Customize the {@link HttpHeaders#USER_AGENT User-Agent} header value to include the * given string. * * @param userAgentCustomPart The custom part to include in the User-Agent HTTP header. */ public Builder includeInUserAgent(String userAgentCustomPart) { customUserAgentPart = Optional.of(userAgentCustomPart).filter(StringUtils::isNoneBlank); return this; } /** * Makes the client log the sent requests and received responses to the logger named * {@link ClientConfiguration#HTTP_REQUEST_RESPONSE_LOGGER_NAME}. 
*/ public Builder enableRequestAndResponseLogging() { loggingFeature = Optional.of(new LoggingFeature(java.util.logging.Logger.getLogger(HTTP_REQUEST_RESPONSE_LOGGER_NAME), 16 * 1024)); return this; } /** * Have the library dump the generated document bundle zip files to disk before they are * sent to the service to create signature jobs. * <p> * The files will be given names on the format * <pre>{@code timestamp-[reference_from_job-]asice.zip}</pre> * The <em>timestamp</em> part may use a clock of your choosing, make sure to override the system clock with * {@link #usingClock(Clock)} before calling this method if that is desired. * <p> * The <em>reference_from_job</em> part is only included if the job is given such a reference using * {@link no.digipost.signature.client.direct.DirectJob.Builder#withReference(UUID) DirectJob.Builder.withReference(..)} or {@link no.digipost.signature.client.portal.PortalJob.Builder#withReference(UUID) PortalJob.Builder.withReference(..)}. * * @param directory the directory to dump to. This directory must already exist, or * creating new signature jobs will fail. Miserably. */ public Builder enableDocumentBundleDiskDump(Path directory) { return addDocumentBundleProcessor(new DumpDocumentBundleToDisk(directory, clock)); } /** * Add a {@link DocumentBundleProcessor} which will be passed the generated zipped document bundle * together with the {@link SignatureJob job} it was created for. The processor is not responsible for closing * the stream, as this is handled by the library itself. * * <h2>A note on performance</h2> * The processor is free to do what it want with the passed stream, but bear in mind that the time * used by a processor adds to the processing time to create signature jobs. * * @param processor the {@link DocumentBundleProcessor} which will be passed the generated zipped document bundle * together with the {@link SignatureJob job} it was created for. */ public Builder addDocumentBundleProcessor(DocumentBundleProcessor processor) { documentBundleProcessors.add(processor); return this; } /** * This methods allows for custom configuration of JAX-RS (i.e. Jersey) if anything is * needed that is not already supported by the {@link ClientConfiguration.Builder}. * This method should not be used to configure anything that is already directly supported by the * {@code ClientConfiguration.Builder} API. * <p> * If you still need to use this method, consider requesting first-class support for your requirement * on the library's <a href="https://github.com/digipost/signature-api-client-java/issues">web site on GitHub</a>. * * @param customizer The operations to do on the JAX-RS {@link Configurable}, e.g. * {@link Configurable#register(Object) registering components}. */ public Builder customizeJaxRs(Consumer<? super Configurable<? extends Configuration>> customizer) { customizer.accept(jaxrsConfig); return this; } /** * Override which organization number which is expected from the server's certificate. * By default, this is the organization number of Posten Norge AS, and should <em>not</em> * be overridden unless you have a specific need such as doing testing against your own * stubbed implementation of the Posten signering API. 
* * @param serverOrganizationNumber the organization number expected in the server's enterprise certificate */ public Builder serverOrganizationNumber(String serverOrganizationNumber) { return serverCertificateTrustStrategy(new EnterpriseCertificateTrustStrategy(serverOrganizationNumber)); } /** * Override the strategy used for validating the server's certificate. This method is mainly * intended for tests if you need to override (or even disable) the default * validation that the server identifies itself as "Posten Norge AS". * * Calling this method for a production deployment is probably <em>not</em> what you intend to do! * * @param strategy the strategy for validating the server's certificate */ public Builder serverCertificateTrustStrategy(TrustStrategy strategy) { LOG.warn( "Overriding server certificate TrustStrategy! This should NOT be done for any production deployment, " + "or any integration with Posten Norge"); this.serverCertificateTrustStrategy = strategy; return this; } /** * Allows for overriding which {@link Clock} is used to convert between Java and XML, * may be useful for e.g. automated tests. * <p> * Uses {@link Clock#systemDefaultZone() the best available system clock} if not specified. */ public Builder usingClock(Clock clock) { this.clock = clock; return this; } /** * Disable the pre-initialization step of the internal HTTP client (Jersey Client) when * instantiating the Signature API Client. * * @see org.glassfish.jersey.client.JerseyClient#preInitialize() */ public Builder disablePreInitializingHttpClient() { this.preInitializeHttpClient = false; return this; } public ClientConfiguration build() { jaxrsConfig.property(ClientProperties.READ_TIMEOUT, socketTimeoutMs); jaxrsConfig.property(ClientProperties.CONNECT_TIMEOUT, connectTimeoutMs); jaxrsConfig.register(MultiPartFeature.class); jaxrsConfig.register(JaxbMessageReaderWriterProvider.class); jaxrsConfig.register(new AddRequestHeaderFilter(USER_AGENT, createUserAgentString())); this.loggingFeature.ifPresent(jaxrsConfig::register); return new ClientConfiguration( keyStoreConfig, jaxrsConfig, globalSender, serviceRoot, certificatePaths, documentBundleProcessors, serverCertificateTrustStrategy, preInitializeHttpClient, clock); } String createUserAgentString() { return MANDATORY_USER_AGENT + customUserAgentPart.map(ua -> String.format(" (%s)", ua)).orElse(""); } } }
A little lambda-refactoring
src/main/java/no/digipost/signature/client/ClientConfiguration.java
A little lambda-refactoring
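The commit in this record replaces the anonymous PrivateKeyStrategy passed to SSLContexts.custom().loadKeyMaterial(...) with the lambda (aliases, socket) -> keyStoreConfig.alias, which in turn lets the PrivateKeyDetails, PrivateKeyStrategy, Socket and Map imports be dropped. The sketch below puts the before and after shapes side by side; PrivateKeyStrategySketch, keyAlias and the main method are illustrative only, while the interface and its chooseAlias signature are taken from the imports shown in the old contents above.

// Hypothetical sketch of the anonymous-class-to-lambda refactoring in this commit.
import org.apache.http.ssl.PrivateKeyDetails;
import org.apache.http.ssl.PrivateKeyStrategy;

import java.net.Socket;
import java.util.Map;

final class PrivateKeyStrategySketch {

    // Old style: explicit anonymous class implementing PrivateKeyStrategy.
    static PrivateKeyStrategy before(String keyAlias) {
        return new PrivateKeyStrategy() {
            @Override
            public String chooseAlias(Map<String, PrivateKeyDetails> aliases, Socket socket) {
                return keyAlias;
            }
        };
    }

    // New style: PrivateKeyStrategy has a single abstract method, so a lambda suffices.
    static PrivateKeyStrategy after(String keyAlias) {
        return (aliases, socket) -> keyAlias;
    }

    public static void main(String[] args) {
        // Both strategies always resolve to the configured alias.
        System.out.println(before("client-key").chooseAlias(null, null)); // client-key
        System.out.println(after("client-key").chooseAlias(null, null));  // client-key
    }
}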
Java
apache-2.0
7b54a03a7ef67fc9ce8e12130ce205dce395cf92
0
electricalwind/greycat,Neoskai/greycat,datathings/greycat
package ml.profiling;

import org.junit.Test;
import org.mwg.Callback;
import org.mwg.Graph;
import org.mwg.GraphBuilder;
import org.mwg.ml.algorithm.profiling.GaussianGmmNode;
import org.mwg.core.NoopScheduler;

import java.util.Random;

/**
 * Created by assaad on 01/04/16.
 */
public class GaussianMixtureModelTest {

    @Test
    public void mixtureTest() {
        Graph graph = GraphBuilder.builder().withFactory(new GaussianGmmNode.Factory()).withScheduler(new NoopScheduler()).build();
        graph.connect(new Callback<Boolean>() {
            @Override
            public void on(Boolean result) {
                double[] data = new double[3];
                Random rand = new Random();

                GaussianGmmNode node1 = (GaussianGmmNode) graph.newNode(0, 0, "GaussianGmm");
                node1.configMixture(1, 100);

                for (int i = 0; i < 220; i++) {
                    data[0] = 8 + rand.nextDouble() * 4;    //avg =10, [8,12]
                    data[1] = 90 + rand.nextDouble() * 20;  //avg=100 [90,110]
                    data[2] = -60 + rand.nextDouble() * 20; //avg=-50 [-60,-40]

                    node1.setTrainingVector(data);
                    node1.learn(new Callback<Boolean>() {
                        @Override
                        public void on(Boolean result) {
                        }
                    });
                }
            }
        });
    }
}
plugins/ml/src/test/java/ml/profiling/GaussianMixtureModelTest.java
package ml.profiling;

import org.junit.Test;
import org.mwg.Callback;
import org.mwg.Graph;
import org.mwg.GraphBuilder;
import org.mwg.ml.algorithm.profiling.GaussianGmmNode;
import org.mwg.core.NoopScheduler;

import java.util.Random;

/**
 * Created by assaad on 01/04/16.
 */
public class GaussianMixtureModelTest {

    @Test
    public void mixtureTest() {
        Graph graph = GraphBuilder.builder().withFactory(new GaussianGmmNode.Factory()).withScheduler(new NoopScheduler()).build();
        graph.connect(new Callback<Boolean>() {
            @Override
            public void on(Boolean result) {
                double[] data = new double[3];
                Random rand = new Random();

                GaussianGmmNode node1 = (GaussianGmmNode) graph.newNode(0, 0, "GaussianGmm");
                node1.configMixture(1, 100);

                for (int i = 0; i < 220; i++) {
                    data[0] = 8 + rand.nextDouble() * 4;    //avg =10, [8,12]
                    data[1] = 90 + rand.nextDouble() * 20;  //avg=100 [90,110]
                    data[2] = -60 + rand.nextDouble() * 20; //avg=-50 [-60,-40]

                    node1.learn(new Callback<Boolean>() {
                        @Override
                        public void on(Boolean result) {
                        }
                    });
                }
            }
        });
    }
}
update test
plugins/ml/src/test/java/ml/profiling/GaussianMixtureModelTest.java
update test
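The diff in this record adds node1.setTrainingVector(data) inside the training loop, so each synthetic sample is actually handed to the Gaussian mixture node before learn(...) is invoked. The sketch below reproduces the shape of that loop without the greycat dependency; TrainingNode, train and the counting main method are hypothetical stand-ins (the real GaussianGmmNode.learn takes a Callback<Boolean>, not a Runnable).

// Hypothetical sketch of the training-loop change in this commit.
import java.util.Random;

final class MixtureTrainingLoopSketch {

    // Stand-in for org.mwg.ml.algorithm.profiling.GaussianGmmNode, reduced to the two calls
    // exercised by the test loop.
    interface TrainingNode {
        void setTrainingVector(double[] vector);
        void learn(Runnable whenDone);
    }

    static void train(TrainingNode node, int samples) {
        Random rand = new Random();
        double[] data = new double[3];
        for (int i = 0; i < samples; i++) {
            data[0] = 8 + rand.nextDouble() * 4;    // avg = 10,  range [8, 12]
            data[1] = 90 + rand.nextDouble() * 20;  // avg = 100, range [90, 110]
            data[2] = -60 + rand.nextDouble() * 20; // avg = -50, range [-60, -40]

            node.setTrainingVector(data);           // the call added by this commit
            node.learn(() -> { /* callback result ignored, as in the test */ });
        }
    }

    public static void main(String[] args) {
        // Count invocations to show both methods are now exercised once per sample.
        final int[] counts = new int[2];
        train(new TrainingNode() {
            @Override public void setTrainingVector(double[] vector) { counts[0]++; }
            @Override public void learn(Runnable whenDone) { counts[1]++; }
        }, 220);
        System.out.println(counts[0] + " setTrainingVector calls, " + counts[1] + " learn calls");
    }
}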
Java
apache-2.0
b8cb7b4fc80b284e34820e8042960f019bbd5b97
0
realityforge/arez,realityforge/arez,realityforge/arez
package arez; import arez.component.TypeBasedLocator; import arez.spy.ActionCompleteEvent; import arez.spy.ActionStartEvent; import arez.spy.ComponentCreateStartEvent; import arez.spy.ComponentInfo; import arez.spy.ComputableValueCreateEvent; import arez.spy.ComputeCompleteEvent; import arez.spy.ComputeStartEvent; import arez.spy.ObservableValueChangeEvent; import arez.spy.ObservableValueCreateEvent; import arez.spy.ObserveCompleteEvent; import arez.spy.ObserveScheduleEvent; import arez.spy.ObserveStartEvent; import arez.spy.ObserverCreateEvent; import arez.spy.ObserverErrorEvent; import arez.spy.ObserverInfo; import arez.spy.Priority; import arez.spy.PropertyAccessor; import arez.spy.PropertyMutator; import arez.spy.TaskCompleteEvent; import arez.spy.TaskStartEvent; import arez.spy.TransactionCompleteEvent; import arez.spy.TransactionStartEvent; import java.io.IOException; import java.security.AccessControlException; import java.util.ArrayList; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nonnull; import org.realityforge.guiceyloops.shared.ValueUtil; import org.testng.annotations.Test; import static org.testng.Assert.*; @SuppressWarnings( "Duplicates" ) public class ArezContextTest extends AbstractArezTest { @Test public void generateName() { final ArezContext context = Arez.context(); // Use passed in name assertEquals( context.generateName( "ComputableValue", "MyName" ), "MyName" ); //synthesize name context.setNextNodeId( 1 ); assertEquals( context.generateName( "ComputableValue", null ), "ComputableValue@1" ); assertEquals( context.getNextNodeId(), 2 ); ArezTestUtil.disableNames(); //Ignore name assertNull( context.generateName( "ComputableValue", "MyName" ) ); //Null name also fine assertNull( context.generateName( "ComputableValue", null ) ); } @Test public void triggerScheduler() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); context.observer( () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.RUN_LATER ); assertEquals( callCount.get(), 0 ); context.triggerScheduler(); assertEquals( callCount.get(), 1 ); } @Test public void triggerScheduler_alreadyActive() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); context.observer( () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.RUN_LATER ); assertEquals( callCount.get(), 0 ); context.markSchedulerAsActive(); context.triggerScheduler(); assertEquals( callCount.get(), 0 ); } @Test public void triggerScheduler_inEnvironment() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final AtomicReference<String> environment = new AtomicReference<>(); context.setEnvironment( new Environment() { @Override public <T> T run( @Nonnull final SafeFunction<T> function ) { environment.set( "RED" ); try { return function.call(); } finally { environment.set( null ); } } @Override public <T> T run( @Nonnull final Function<T> function ) throws Throwable { environment.set( "RED" ); try { return function.call(); } finally { environment.set( null ); } } } ); context.observer( () -> { observeADependency(); callCount.incrementAndGet(); assertEquals( environment.get(), "RED" ); }, Flags.RUN_LATER ); assertEquals( callCount.get(), 0 ); assertNull( environment.get() ); context.triggerScheduler(); assertEquals( callCount.get(), 1 ); assertNull( environment.get() ); } @Test public void 
triggerScheduler_inEnvironment_whereEnvironmentSchedulesActions() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final AtomicReference<String> environment = new AtomicReference<>(); final AtomicInteger count = new AtomicInteger( 3 ); final AtomicReference<Observer> observerReference = new AtomicReference<>(); context.setEnvironment( new Environment() { @Override public <T> T run( @Nonnull final SafeFunction<T> function ) { environment.set( "RED" ); T result = function.call(); /* * This simulates the scenario where something like react4j has only scheduler that will * react to changes in arez and potentially re-schedule arez events. */ if ( count.decrementAndGet() > 0 ) { context.safeAction( () -> observerReference.get().setState( Flags.STATE_STALE ), Flags.NO_VERIFY_ACTION_REQUIRED ); } environment.set( null ); return result; } @Override public <T> T run( @Nonnull final Function<T> function ) throws Throwable { environment.set( "RED" ); T result = function.call(); /* * This simulates the scenario where something like react4j has only scheduler that will * react to changes in arez and potentially re-schedule arez events. */ if ( count.decrementAndGet() > 0 ) { context.safeAction( () -> observerReference.get().setState( Flags.STATE_STALE ), Flags.NO_VERIFY_ACTION_REQUIRED ); } environment.set( null ); return result; } } ); final Observer observer = context.observer( () -> { final ObservableValue<Object> observableValue = Arez.context().observable(); observableValue.reportObserved(); callCount.incrementAndGet(); assertEquals( environment.get(), "RED" ); }, Flags.RUN_LATER ); observerReference.set( observer ); assertEquals( callCount.get(), 0 ); assertNull( environment.get() ); context.triggerScheduler(); assertEquals( callCount.get(), 3 ); assertEquals( count.get(), 0 ); assertNull( environment.get() ); } @Test public void isReadOnlyTransactionActive() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); context.action( () -> { assertTrue( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); observeADependency(); context.action( () -> { assertTrue( context.isTransactionActive() ); assertTrue( context.isReadOnlyTransactionActive() ); observeADependency(); }, Flags.READ_ONLY ); } ); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); } @Test public void isWriteTransactionActive() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); context.action( () -> { assertTrue( context.isTransactionActive() ); assertTrue( context.isReadWriteTransactionActive() ); observeADependency(); context.action( () -> { assertTrue( context.isTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); observeADependency(); }, Flags.READ_ONLY ); } ); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); } @Test public void isTrackingTransactionActive() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); assertFalse( context.isTrackingTransactionActive() ); context.action( () -> { assertTrue( context.isTransactionActive() ); assertFalse( 
context.isReadOnlyTransactionActive() ); assertTrue( context.isReadWriteTransactionActive() ); observeADependency(); } ); final Observer tracker = context.tracker( () -> assertFalse( context.isTrackingTransactionActive() ) ); context.observe( tracker, () -> { observeADependency(); assertTrue( context.isTransactionActive() ); assertTrue( context.isReadOnlyTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); assertTrue( context.isTrackingTransactionActive() ); } ); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); assertFalse( context.isTrackingTransactionActive() ); } @SuppressWarnings( "unused" ) @Test public void requireNewTransaction_false() throws Throwable { final ArezContext context = Arez.context(); context.action( () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); context.action( () -> assertNotEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); final int result1 = context.action( () -> { assertNotEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); context.safeAction( () -> assertNotEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); final int result2 = context.safeAction( () -> { assertNotEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); context.action( () -> assertEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED ); final int result3 = context.action( () -> { assertEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED ); context.safeAction( () -> assertEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED ); final int result4 = context.safeAction( () -> { assertEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED ); }, Flags.NO_VERIFY_ACTION_REQUIRED ); } @SuppressWarnings( "unused" ) @Test public void nestedAction_allowed() throws Throwable { final ArezContext context = Arez.context(); final AtomicInteger updateCalled = new AtomicInteger(); final Observer tracker = context.tracker( updateCalled::incrementAndGet, Flags.READ_WRITE | Flags.NESTED_ACTIONS_ALLOWED ); context.observe( tracker, () -> { observeADependency(); assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); context.action( () -> assertNotEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); final int result1 = context.action( () -> { assertNotEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); context.safeAction( () -> assertNotEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); final int result2 = context.safeAction( () -> { assertNotEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); context.action( () -> assertEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED ); } ); } @SuppressWarnings( "unused" ) @Test public void nestedAction_notAllowed() throws Throwable { 
final ArezContext context = Arez.context(); final AtomicInteger updateCalled = new AtomicInteger(); final Observer tracker = context.tracker( updateCalled::incrementAndGet, Flags.READ_WRITE | Flags.NESTED_ACTIONS_DISALLOWED ); context.observe( tracker, () -> { observeADependency(); assertInvariantFailure( () -> context.action( "A1", AbstractArezTest::observeADependency, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0187: Attempting to nest action named 'A1' inside transaction named 'Observer@1' created by an observer that does not allow nested actions." ); assertInvariantFailure( () -> context.action( "A2", () -> 1, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0187: Attempting to nest action named 'A2' inside transaction named 'Observer@1' created by an observer that does not allow nested actions." ); assertInvariantFailure( () -> context.safeAction( "A3", AbstractArezTest::observeADependency, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0187: Attempting to nest action named 'A3' inside transaction named 'Observer@1' created by an observer that does not allow nested actions." ); assertInvariantFailure( () -> context.safeAction( "A4", () -> 1, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0187: Attempting to nest action named 'A4' inside transaction named 'Observer@1' created by an observer that does not allow nested actions." ); } ); } @Test public void verifyActionFlags() { final Procedure executable = () -> { }; assertInvariantFailure( () -> Arez.context().action( executable, Flags.DEACTIVATE_ON_UNOBSERVE ), "Arez-0212: Flags passed to action 'Action@1' include some " + "unexpected flags set: " + Flags.DEACTIVATE_ON_UNOBSERVE ); } @Test public void verifyActionFlags_badEnvironmentFlags() { final Procedure executable = () -> { }; assertInvariantFailure( () -> Arez.context() .action( executable, Flags.ENVIRONMENT_REQUIRED | Flags.ENVIRONMENT_NOT_REQUIRED ), "Arez-0125: Flags passed to action 'Action@1' include both ENVIRONMENT_REQUIRED and ENVIRONMENT_NOT_REQUIRED." ); } @Test public void verifyActionFlags_badTransactionFlags() { final Procedure executable = () -> { }; assertInvariantFailure( () -> Arez.context() .action( executable, Flags.READ_ONLY | Flags.READ_WRITE ), "Arez-0126: Flags passed to action 'Action@1' include both READ_ONLY and READ_WRITE." ); } @Test public void verifyActionFlags_badVerifyAction() { final Procedure executable = () -> { }; assertInvariantFailure( () -> Arez.context() .action( executable, Flags.VERIFY_ACTION_REQUIRED | Flags.NO_VERIFY_ACTION_REQUIRED ), "Arez-0127: Flags passed to action 'Action@1' include both VERIFY_ACTION_REQUIRED and NO_VERIFY_ACTION_REQUIRED." ); } @Test public void action_function() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final String expectedValue = ValueUtil.randomString(); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String v0 = context.action( name, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), name ); assertNull( transaction.getPrevious() ); assertEquals( transaction.getContext(), context ); assertEquals( transaction.getId(), nextNodeId ); assertFalse( transaction.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); //Not tracking so no state updated assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); return expectedValue; }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); //ObservableValue still not updated assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); assertEquals( observableValue.getObservers().size(), 0 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), v0 ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_NO_REPORT_RESULT() throws Throwable { final ArezContext context = Arez.context(); final ObservableValue<?> observableValue = context.observable(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.action( () -> { observableValue.reportObserved(); return ValueUtil.randomString(); }, Flags.NO_REPORT_RESULT ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class ); handler.assertNextEvent( TransactionStartEvent.class ); handler.assertNextEvent( TransactionCompleteEvent.class ); handler.assertNextEvent( ActionCompleteEvent.class, e -> assertNull( e.getResult() ) ); } @Test public void safeAction_NO_REPORT_RESULT() { final ArezContext context = Arez.context(); final ObservableValue<?> observableValue = context.observable(); final TestSpyEventHandler handler = new 
TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.safeAction( () -> { observableValue.reportObserved(); return ValueUtil.randomString(); }, Flags.NO_REPORT_RESULT ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class ); handler.assertNextEvent( TransactionStartEvent.class ); handler.assertNextEvent( TransactionCompleteEvent.class ); handler.assertNextEvent( ActionCompleteEvent.class, e -> assertNull( e.getResult() ) ); } @Test public void action_function_throwsException() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." ); final String name = ValueUtil.randomString(); final IOException ioException = new IOException(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); assertThrows( IOException.class, () -> context.action( name, () -> { throw ioException; }, 0, new Object[]{ param1, param2, param3 } ) ); assertFalse( context.isTransactionActive() ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertEquals( e.getThrowable(), ioException ); assertTrue( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_Environment_Required() throws Throwable { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.action( AbstractArezTest::observeADependency, Flags.ENVIRONMENT_REQUIRED ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void action_Environment_Not_Required() throws Throwable { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.action( AbstractArezTest::observeADependency, Flags.ENVIRONMENT_NOT_REQUIRED ); assertEquals( inEnvironmentCallCount.get(), 0 ); } @Test public void action_Environment_Default() 
throws Throwable { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.action( AbstractArezTest::observeADependency ); assertEquals( inEnvironmentCallCount.get(), 0 ); } @Test public void safeAction_Environment_Required() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( AbstractArezTest::observeADependency, Flags.ENVIRONMENT_REQUIRED ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void safeAction_Environment_Not_Required() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( AbstractArezTest::observeADependency, Flags.ENVIRONMENT_NOT_REQUIRED ); assertEquals( inEnvironmentCallCount.get(), 0 ); } @Test public void safeAction_Environment_Default() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( AbstractArezTest::observeADependency ); assertEquals( inEnvironmentCallCount.get(), 0 ); } @Test public void action_function_NameButNoMutationVariant() throws Throwable { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); context.action( name, () -> { observeADependency(); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), name ); return ValueUtil.randomString(); } ); } @Test public void action_procedure_verifyActionRequired_false() throws Throwable { final Procedure executable = ValueUtil::randomString; Arez.context().action( executable, Flags.NO_VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void action_procedure_verifyActionRequired_true_butInvariantsDisabled() throws Throwable { ArezTestUtil.noCheckInvariants(); final Procedure executable = ValueUtil::randomString; Arez.context().action( executable, Flags.VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void action_procedure_verifyActionRequired_true() { final Procedure procedure = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().action( "X", procedure, Flags.VERIFY_ACTION_REQUIRED ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." 
); } @Test public void action_procedure_verifyActionRequired_true_is_default() { assertInvariantFailure( () -> Arez.context().action( "X", (Procedure) ValueUtil::randomString ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void action_function_verifyActionRequired_false() throws Throwable { Arez.context().action( (Function<String>) ValueUtil::randomString, Flags.NO_VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void action_function_verifyActionRequired_true_butInvariantsDisabled() throws Throwable { ArezTestUtil.noCheckInvariants(); Arez.context().action( (Function<String>) ValueUtil::randomString, Flags.VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void action_function_verifyActionRequired_true() { final Function<String> function = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().action( "X", function, Flags.VERIFY_ACTION_REQUIRED ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void action_function_verifyActionRequired_true_is_default() { final Function<String> function = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().action( "X", function ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void safeAction_procedure_verifyActionRequired_false() { final SafeProcedure procedure = ValueUtil::randomString; Arez.context().safeAction( ValueUtil.randomString(), procedure, Flags.NO_VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void safeAction_procedure_verifyActionRequired_true_butInvariantsDisabled() { ArezTestUtil.noCheckInvariants(); final SafeProcedure executable = ValueUtil::randomString; Arez.context().safeAction( ValueUtil.randomString(), executable, Flags.VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void safeAction_procedure_verifyActionRequired_true() { final SafeProcedure procedure = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().safeAction( "X", procedure, Flags.VERIFY_ACTION_REQUIRED ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void safeAction_procedure_verifyActionRequired_true_is_default() { assertInvariantFailure( () -> Arez.context().safeAction( "X", (SafeProcedure) ValueUtil::randomString ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." 
); } @Test public void safeAction_function_verifyActionRequired_false() { Arez.context().safeAction( (SafeFunction<String>) ValueUtil::randomString, Flags.NO_VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void safeAction_function_verifyActionRequired_true_butInvariantsDisabled() { ArezTestUtil.noCheckInvariants(); Arez.context().safeAction( (SafeFunction<String>) ValueUtil::randomString, Flags.VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void safeAction_function_verifyActionRequired_true() { final SafeFunction<String> function = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().safeAction( "X", function, Flags.VERIFY_ACTION_REQUIRED ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void safeAction_function_verifyActionRequired_true_is_default() { assertInvariantFailure( () -> Arez.context().safeAction( "X", (SafeFunction<String>) ValueUtil::randomString ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void action_function_minimalParameters() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final String expectedValue = ValueUtil.randomString(); final ObservableValue<Object> observableValue = context.observable(); final int nextNodeId = context.getNextNodeId(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String v0 = context.action( () -> { observableValue.reportObserved(); assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), "Action@" + nextNodeId ); assertTrue( transaction.isMutation() ); return expectedValue; } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 0 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), v0 ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 0 ); } ); } @Test public void track_function() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final String expectedValue = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer tracker = context.tracker( callCount::incrementAndGet, Flags.READ_WRITE ); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new 
TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String v0 = context.observe( tracker, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), tracker.getName() ); assertEquals( transaction.isMutation(), tracker.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); // Tracking so state updated final ArrayList<ObservableValue<?>> observableValues = transaction.getObservableValues(); assertNotNull( observableValues ); assertEquals( observableValues.size(), 1 ); assertEquals( observableValue.getObservers().size(), 0 ); assertEquals( observableValue.getLastTrackerTransactionId(), nextNodeId ); return expectedValue; }, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); context.getSpy().removeSpyEventHandler( handler ); assertEquals( v0, expectedValue ); assertEquals( observableValue.getLastTrackerTransactionId(), 0 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); // Reaction not called as the function sets up initial tracking assertEquals( callCount.get(), 0 ); context.action( observableValue::reportChanged ); assertEquals( callCount.get(), 1 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertTrue( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertTrue( e.isMutation() ); final ObserverInfo info = e.getTracker(); assertNotNull( info ); assertEquals( info.getName(), tracker.getName() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertTrue( e.isMutation() ); final ObserverInfo info = e.getTracker(); assertNotNull( info ); assertEquals( info.getName(), tracker.getName() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), v0 ); assertTrue( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void observe_function_no_parameters() throws Throwable { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger callCount = new AtomicInteger(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); final Observer observer = context.tracker( callCount::incrementAndGet, Flags.AREZ_OR_NO_DEPENDENCIES | Flags.READ_WRITE ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final int result = context.observe( observer, () -> { final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), 
observer.getName() ); return 23; } ); assertEquals( inEnvironmentCallCount.get(), 0 ); assertEquals( result, 23 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertTrue( e.isTracked() ); assertEquals( e.getParameters().length, 0 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertTrue( e.isMutation() ); final ObserverInfo info = e.getTracker(); assertNotNull( info ); assertEquals( info.getName(), observer.getName() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertTrue( e.isMutation() ); final ObserverInfo info = e.getTracker(); assertNotNull( info ); assertEquals( info.getName(), observer.getName() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), result ); assertTrue( e.isTracked() ); assertEquals( e.getParameters().length, 0 ); } ); } @Test public void observe_NO_REPORT_RESULT() throws Throwable { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.tracker( callCount::incrementAndGet, Flags.AREZ_OR_NO_DEPENDENCIES | Flags.NO_REPORT_RESULT ); assertTrue( observer.noReportResults() ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final int result = context.observe( observer, () -> 23 ); assertEquals( result, 23 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class ); handler.assertNextEvent( TransactionStartEvent.class ); handler.assertNextEvent( TransactionCompleteEvent.class ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertTrue( e.returnsResult() ); assertNull( e.getResult() ); } ); } @Test public void observe_environment_Required() throws Throwable { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger callCount = new AtomicInteger(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); final Observer observer = context.tracker( callCount::incrementAndGet, Flags.AREZ_OR_NO_DEPENDENCIES | Flags.ENVIRONMENT_REQUIRED ); context.observe( observer, () -> 23 ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void track_function_passingNonTracker() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertInvariantFailure( () -> context.observe( observer, callCount::incrementAndGet ), "Arez-0017: Attempted to invoke observe(..) on observer named '" + observer.getName() + "' but observer is not configured to use an application executor." ); assertEquals( callCount.get(), 0 ); } @Test public void action_safeFunction() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final String expectedValue = ValueUtil.randomString(); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String v0 = context.safeAction( name, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), name ); assertNull( transaction.getPrevious() ); assertEquals( transaction.getContext(), context ); assertEquals( transaction.getId(), nextNodeId ); assertFalse( transaction.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); //Not tracking so no state updated assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); return expectedValue; }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); //ObservableValue still not updated assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); assertEquals( observableValue.getObservers().size(), 0 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), v0 ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_safeFunction_throws_Exception() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final AccessControlException secException = new AccessControlException( "" ); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); assertThrows( AccessControlException.class, () -> context.safeAction( name, () -> { throw secException; }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ) ); assertFalse( context.isTransactionActive() ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertEquals( e.getThrowable(), secException ); assertTrue( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_safeFunction_minimalParameters() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final int nextNodeId = context.currentNextTransactionId(); final String expectedValue = ValueUtil.randomString(); final String v0 = context.safeAction( () -> { observeADependency(); assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), "Action@" + nextNodeId ); assertTrue( transaction.isMutation() ); return expectedValue; } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); } @Test public void action_safeFunction_NameButNoMutationVariant() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); context.safeAction( name, () -> { observeADependency(); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), name ); return ValueUtil.randomString(); } ); } @Test public void track_safeFunction() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final String expectedValue = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer tracker = context.tracker( callCount::incrementAndGet ); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String v0 = context.safeObserve( tracker, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), tracker.getName() ); assertEquals( transaction.isMutation(), tracker.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( 
nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); // Tracking so state updated final ArrayList<ObservableValue<?>> observableValues = transaction.getObservableValues(); assertNotNull( observableValues ); assertEquals( observableValues.size(), 1 ); assertEquals( observableValue.getObservers().size(), 0 ); assertEquals( observableValue.getLastTrackerTransactionId(), nextNodeId ); return expectedValue; } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); assertEquals( observableValue.getLastTrackerTransactionId(), 0 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); // Reaction not called as the function sets up initial tracking assertEquals( callCount.get(), 0 ); context.action( observableValue::reportChanged ); assertEquals( callCount.get(), 1 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); } @Test public void track_safeFunction_passingNonTracker() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertInvariantFailure( () -> context.safeObserve( observer, callCount::incrementAndGet ), "Arez-0018: Attempted to invoke safeObserve(..) on observer named '" + observer.getName() + "' but observer is not configured to use an application executor." ); assertEquals( callCount.get(), 0 ); } @Test public void safeAction_safeProcedure_minimalParameters() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final int nextNodeId = context.getNextNodeId(); context.safeAction( () -> { observeADependency(); assertTrue( context.isTransactionActive() ); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), "Action@" + nextNodeId ); } ); assertFalse( context.isTransactionActive() ); } @Test public void safeAction_safeProcedure_NameButNoMutationVariant() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); context.safeAction( name, () -> { observeADependency(); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), name ); } ); } @Test public void action_safeProcedure_throws_Exception() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final AccessControlException secException = new AccessControlException( "" ); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final SafeProcedure procedure = () -> { throw secException; }; assertThrows( AccessControlException.class, () -> context.safeAction( name, procedure, 0, new Object[]{ param1, param2, param3 } ) ); assertFalse( context.isTransactionActive() ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertEquals( e.getThrowable(), secException ); assertFalse( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void track_safeProcedure() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final AtomicInteger callCount = new AtomicInteger(); final Observer tracker = context.tracker( callCount::incrementAndGet ); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); context.safeObserve( tracker, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), tracker.getName() ); assertEquals( transaction.isMutation(), tracker.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); // Tracking so state updated final ArrayList<ObservableValue<?>> observableValues = transaction.getObservableValues(); assertNotNull( observableValues ); assertEquals( observableValues.size(), 1 ); assertEquals( observableValue.getObservers().size(), 0 ); assertEquals( observableValue.getLastTrackerTransactionId(), nextNodeId ); } ); assertFalse( context.isTransactionActive() ); assertEquals( observableValue.getLastTrackerTransactionId(), 0 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); // Reaction not called as the function sets up initial tracking assertEquals( callCount.get(), 0 ); context.action( observableValue::reportChanged ); assertEquals( callCount.get(), 1 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); } @Test public void track_safeProcedure_passingNonTracker() { final ArezContext context = Arez.context(); final 
AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( new CountAndObserveProcedure() ); final SafeProcedure procedure = callCount::incrementAndGet; assertInvariantFailure( () -> context.safeObserve( observer, procedure ), "Arez-0020: Attempted to invoke safeObserve(..) on observer named '" + observer.getName() + "' but observer is not configured to use an application executor." ); assertEquals( callCount.get(), 0 ); } @Test public void action_procedure_NameButNoMutationVariant() throws Throwable { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); context.action( name, () -> { observeADependency(); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), name ); } ); } @Test public void action_procedure_minimalParameters() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final int nextNodeId = context.currentNextTransactionId(); context.action( () -> { observeADependency(); assertTrue( context.isTransactionActive() ); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), "Action@" + nextNodeId ); } ); assertFalse( context.isTransactionActive() ); } @Test public void track_procedure_passingNonTracker() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( new CountAndObserveProcedure() ); final Procedure procedure = callCount::incrementAndGet; assertInvariantFailure( () -> context.observe( observer, procedure ), "Arez-0019: Attempted to invoke observe(..) on observer named '" + observer.getName() + "' but observer is not configured to use an application executor." 
); assertEquals( callCount.get(), 0 ); } @Test public void track_procedure() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final AtomicInteger callCount = new AtomicInteger(); final Observer tracker = context.tracker( callCount::incrementAndGet ); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); context.observe( tracker, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), tracker.getName() ); assertEquals( transaction.isMutation(), tracker.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); // Tracking so state updated final ArrayList<ObservableValue<?>> observableValues = transaction.getObservableValues(); assertNotNull( observableValues ); assertEquals( observableValues.size(), 1 ); assertEquals( observableValue.getObservers().size(), 0 ); assertEquals( observableValue.getLastTrackerTransactionId(), nextNodeId ); } ); assertFalse( context.isTransactionActive() ); assertEquals( observableValue.getLastTrackerTransactionId(), 0 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); // Reaction not called as the function sets up initial tracking assertEquals( callCount.get(), 0 ); context.action( observableValue::reportChanged ); assertEquals( callCount.get(), 1 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); } @Test public void nonTrackingSafeProcedureObservingSingleObservable() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.safeAction( name, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), name ); assertNull( transaction.getPrevious() ); assertEquals( transaction.getContext(), context ); assertEquals( transaction.getId(), nextNodeId ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); //Not tracking so no state updated assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); //ObservableValue still not updated assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); assertEquals( observableValue.getObservers().size(), 0 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); assertFalse( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_procedure() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final ObservableValue<Object> observableValue1 = Arez.context().observable(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.action( name, () -> { observableValue1.reportObserved(); assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), name ); assertNull( transaction.getPrevious() ); assertEquals( transaction.getContext(), context ); assertEquals( transaction.getId(), nextNodeId ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); //Not tracking so no state updated assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); //ObservableValue still not updated assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); assertEquals( observableValue.getObservers().size(), 0 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); assertFalse( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_procedure_throwsException() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
    );
    final String name = ValueUtil.randomString();
    final IOException ioException = new IOException();
    final String param1 = "";
    final Object param2 = null;
    final int param3 = 3;
    final ObservableValue<Object> observableValue = Arez.context().observable();
    final TestSpyEventHandler handler = new TestSpyEventHandler();
    context.getSpy().addSpyEventHandler( handler );
    final Procedure procedure = () -> {
      observableValue.reportObserved();
      throw ioException;
    };
    assertThrows( IOException.class,
                  () -> context.action( name, procedure, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ) );
    assertFalse( context.isTransactionActive() );

    handler.assertEventCount( 4 );
    handler.assertNextEvent( ActionStartEvent.class, e -> {
      assertEquals( e.getName(), name );
      assertFalse( e.isTracked() );
      final Object[] parameters = e.getParameters();
      assertEquals( parameters.length, 3 );
      assertEquals( parameters[ 0 ], param1 );
      assertEquals( parameters[ 1 ], param2 );
      assertEquals( parameters[ 2 ], param3 );
    } );
    handler.assertNextEvent( TransactionStartEvent.class, e -> {
      assertEquals( e.getName(), name );
      assertFalse( e.isMutation() );
      assertNull( e.getTracker() );
    } );
    handler.assertNextEvent( TransactionCompleteEvent.class, e -> {
      assertEquals( e.getName(), name );
      assertFalse( e.isMutation() );
      assertNull( e.getTracker() );
    } );
    handler.assertNextEvent( ActionCompleteEvent.class, e -> {
      assertEquals( e.getName(), name );
      assertEquals( e.getThrowable(), ioException );
      assertFalse( e.returnsResult() );
      assertNull( e.getResult() );
      assertFalse( e.isTracked() );
      final Object[] parameters = e.getParameters();
      assertEquals( parameters.length, 3 );
      assertEquals( parameters[ 0 ], param1 );
      assertEquals( parameters[ 1 ], param2 );
      assertEquals( parameters[ 2 ], param3 );
    } );
  }

  @Test
  public void nestedProceduresAccessingSameObservable()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    assertFalse( context.isTransactionActive() );
    assertThrows( context::getTransaction );

    final int nextNodeId = context.currentNextTransactionId();
    final String name = ValueUtil.randomString();
    final String name2 = ValueUtil.randomString();
    context.action( name, () -> {
      observeADependency();
      assertTrue( context.isTransactionActive() );
      final Transaction transaction1 = context.getTransaction();
      assertEquals( transaction1.getName(), name );
      assertNull( transaction1.getPrevious() );
      assertEquals( transaction1.getContext(), context );
      assertEquals( transaction1.getId(), nextNodeId );
      assertTrue( transaction1.isRootTransaction() );
      assertEquals( transaction1.getRootTransaction(), transaction1 );

      context.action( name2, () -> {
        observeADependency();
        assertTrue( context.isTransactionActive() );
        final Transaction transaction2 = context.getTransaction();
        assertEquals( transaction2.getName(), name2 );
        assertEquals( transaction2.getPrevious(), transaction1 );
        assertEquals( transaction2.getContext(), context );
        assertEquals( transaction2.getId(), nextNodeId + 1 );
        assertFalse( transaction2.isRootTransaction() );
        assertEquals( transaction2.getRootTransaction(), transaction1 );
      }, Flags.REQUIRE_NEW_TRANSACTION );

      final Transaction transaction1b = context.getTransaction();
      assertEquals( transaction1b.getName(), name );
      assertNull( transaction1b.getPrevious() );
      assertEquals( transaction1b.getContext(), context );
      assertEquals( transaction1b.getId(), nextNodeId );
      assertTrue( transaction1b.isRootTransaction() );
      assertEquals( transaction1b.getRootTransaction(), transaction1b );
    } );
    assertFalse( context.isTransactionActive() );
  }

  @Test
  public void
nextNodeId()
  {
    final ArezContext context = Arez.context();
    assertEquals( context.currentNextTransactionId(), 1 );
    assertEquals( context.nextTransactionId(), 1 );
    assertEquals( context.currentNextTransactionId(), 2 );
  }

  @Test
  public void observer_with_onDepsUpdated()
  {
    final ArezContext context = Arez.context();
    final ObservableValue<Object> observable = context.observable();
    final AtomicInteger observedCallCount = new AtomicInteger();
    final AtomicInteger onDepsChangeCallCount = new AtomicInteger();
    final String name = ValueUtil.randomString();
    context.observer( name, () -> {
      observedCallCount.incrementAndGet();
      observable.reportObserved();
      assertEquals( context.getTransaction().getName(), name );
    }, onDepsChangeCallCount::incrementAndGet );
    assertEquals( onDepsChangeCallCount.get(), 0 );
    context.safeAction( observable::reportChanged );
    assertEquals( onDepsChangeCallCount.get(), 1 );
  }

  @Test
  public void observer_withComponent_and_onDepsUpdated()
  {
    final ArezContext context = Arez.context();
    final ObservableValue<Object> observable = context.observable();
    final AtomicInteger observeCallCount = new AtomicInteger();
    final AtomicInteger onDepsChangeCallCount = new AtomicInteger();
    final Component component = context.component( ValueUtil.randomString(), 22 );
    final String name = ValueUtil.randomString();
    final Observer observer = context.observer( component, name, () -> {
      observeCallCount.incrementAndGet();
      observable.reportObserved();
      assertEquals( context.getTransaction().getName(), name );
    }, onDepsChangeCallCount::incrementAndGet );
    assertEquals( onDepsChangeCallCount.get(), 0 );
    final ComponentInfo componentInfo = observer.asInfo().getComponent();
    assertNotNull( componentInfo );
    assertEquals( componentInfo.getName(), component.getName() );
    context.safeAction( observable::reportChanged );
    assertEquals( onDepsChangeCallCount.get(), 1 );
  }

  @Test
  public void observerErrorHandler()
  {
    final ArezContext context = Arez.context();
    // Clear out handler added as part of test infrastructure
    context.getObserverErrorHandlerSupport().getObserverErrorHandlers().clear();
    final ObserverError observerError = ObserverError.REACTION_ERROR;
    final Throwable throwable = new Throwable();
    final Procedure action = new NoopProcedure();
    final Observer observer = context.observer( ValueUtil.randomString(), action, Flags.READ_WRITE );
    final AtomicInteger callCount = new AtomicInteger();
    final ObserverErrorHandler handler = ( o, e, t ) -> {
      callCount.incrementAndGet();
      assertEquals( o, observer );
      assertEquals( e, observerError );
      assertEquals( t, throwable );
    };
    context.addObserverErrorHandler( handler );
    assertEquals( context.getObserverErrorHandlerSupport().getObserverErrorHandlers().size(), 1 );
    assertTrue( context.getObserverErrorHandlerSupport().getObserverErrorHandlers().contains( handler ) );
    assertEquals( callCount.get(), 0 );
    context.reportObserverError( observer, observerError, throwable );
    assertEquals( callCount.get(), 1 );
    context.removeObserverErrorHandler( handler );
    assertEquals( context.getObserverErrorHandlerSupport().getObserverErrorHandlers().size(), 0 );
    context.reportObserverError( observer, observerError, throwable );
    assertEquals( callCount.get(), 1 );
  }

  @Test
  public void reportObserverError_when_spyEventHandler_present()
  {
    final ArezContext context = Arez.context();
    // Clear out handler added as part of test infrastructure
    context.getObserverErrorHandlerSupport().getObserverErrorHandlers().clear();
    final ObserverError observerError = ObserverError.REACTION_ERROR;
    final Throwable throwable = new
Throwable(); final Procedure action = new NoopProcedure(); final Observer observer = context.observer( action ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.reportObserverError( observer, observerError, throwable ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObserverErrorEvent.class, event -> { assertEquals( event.getObserver().getName(), observer.getName() ); assertEquals( event.getError(), observerError ); assertEquals( event.getThrowable(), throwable ); } ); } @Test public void addObserverErrorHandler_whenDisabled() { ArezTestUtil.disableObserverErrorHandlers(); final ObserverErrorHandler handler = ( o, e, t ) -> { }; assertInvariantFailure( () -> Arez.context().addObserverErrorHandler( handler ), "Arez-0182: ArezContext.addObserverErrorHandler() invoked when Arez.areObserverErrorHandlersEnabled() returns false." ); } @Test public void removeObserverErrorHandler_whenDisabled() { ArezTestUtil.disableObserverErrorHandlers(); final ArezContext context = Arez.context(); final ObserverErrorHandler handler = ( o, e, t ) -> { }; assertInvariantFailure( () -> context.removeObserverErrorHandler( handler ), "Arez-0181: ArezContext.removeObserverErrorHandler() invoked when Arez.areObserverErrorHandlersEnabled() returns false." ); } @Test public void getSpy_whenSpiesDisabled() { ArezTestUtil.disableSpies(); final ArezContext context = Arez.context(); assertInvariantFailure( context::getSpy, "Arez-0021: Attempting to get Spy but spies are not enabled." ); } @Test public void scheduleReaction() { final ArezContext context = Arez.context(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); context.scheduleReaction( observer ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 1L ); assertTrue( context.getTaskQueue().getOrderedTasks().anyMatch( o -> o == observer.getTask() ) ); } @Test public void scheduleReaction_shouldAbortInReadOnlyTransaction() { final ArezContext context = Arez.context(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); assertInvariantFailure( () -> { final Procedure executable = () -> context.scheduleReaction( observer ); context.action( executable, Flags.READ_ONLY ); }, "Arez-0013: Observer named '" + observer.getName() + "' attempted to be scheduled " + "during read-only transaction." ); } @Test public void scheduleReaction_shouldAbortInReadWriteOwnedTransaction() { final ArezContext context = Arez.context(); final Observer derivation = context.computable( () -> "" ).getObserver(); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); setCurrentTransaction( derivation ); assertInvariantFailure( () -> context.scheduleReaction( derivation ), "Arez-0014: Observer named '" + derivation.getName() + "' attempted to schedule itself " + "during read-only tracking transaction. Observers that are supporting ComputableValue " + "instances must not schedule self." 
); } @Test public void scheduleReaction_generates_spyEvent() { final ArezContext context = Arez.context(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.scheduleReaction( observer ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 1L ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObserveScheduleEvent.class, event -> assertEquals( event.getObserver().getName(), observer.getName() ) ); } @Test public void computableValue() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final Procedure onActivate = ValueUtil::randomString; final Procedure onDeactivate = ValueUtil::randomString; final Procedure onStale = ValueUtil::randomString; final ComputableValue<String> computableValue = context.computable( null, name, function, onActivate, onDeactivate, onStale, Flags.PRIORITY_HIGH ); assertEquals( computableValue.getName(), name ); assertEquals( computableValue.getContext(), context ); assertFalse( computableValue.getObserver().isKeepAlive() ); assertTrue( computableValue.getObserver().areArezDependenciesRequired() ); assertFalse( computableValue.getObserver().isEnvironmentRequired() ); assertEquals( computableValue.getObservableValue().getName(), name ); assertEquals( computableValue.getOnActivate(), onActivate ); assertEquals( computableValue.getOnDeactivate(), onDeactivate ); assertEquals( computableValue.getOnStale(), onStale ); assertEquals( computableValue.getObserver().getName(), name ); assertEquals( computableValue.getObserver().getTask().getPriority(), Priority.HIGH ); assertFalse( computableValue.getObserver().canObserveLowerPriorityDependencies() ); } @Test public void computable_with_NO_REPORT_RESULT() { final ArezContext context = Arez.context(); final ObservableValue<Object> observable = Arez.context().observable(); final SafeFunction<String> function = () -> { observable.reportObserved(); return ""; }; final ComputableValue<String> computableValue = context.computable( function, Flags.NO_REPORT_RESULT ); assertTrue( computableValue.getObserver().noReportResults() ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.safeAction( computableValue::get ); handler.assertEventCount( 9 ); handler.assertNextEvent( ActionStartEvent.class ); handler.assertNextEvent( TransactionStartEvent.class ); handler.assertNextEvent( ComputeStartEvent.class ); handler.assertNextEvent( TransactionStartEvent.class ); handler.assertNextEvent( ObservableValueChangeEvent.class ); handler.assertNextEvent( TransactionCompleteEvent.class ); handler.assertNextEvent( ComputeCompleteEvent.class, e -> assertNull( e.getResult() ) ); handler.assertNextEvent( TransactionCompleteEvent.class ); handler.assertNextEvent( ActionCompleteEvent.class ); } @Test public void computableValue_withComponent() { final ArezContext context = Arez.context(); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); final String name = ValueUtil.randomString(); final ComputableValue<String> computableValue = context.computable( component, name, () -> "", null, null, null ); assertEquals( computableValue.getName(), name ); assertEquals( 
computableValue.getComponent(), component ); } @Test public void computableValue_Environment_Required() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final ComputableValue<String> computableValue = context.computable( function, Flags.ENVIRONMENT_REQUIRED ); assertTrue( computableValue.getObserver().isEnvironmentRequired() ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( computableValue::get ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void computableValue_Environment_NotRequired() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final ComputableValue<String> computableValue = context.computable( function, Flags.ENVIRONMENT_NOT_REQUIRED ); assertFalse( computableValue.getObserver().isEnvironmentRequired() ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( computableValue::get ); assertEquals( inEnvironmentCallCount.get(), 0 ); } @Test public void computableValue_canObserveLowerPriorityDependencies() { final ComputableValue<String> computableValue = Arez.context().computable( () -> "", Flags.OBSERVE_LOWER_PRIORITY_DEPENDENCIES ); assertTrue( computableValue.getObserver().canObserveLowerPriorityDependencies() ); } @Test public void computableValue_mayNotAccessArezState() { final ArezContext context = Arez.context(); assertFalse( context.computable( () -> "", Flags.AREZ_OR_NO_DEPENDENCIES ) .getObserver() .areArezDependenciesRequired() ); assertFalse( context.computable( () -> "", Flags.AREZ_OR_EXTERNAL_DEPENDENCIES ) .getObserver() .areArezDependenciesRequired() ); } @Test public void computableValue_withKeepAliveAndRunImmediately() { final ArezContext context = Arez.context(); final AtomicInteger calls = new AtomicInteger(); final SafeFunction<String> action = () -> { observeADependency(); calls.incrementAndGet(); return ""; }; final ComputableValue<String> computableValue = context.computable( action, Flags.KEEPALIVE | Flags.RUN_NOW ); assertTrue( computableValue.getObserver().isKeepAlive() ); assertEquals( calls.get(), 1 ); } @Test public void computableValue_withKeepAliveAndNoRunImmediately() { final ArezContext context = Arez.context(); final AtomicInteger calls = new AtomicInteger(); final SafeFunction<String> action = () -> { observeADependency(); calls.incrementAndGet(); return ""; }; final ComputableValue<String> computableValue = context.computable( action, Flags.KEEPALIVE | Flags.RUN_LATER ); assertTrue( computableValue.getObserver().isKeepAlive() ); assertEquals( calls.get(), 0 ); context.triggerScheduler(); assertEquals( calls.get(), 1 ); } @Test public void computableValue_pass_no_hooks() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final ComputableValue<String> computableValue = context.computable( 
name, function ); assertEquals( computableValue.getName(), name ); assertEquals( computableValue.getContext(), context ); assertEquals( computableValue.getObserver().getName(), name ); assertEquals( computableValue.getObservableValue().getName(), name ); assertNull( computableValue.getOnActivate() ); assertNull( computableValue.getOnDeactivate() ); assertNull( computableValue.getOnStale() ); assertEquals( computableValue.getObserver().getTask().getPriority(), Priority.NORMAL ); } @Test public void computableValue_minimumParameters() { final ArezContext context = Arez.context(); context.setNextNodeId( 22 ); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final ComputableValue<String> computableValue = context.computable( function ); final String name = "ComputableValue@22"; assertEquals( computableValue.getName(), name ); assertEquals( computableValue.getContext(), context ); assertEquals( computableValue.getObserver().getName(), name ); assertEquals( computableValue.getObservableValue().getName(), name ); assertNull( computableValue.getOnActivate() ); assertNull( computableValue.getOnDeactivate() ); assertNull( computableValue.getOnStale() ); assertEquals( computableValue.getObserver().getTask().getPriority(), Priority.NORMAL ); assertFalse( computableValue.getObserver().canObserveLowerPriorityDependencies() ); } @Test public void computableValue_generates_spyEvent() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final ComputableValue<String> computableValue = context.computable( ValueUtil.randomString(), () -> { observeADependency(); return ""; } ); handler.assertEventCount( 1 ); handler.assertNextEvent( ComputableValueCreateEvent.class, event -> assertEquals( event.getComputableValue().getName(), computableValue.getName() ) ); } @Test public void observer_noObservers() { setIgnoreObserverErrors( true ); Arez.context().setNextNodeId( 22 ); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = Arez.context().observer( callCount::incrementAndGet ); assertEquals( observer.getName(), "Observer@22" ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_UP_TO_DATE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertEquals( callCount.get(), 1 ); assertEquals( getObserverErrors().size(), 1 ); assertEquals( getObserverErrors().get( 0 ), "Observer: Observer@22 Error: REACTION_ERROR java.lang.IllegalStateException: Arez-0172: Observer named 'Observer@22' that does not use an external executor completed observe function but is not observing any properties. As a result the observer will never be rescheduled." 
); } @Test public void autorun_noObservers_manualReportStaleAllowed() { setIgnoreObserverErrors( true ); final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); context.observer( callCount::incrementAndGet, Flags.AREZ_OR_EXTERNAL_DEPENDENCIES ); assertEquals( callCount.get(), 1 ); // No observer errors even though the executable accesses no arez dependencies assertEquals( getObserverErrors().size(), 0 ); } @Test public void observer_minimumParameters() { final ArezContext context = Arez.context(); context.setNextNodeId( 22 ); final AtomicInteger callCount = new AtomicInteger(); final Procedure observe = () -> { observeADependency(); callCount.incrementAndGet(); }; final Observer observer = context.observer( observe ); assertNull( observer.getComponent() ); assertEquals( observer.getName(), "Observer@22" ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_UP_TO_DATE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.isComputableValue() ); assertFalse( observer.canObserveLowerPriorityDependencies() ); assertTrue( observer.isKeepAlive() ); assertFalse( observer.nestedActionsAllowed() ); assertNull( observer.getOnDepsChange() ); assertFalse( observer.isApplicationExecutor() ); assertEquals( observer.getObserve(), observe ); assertEquals( callCount.get(), 1 ); } @Test public void autorun_withComponent() { final ArezContext context = Arez.context(); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); final String name = ValueUtil.randomString(); final Observer observer = context.observer( component, name, AbstractArezTest::observeADependency ); assertEquals( observer.getName(), name ); assertEquals( observer.getComponent(), component ); } @Test public void autorun_minimumParametersForMutation() { final ArezContext context = Arez.context(); context.setNextNodeId( 22 ); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.READ_WRITE ); assertEquals( observer.getName(), "Observer@22" ); assertTrue( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_UP_TO_DATE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.nestedActionsAllowed() ); assertFalse( observer.supportsManualSchedule() ); assertEquals( callCount.get(), 1 ); } @SuppressWarnings( "ConstantConditions" ) @Test public void autorun_runImmediately() { final ArezContext context = Arez.context(); final ObservableValue<Object> observableValue = Arez.context().observable(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String name = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( name, () -> { observableValue.reportObserved(); callCount.incrementAndGet(); }, Flags.READ_WRITE ); assertEquals( observer.getName(), name ); assertTrue( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_UP_TO_DATE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.isApplicationExecutor() ); assertEquals( callCount.get(), 1 ); handler.assertEventCount( 8 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), name ) ); handler.assertNextEvent( 
ObserveScheduleEvent.class, e -> assertEquals( e.getObserver().getName(), name ) ); assertObserverReaction( handler, name ); } @SuppressWarnings( "ConstantConditions" ) @Test public void autorun_runImmediately_will_obeyNormalSchedulingPriorities() { final ArezContext context = Arez.context(); final ObservableValue<Object> observableValue = Arez.context().observable(); final Observer observer1 = context.observer( "O1", observableValue::reportObserved ); final Observer observer2 = context.observer( "O2", observableValue::reportObserved, Flags.PRIORITY_HIGH ); final Disposable schedulerLock = context.pauseScheduler(); // Trigger change that should schedule above observers context.safeAction( observableValue::reportChanged ); final Observer observer3 = context.observer( "O3", observableValue::reportObserved ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); schedulerLock.dispose(); handler.assertEventCount( 6 * 3 ); assertObserverReaction( handler, observer2.getName() ); assertObserverReaction( handler, observer1.getName() ); assertObserverReaction( handler, observer3.getName() ); } private void assertObserverReaction( @Nonnull final TestSpyEventHandler handler, @Nonnull final String name ) { handler.assertNextEvent( ObserveStartEvent.class, e -> assertEquals( e.getObserver().getName(), name ) ); handler.assertNextEvent( ActionStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TransactionStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( ActionCompleteEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( ObserveCompleteEvent.class, e -> assertEquals( e.getObserver().getName(), name ) ); } @Test public void autorun_highPriority() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency, Flags.PRIORITY_HIGH ); assertEquals( observer.getTask().getPriority(), Priority.HIGH ); } @Test public void autorun_canObserveLowerPriorityDependencies() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency, Flags.OBSERVE_LOWER_PRIORITY_DEPENDENCIES ); assertTrue( observer.canObserveLowerPriorityDependencies() ); } @Test public void autorun_nestedActionsAllowed() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency, Flags.NESTED_ACTIONS_ALLOWED ); assertTrue( observer.nestedActionsAllowed() ); } @Test public void observer_areArezDependenciesRequired() { final ArezContext context = Arez.context(); final Procedure observe = AbstractArezTest::observeADependency; assertFalse( context.observer( observe, Flags.AREZ_OR_EXTERNAL_DEPENDENCIES ).areArezDependenciesRequired() ); assertFalse( context.observer( observe, Flags.AREZ_OR_NO_DEPENDENCIES ).areArezDependenciesRequired() ); assertTrue( context.observer( observe, Flags.AREZ_DEPENDENCIES ).areArezDependenciesRequired() ); } @Test public void autorun_supportsManualSchedule() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency, ValueUtil::randomString ); assertTrue( observer.supportsManualSchedule() ); } @Test public void autorun_notRunImmediately() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = 
new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String name = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( name, () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.RUN_LATER ); assertEquals( observer.getName(), name ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_INACTIVE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.isApplicationExecutor() ); assertEquals( callCount.get(), 0 ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 1L ); handler.assertEventCount( 2 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); handler.assertNextEvent( ObserveScheduleEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); } @Test public void tracker() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String name = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.tracker( null, name, callCount::incrementAndGet, Flags.PRIORITY_HIGH | Flags.OBSERVE_LOWER_PRIORITY_DEPENDENCIES | Flags.NESTED_ACTIONS_ALLOWED | Flags.AREZ_OR_NO_DEPENDENCIES ); assertEquals( observer.getName(), name ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_INACTIVE ); assertNull( observer.getComponent() ); assertEquals( observer.getTask().getPriority(), Priority.HIGH ); assertTrue( observer.canObserveLowerPriorityDependencies() ); assertTrue( observer.isApplicationExecutor() ); assertTrue( observer.nestedActionsAllowed() ); assertFalse( observer.areArezDependenciesRequired() ); assertFalse( observer.supportsManualSchedule() ); assertEquals( callCount.get(), 0 ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); } @Test public void tracker_withComponent() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); final Observer observer = context.tracker( component, name, callCount::incrementAndGet ); assertEquals( observer.getName(), name ); assertEquals( observer.getComponent(), component ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.canObserveLowerPriorityDependencies() ); assertTrue( observer.isApplicationExecutor() ); } @Test public void tracker_minimalParameters() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final int nextNodeId = context.getNextNodeId(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.tracker( callCount::incrementAndGet ); assertEquals( observer.getName(), "Observer@" + nextNodeId ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_INACTIVE ); assertFalse( observer.canObserveLowerPriorityDependencies() ); assertTrue( observer.isApplicationExecutor() ); assertFalse( 
observer.nestedActionsAllowed() ); assertTrue( observer.areArezDependenciesRequired() ); assertFalse( observer.supportsManualSchedule() ); assertEquals( callCount.get(), 0 ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); } @Test public void observer_generates_spyEvent() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.pauseScheduler(); final Observer observer = context.observer( new CountingProcedure() ); handler.assertEventCount( 2 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); handler.assertNextEvent( ObserveScheduleEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); } @Test public void createObservable_no_parameters() { final ArezContext context = Arez.context(); context.setNextNodeId( 22 ); final ObservableValue<?> observableValue = context.observable(); assertNotNull( observableValue.getName() ); assertEquals( observableValue.getName(), "ObservableValue@22" ); assertNull( observableValue.getAccessor() ); assertNull( observableValue.getMutator() ); } @Test public void createObservable() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final ObservableValue<?> observableValue = context.observable( name ); assertEquals( observableValue.getName(), name ); assertNull( observableValue.getAccessor() ); assertNull( observableValue.getMutator() ); } @Test public void createObservable_withIntrospectors() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final PropertyAccessor<String> accessor = () -> ""; final PropertyMutator<String> mutator = v -> { }; final ObservableValue<?> observableValue = context.observable( name, accessor, mutator ); assertEquals( observableValue.getName(), name ); assertEquals( observableValue.getAccessor(), accessor ); assertEquals( observableValue.getMutator(), mutator ); } @Test public void createObservable_withComponent() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); final ObservableValue<String> observableValue = context.observable( component, name ); assertEquals( observableValue.getName(), name ); assertEquals( observableValue.getComponent(), component ); } @Test public void createObservable_spyEventHandlerPresent() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String name = ValueUtil.randomString(); final ObservableValue<?> observableValue = context.observable( name ); assertEquals( observableValue.getName(), name ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObservableValueCreateEvent.class, e -> assertEquals( e.getObservableValue().getName(), observableValue.getName() ) ); } @Test public void createObservable_name_Null() { ArezTestUtil.disableNames(); final ArezContext context = Arez.context(); final ObservableValue<?> observableValue = context.observable( null ); assertNotNull( observableValue ); } @Test public void pauseScheduler() { final ArezContext context = Arez.context(); 
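// Nested pauseScheduler() calls each return their own lock: the lock count tracks them,
// disposing a lock twice is a no-op, and the RUN_LATER observer below only runs once the
// last outstanding lock has been disposed.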
assertFalse( context.isSchedulerPaused() ); assertEquals( context.getSchedulerLockCount(), 0 ); final Disposable lock1 = context.pauseScheduler(); assertEquals( context.getSchedulerLockCount(), 1 ); assertTrue( context.isSchedulerPaused() ); final AtomicInteger callCount = new AtomicInteger(); // This would normally be scheduled and run now but scheduler should be paused context.observer( () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.RUN_LATER ); context.triggerScheduler(); assertEquals( callCount.get(), 0 ); final Disposable lock2 = context.pauseScheduler(); assertEquals( context.getSchedulerLockCount(), 2 ); assertTrue( context.isSchedulerPaused() ); lock2.dispose(); assertEquals( context.getSchedulerLockCount(), 1 ); // Already disposed so this is a noop lock2.dispose(); assertEquals( context.getSchedulerLockCount(), 1 ); assertTrue( context.isSchedulerPaused() ); assertEquals( callCount.get(), 0 ); lock1.dispose(); assertEquals( context.getSchedulerLockCount(), 0 ); assertEquals( callCount.get(), 1 ); assertFalse( context.isSchedulerPaused() ); } @Test public void releaseSchedulerLock_whenNoLock() { assertInvariantFailure( () -> Arez.context().releaseSchedulerLock(), "Arez-0016: releaseSchedulerLock() reduced schedulerLockCount below 0." ); } @Test public void createComponent() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); final String name = ValueUtil.randomString(); assertFalse( context.isComponentPresent( type, id ) ); final Component component = context.component( type, id, name ); assertTrue( context.isComponentPresent( type, id ) ); assertEquals( component.getType(), type ); assertEquals( component.getId(), id ); assertEquals( component.getName(), name ); assertNull( component.getPreDispose() ); assertNull( component.getPostDispose() ); } @Test public void createComponent_includeDisposeHooks() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); final String name = ValueUtil.randomString(); assertFalse( context.isComponentPresent( type, id ) ); final SafeProcedure preDispose = () -> { }; final SafeProcedure postDispose = () -> { }; final Component component = context.component( type, id, name, preDispose, postDispose ); assertTrue( context.isComponentPresent( type, id ) ); assertEquals( component.getType(), type ); assertEquals( component.getId(), id ); assertEquals( component.getName(), name ); assertEquals( component.getPreDispose(), preDispose ); assertEquals( component.getPostDispose(), postDispose ); } @Test public void createComponent_synthesizeNameIfRequired() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); assertFalse( context.isComponentPresent( type, id ) ); final Component component = context.component( type, id ); assertTrue( context.isComponentPresent( type, id ) ); assertEquals( component.getType(), type ); assertEquals( component.getId(), id ); assertEquals( component.getName(), type + "@" + id ); } @Test public void createComponent_spyEventHandlerPresent() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); handler.assertEventCount( 1 ); 
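// The single recorded event should be a ComponentCreateStartEvent naming the new component.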
handler.assertNextEvent( ComponentCreateStartEvent.class, event -> assertEquals( event.getComponentInfo().getName(), component.getName() ) ); } @Test public void createComponent_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); final String name = ValueUtil.randomString(); assertInvariantFailure( () -> context.component( type, id, name ), "Arez-0008: ArezContext.component() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void createComponent_duplicateComponent() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); context.component( type, id, ValueUtil.randomString() ); assertTrue( context.isComponentPresent( type, id ) ); assertInvariantFailure( () -> context.component( type, id, ValueUtil.randomString() ), "Arez-0009: ArezContext.component() invoked for type '" + type + "' and id '" + id + "' but a component already exists for specified type+id." ); } @Test public void isComponentPresent_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); assertInvariantFailure( () -> context.isComponentPresent( type, id ), "Arez-0135: ArezContext.isComponentPresent() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void deregisterComponent_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final Component component = new Component( context, ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString(), null, null ); assertInvariantFailure( () -> context.deregisterComponent( component ), "Arez-0006: ArezContext.deregisterComponent() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void deregisterComponent_componentMisalignment() { final ArezContext context = Arez.context(); final Component component = new Component( context, ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString(), null, null ); final Component component2 = context.component( component.getType(), component.getId(), ValueUtil.randomString() ); assertInvariantFailure( () -> context.deregisterComponent( component ), "Arez-0007: ArezContext.deregisterComponent() invoked for '" + component + "' but was unable to remove specified component from registry. " +
"Actual component removed: " + component2 ); } @Test public void deregisterComponent_removesTypeIfLastOfType() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final Component component = context.component( type, ValueUtil.randomString(), ValueUtil.randomString() ); final Component component2 = context.component( type, ValueUtil.randomString(), ValueUtil.randomString() ); assertEquals( context.findAllComponentTypes().size(), 1 ); assertTrue( context.findAllComponentTypes().contains( type ) ); context.deregisterComponent( component ); assertEquals( context.findAllComponentTypes().size(), 1 ); assertTrue( context.findAllComponentTypes().contains( type ) ); context.deregisterComponent( component2 ); assertEquals( context.findAllComponentTypes().size(), 0 ); } @Test public void component_finders() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id1 = ValueUtil.randomString(); final String id2 = ValueUtil.randomString(); assertEquals( context.findAllComponentTypes().size(), 0 ); assertEquals( context.findAllComponentsByType( type ).size(), 0 ); final Component component = context.component( type, id1, ValueUtil.randomString() ); assertEquals( context.findAllComponentTypes().size(), 1 ); assertTrue( context.findAllComponentTypes().contains( type ) ); assertEquals( context.findAllComponentsByType( ValueUtil.randomString() ).size(), 0 ); assertEquals( context.findAllComponentsByType( type ).size(), 1 ); assertTrue( context.findAllComponentsByType( type ).contains( component ) ); final Component component2 = context.component( type, id2, ValueUtil.randomString() ); assertEquals( context.findAllComponentTypes().size(), 1 ); assertTrue( context.findAllComponentTypes().contains( type ) ); assertEquals( context.findAllComponentsByType( ValueUtil.randomString() ).size(), 0 ); assertEquals( context.findAllComponentsByType( type ).size(), 2 ); assertTrue( context.findAllComponentsByType( type ).contains( component ) ); assertTrue( context.findAllComponentsByType( type ).contains( component2 ) ); assertEquals( context.findComponent( type, id1 ), component ); assertEquals( context.findComponent( type, id2 ), component2 ); assertNull( context.findComponent( type, ValueUtil.randomString() ) ); assertNull( context.findComponent( ValueUtil.randomString(), id2 ) ); } @Test public void findComponent_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); assertInvariantFailure( () -> context.findComponent( type, id ), "Arez-0010: ArezContext.findComponent() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void findAllComponentsByType_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); assertInvariantFailure( () -> context.findAllComponentsByType( type ), "Arez-0011: ArezContext.findAllComponentsByType() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void findAllComponentTypes_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); assertInvariantFailure( context::findAllComponentTypes, "Arez-0012: ArezContext.findAllComponentTypes() invoked when Arez.areNativeComponentsEnabled() returns false."
); } @Test public void registryAccessWhenDisabled() { ArezTestUtil.disableRegistries(); final ArezContext context = Arez.context(); final ObservableValue<Object> observableValue = context.observable(); final ComputableValue<String> computableValue = context.computable( () -> "" ); final Observer observer = context.observer( AbstractArezTest::observeADependency ); final Task task = context.task( ValueUtil::randomString ); assertInvariantFailure( () -> context.registerObservableValue( observableValue ), "Arez-0022: ArezContext.registerObservableValue invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.deregisterObservableValue( observableValue ), "Arez-0024: ArezContext.deregisterObservableValue invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( context::getTopLevelObservables, "Arez-0026: ArezContext.getTopLevelObservables() invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.registerObserver( observer ), "Arez-0027: ArezContext.registerObserver invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.deregisterObserver( observer ), "Arez-0029: ArezContext.deregisterObserver invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( context::getTopLevelObservers, "Arez-0031: ArezContext.getTopLevelObservers() invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.registerComputableValue( computableValue ), "Arez-0032: ArezContext.registerComputableValue invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.deregisterComputableValue( computableValue ), "Arez-0034: ArezContext.deregisterComputableValue invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( context::getTopLevelComputableValues, "Arez-0036: ArezContext.getTopLevelComputableValues() invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.registerTask( task ), "Arez-0214: ArezContext.registerTask invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.deregisterTask( task ), "Arez-0226: ArezContext.deregisterTask invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( context::getTopLevelTasks, "Arez-0228: ArezContext.getTopLevelTasks() invoked when Arez.areRegistriesEnabled() returns false." ); } @Test public void observableRegistry() { final ArezContext context = Arez.context(); final ObservableValue<Object> observableValue = context.observable(); assertEquals( context.getTopLevelObservables().size(), 1 ); assertEquals( context.getTopLevelObservables().get( observableValue.getName() ), observableValue ); assertInvariantFailure( () -> context.registerObservableValue( observableValue ), "Arez-0023: ArezContext.registerObservableValue invoked with observableValue named '" + observableValue.getName() + "' but an existing observableValue with that name is " + "already registered." ); assertEquals( context.getTopLevelObservables().size(), 1 ); context.getTopLevelObservables().clear(); assertEquals( context.getTopLevelObservables().size(), 0 ); assertInvariantFailure( () -> context.deregisterObservableValue( observableValue ), "Arez-0025: ArezContext.deregisterObservableValue invoked with observableValue named '" + observableValue.getName() + "' but no observableValue with that name is registered." 
); } @Test public void observerRegistry() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency ); assertEquals( context.getTopLevelObservers().size(), 1 ); assertEquals( context.getTopLevelObservers().get( observer.getName() ), observer ); assertInvariantFailure( () -> context.registerObserver( observer ), "Arez-0028: ArezContext.registerObserver invoked with observer named '" + observer.getName() + "' but an existing observer with that name is " + "already registered." ); assertEquals( context.getTopLevelObservers().size(), 1 ); context.getTopLevelObservers().clear(); assertEquals( context.getTopLevelObservers().size(), 0 ); assertInvariantFailure( () -> context.deregisterObserver( observer ), "Arez-0030: ArezContext.deregisterObserver invoked with observer named '" + observer.getName() + "' but no observer with that name is registered." ); } @Test public void computableValueRegistry() { final ArezContext context = Arez.context(); final ComputableValue computableValue = context.computable( () -> "" ); assertEquals( context.getTopLevelComputableValues().size(), 1 ); assertEquals( context.getTopLevelComputableValues().get( computableValue.getName() ), computableValue ); assertInvariantFailure( () -> context.registerComputableValue( computableValue ), "Arez-0033: ArezContext.registerComputableValue invoked with ComputableValue " + "named '" + computableValue.getName() + "' but an existing ComputableValue with that " + "name is already registered." ); assertEquals( context.getTopLevelComputableValues().size(), 1 ); context.getTopLevelComputableValues().clear(); assertEquals( context.getTopLevelComputableValues().size(), 0 ); assertInvariantFailure( () -> context.deregisterComputableValue( computableValue ), "Arez-0035: ArezContext.deregisterComputableValue invoked with " + "ComputableValue named '" + computableValue.getName() + "' but no ComputableValue " + "with that name is registered." ); } @Test public void taskRegistry() { final ArezContext context = Arez.context(); final Task task = context.task( ValueUtil::randomString ); assertEquals( context.getTopLevelTasks().size(), 1 ); assertEquals( context.getTopLevelTasks().get( task.getName() ), task ); assertInvariantFailure( () -> context.registerTask( task ), "Arez-0225: ArezContext.registerTask invoked with Task named '" + task.getName() + "' but an existing Task with that name is already registered." ); assertEquals( context.getTopLevelTasks().size(), 1 ); context.getTopLevelTasks().clear(); assertEquals( context.getTopLevelTasks().size(), 0 ); assertInvariantFailure( () -> context.deregisterTask( task ), "Arez-0227: ArezContext.deregisterTask invoked with Task named '" + task.getName() + "' but no Task with that name is registered." 
); } @Test public void computedValueNotPopulateOtherTopLevelRegistries() { final ArezContext context = Arez.context(); final ComputableValue computableValue = context.computable( () -> "" ); assertEquals( context.getTopLevelComputableValues().size(), 1 ); assertEquals( context.getTopLevelComputableValues().get( computableValue.getName() ), computableValue ); assertEquals( context.getTopLevelTasks().size(), 0 ); assertEquals( context.getTopLevelObservers().size(), 0 ); assertEquals( context.getTopLevelObservables().size(), 0 ); } @Test public void observersNotPopulateOtherTopLevelRegistries() { final ArezContext context = Arez.context(); final Observer observer = context.observer( ValueUtil::randomString, Flags.AREZ_OR_NO_DEPENDENCIES ); assertEquals( context.getTopLevelObservers().size(), 1 ); assertEquals( context.getTopLevelObservers().get( observer.getName() ), observer ); assertEquals( context.getTopLevelTasks().size(), 0 ); assertEquals( context.getTopLevelComputableValues().size(), 0 ); assertEquals( context.getTopLevelObservables().size(), 0 ); } @Test public void scheduleDispose() { final ArezContext context = Arez.context(); final MultiPriorityTaskQueue queue = context.getTaskQueue(); final Observer observer = Arez.context().observer( new CountAndObserveProcedure() ); assertEquals( queue.getOrderedTasks().count(), 0L ); // Pause scheduler so that the task is not invoked immediately final Disposable schedulerLock = context.pauseScheduler(); final String name = observer.getName() + ".dispose"; context.scheduleDispose( name, observer ); assertEquals( queue.getOrderedTasks().count(), 1L ); final CircularBuffer<Task> buffer = queue.getBufferByPriority( 0 ); assertEquals( buffer.size(), 1 ); final Task task = buffer.get( 0 ); assertNotNull( task ); assertEquals( task.getName(), name ); // Ensure that the scheduled dispose is actually in the top level registry assertEquals( context.getTopLevelTasks().size(), 1 ); assertEquals( context.getTopLevelTasks().get( name ), task ); assertFalse( task.isDisposed() ); assertFalse( observer.isDisposed() ); schedulerLock.dispose(); assertTrue( task.isDisposed() ); assertTrue( observer.isDisposed() ); assertEquals( queue.getOrderedTasks().count(), 0L ); assertEquals( context.getTopLevelTasks().size(), 0 ); } @Test public void scheduleDispose_withNoNameWhenNamesEnabled() { final ArezContext context = Arez.context(); final MultiPriorityTaskQueue queue = context.getTaskQueue(); final Observer observer = Arez.context().observer( new CountAndObserveProcedure() ); assertEquals( queue.getOrderedTasks().count(), 0L ); // Pause scheduler so that the task stays in the queue context.pauseScheduler(); context.scheduleDispose( null, observer ); assertEquals( queue.getOrderedTasks().count(), 1L ); final CircularBuffer<Task> buffer = queue.getBufferByPriority( 0 ); assertEquals( buffer.size(), 1 ); final Task task = buffer.get( 0 ); assertNotNull( task ); assertEquals( task.getName(), "Dispose@3" ); // Ensure that the scheduled dispose is actually in the top level registry assertEquals( context.getTopLevelTasks().size(), 1 ); assertEquals( context.getTopLevelTasks().get( "Dispose@3" ), task ); } @Test public void locator() { final ArezContext context = Arez.context(); final Locator locator = context.locator(); assertNotNull( locator ); assertNull( locator.findById( String.class, "21" ) ); final TypeBasedLocator worker = new TypeBasedLocator(); worker.registerLookup( String.class, String::valueOf ); final Disposable disposable = context.registerLocator( worker ); 
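// While the worker locator is registered, findById resolves through it; disposing the
// registration removes the worker and lookups return null again.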
assertEquals( locator.findById( String.class, "21" ), "21" ); disposable.dispose(); assertNull( locator.findById( String.class, "21" ) ); } @Test public void locator_referencesDisabled() { ArezTestUtil.disableReferences(); ArezTestUtil.resetState(); assertInvariantFailure( () -> Arez.context().locator(), "Arez-0192: ArezContext.locator() invoked but Arez.areReferencesEnabled() returned false." ); } @Test public void registerLocator_referencesDisabled() { ArezTestUtil.disableReferences(); ArezTestUtil.resetState(); assertInvariantFailure( () -> Arez.context().registerLocator( new TypeBasedLocator() ), "Arez-0191: ArezContext.registerLocator invoked but Arez.areReferencesEnabled() returned false." ); } @Test public void runInEnvironment() { final ArezContext context = Arez.context(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); final ObservableValue<Object> observable = context.observable(); final AtomicInteger observerCallCount = new AtomicInteger(); context.observer( () -> { observerCallCount.incrementAndGet(); observable.reportObserved(); } ); assertEquals( inEnvironmentCallCount.get(), 1 ); assertEquals( observerCallCount.get(), 1 ); } @Test public void runInEnvironment_nestedCallIgnored() throws Throwable { final ArezContext context = Arez.context(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); final ObservableValue<Object> observable = context.observable(); final AtomicInteger observer1CallCount = new AtomicInteger(); final AtomicInteger observer2CallCount = new AtomicInteger(); context.runInEnvironment( () -> { context.observer( () -> { observer1CallCount.incrementAndGet(); observable.reportObserved(); } ); context.observer( () -> { observer2CallCount.incrementAndGet(); observable.reportObserved(); } ); return null; } ); assertEquals( inEnvironmentCallCount.get(), 1 ); assertEquals( observer1CallCount.get(), 1 ); assertEquals( observer2CallCount.get(), 1 ); } @Test public void runInEnvironment_directNested() throws Throwable { final ArezContext context = Arez.context(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.runInEnvironment( () -> context.runInEnvironment( () -> context.runInEnvironment( () -> "" ) ) ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void setEnvironment_whenEnvironmentsDisabled() { ArezTestUtil.disableEnvironments(); final ArezContext context = Arez.context(); assertInvariantFailure( () -> context.setEnvironment( new CountingEnvironment( new AtomicInteger() ) ), "Arez-0124: ArezContext.setEnvironment() invoked but Arez.areEnvironmentsEnabled() returned false." 
); } @Test public void safeRunInEnvironment_directNested() { final ArezContext context = Arez.context(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeRunInEnvironment( () -> context.safeRunInEnvironment( () -> context.safeRunInEnvironment( () -> "" ) ) ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void runInEnvironment_noEnvironment() throws Throwable { final ArezContext context = Arez.context(); context.runInEnvironment( () -> context.runInEnvironment( () -> context.runInEnvironment( () -> "" ) ) ); } @Test public void safeRunInEnvironment_noEnvironment() { final ArezContext context = Arez.context(); context.safeRunInEnvironment( () -> context.safeRunInEnvironment( () -> context.safeRunInEnvironment( () -> "" ) ) ); } @Test public void task() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final String name = ValueUtil.randomString(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final Disposable task = context.task( name, callCount::incrementAndGet, 0 ); assertEquals( ( (Task) task ).getName(), name ); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 2 ); handler.assertNextEvent( TaskStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TaskCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); } ); handler.reset(); // This does nothing but just to make sure task.dispose(); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertTrue( task.isDisposed() ); handler.assertEventCount( 0 ); } @Test public void task_throwsException() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final String name = ValueUtil.randomString(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String errorMessage = "Blah Error!"; final SafeProcedure work = () -> { callCount.incrementAndGet(); throw new RuntimeException( errorMessage ); }; final Disposable task = context.task( name, work, 0 ); assertEquals( ( (Task) task ).getName(), name ); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 2 ); handler.assertNextEvent( TaskStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TaskCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNotNull( e.getThrowable() ); assertEquals( e.getThrowable().getMessage(), errorMessage ); } ); } @Test public void task_minimalParameters() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final Disposable task = context.task( callCount::incrementAndGet ); final String name = "Task@1"; assertEquals( ( (Task) task ).getName(), name ); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 2 ); handler.assertNextEvent( TaskStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( 
TaskCompleteEvent.class, e -> assertEquals( e.getName(), name ) ); } @Test public void task_RUN_LATER() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final Disposable task = context.task( null, callCount::incrementAndGet, Flags.RUN_LATER ); final String name = "Task@1"; assertEquals( ( (Task) task ).getName(), name ); assertEquals( callCount.get(), 0 ); assertTrue( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 0 ); // Trigger scheduler and allow task to run context.triggerScheduler(); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 2 ); handler.assertNextEvent( TaskStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TaskCompleteEvent.class, e -> assertEquals( e.getName(), name ) ); } @Test public void task_different_PRIORITY() { final ArezContext context = Arez.context(); final ArrayList<String> calls = new ArrayList<>(); context.task( null, () -> calls.add( "1" ), Flags.RUN_LATER | Flags.PRIORITY_LOW ); context.task( null, () -> calls.add( "2" ), Flags.RUN_LATER | Flags.PRIORITY_HIGH ); context.task( null, () -> calls.add( "3" ), Flags.RUN_LATER ); context.task( null, () -> calls.add( "4" ), Flags.RUN_LATER | Flags.PRIORITY_HIGH ); context.task( null, () -> calls.add( "5" ), Flags.RUN_LATER | Flags.PRIORITY_HIGHEST ); context.task( null, () -> calls.add( "6" ), Flags.RUN_LATER | Flags.PRIORITY_LOWEST ); context.task( null, () -> calls.add( "7" ), Flags.RUN_LATER | Flags.PRIORITY_NORMAL ); // Trigger scheduler and allow tasks to run according to priority context.triggerScheduler(); assertEquals( String.join( ",", calls ), "5,2,4,3,7,1,6" ); } @Test public void task_bad_flags() { final ArezContext context = Arez.context(); assertInvariantFailure( () -> context.task( "MyTask", ValueUtil::randomString, Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0224: Task named 'MyTask' passed invalid flags: " + Flags.REQUIRE_NEW_TRANSACTION ); } }
core/src/test/java/arez/ArezContextTest.java
package arez; import arez.component.TypeBasedLocator; import arez.spy.ActionCompleteEvent; import arez.spy.ActionStartEvent; import arez.spy.ComponentCreateStartEvent; import arez.spy.ComponentInfo; import arez.spy.ComputableValueCreateEvent; import arez.spy.ComputeCompleteEvent; import arez.spy.ComputeStartEvent; import arez.spy.ObservableValueChangeEvent; import arez.spy.ObservableValueCreateEvent; import arez.spy.ObserveCompleteEvent; import arez.spy.ObserveScheduleEvent; import arez.spy.ObserveStartEvent; import arez.spy.ObserverCreateEvent; import arez.spy.ObserverErrorEvent; import arez.spy.ObserverInfo; import arez.spy.Priority; import arez.spy.PropertyAccessor; import arez.spy.PropertyMutator; import arez.spy.TaskCompleteEvent; import arez.spy.TaskStartEvent; import arez.spy.TransactionCompleteEvent; import arez.spy.TransactionStartEvent; import java.io.IOException; import java.security.AccessControlException; import java.util.ArrayList; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nonnull; import org.realityforge.guiceyloops.shared.ValueUtil; import org.testng.annotations.Test; import static org.testng.Assert.*; @SuppressWarnings( "Duplicates" ) public class ArezContextTest extends AbstractArezTest { @Test public void generateName() { final ArezContext context = Arez.context(); // Use passed in name assertEquals( context.generateName( "ComputableValue", "MyName" ), "MyName" ); //synthesize name context.setNextNodeId( 1 ); assertEquals( context.generateName( "ComputableValue", null ), "ComputableValue@1" ); assertEquals( context.getNextNodeId(), 2 ); ArezTestUtil.disableNames(); //Ignore name assertNull( context.generateName( "ComputableValue", "MyName" ) ); //Null name also fine assertNull( context.generateName( "ComputableValue", null ) ); } @Test public void triggerScheduler() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); context.observer( () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.RUN_LATER ); assertEquals( callCount.get(), 0 ); context.triggerScheduler(); assertEquals( callCount.get(), 1 ); } @Test public void triggerScheduler_alreadyActive() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); context.observer( () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.RUN_LATER ); assertEquals( callCount.get(), 0 ); context.markSchedulerAsActive(); context.triggerScheduler(); assertEquals( callCount.get(), 0 ); } @Test public void triggerScheduler_inEnvironment() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final AtomicReference<String> environment = new AtomicReference<>(); context.setEnvironment( new Environment() { @Override public <T> T run( @Nonnull final SafeFunction<T> function ) { environment.set( "RED" ); try { return function.call(); } finally { environment.set( null ); } } @Override public <T> T run( @Nonnull final Function<T> function ) throws Throwable { environment.set( "RED" ); try { return function.call(); } finally { environment.set( null ); } } } ); context.observer( () -> { observeADependency(); callCount.incrementAndGet(); assertEquals( environment.get(), "RED" ); }, Flags.RUN_LATER ); assertEquals( callCount.get(), 0 ); assertNull( environment.get() ); context.triggerScheduler(); assertEquals( callCount.get(), 1 ); assertNull( environment.get() ); } @Test public void 
triggerScheduler_inEnvironment_whereEnvironmentSchedulesActions() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final AtomicReference<String> environment = new AtomicReference<>(); final AtomicInteger count = new AtomicInteger( 3 ); final AtomicReference<Observer> observerReference = new AtomicReference<>(); context.setEnvironment( new Environment() { @Override public <T> T run( @Nonnull final SafeFunction<T> function ) { environment.set( "RED" ); T result = function.call(); /* * This simulates the scenario where something like react4j has only scheduler that will * react to changes in arez and potentially re-schedule arez events. */ if ( count.decrementAndGet() > 0 ) { context.safeAction( () -> observerReference.get().setState( Flags.STATE_STALE ), Flags.NO_VERIFY_ACTION_REQUIRED ); } environment.set( null ); return result; } @Override public <T> T run( @Nonnull final Function<T> function ) throws Throwable { environment.set( "RED" ); T result = function.call(); /* * This simulates the scenario where something like react4j has only scheduler that will * react to changes in arez and potentially re-schedule arez events. */ if ( count.decrementAndGet() > 0 ) { context.safeAction( () -> observerReference.get().setState( Flags.STATE_STALE ), Flags.NO_VERIFY_ACTION_REQUIRED ); } environment.set( null ); return result; } } ); final Observer observer = context.observer( () -> { final ObservableValue<Object> observableValue = Arez.context().observable(); observableValue.reportObserved(); callCount.incrementAndGet(); assertEquals( environment.get(), "RED" ); }, Flags.RUN_LATER ); observerReference.set( observer ); assertEquals( callCount.get(), 0 ); assertNull( environment.get() ); context.triggerScheduler(); assertEquals( callCount.get(), 3 ); assertEquals( count.get(), 0 ); assertNull( environment.get() ); } @Test public void isReadOnlyTransactionActive() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); context.action( () -> { assertTrue( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); observeADependency(); context.action( () -> { assertTrue( context.isTransactionActive() ); assertTrue( context.isReadOnlyTransactionActive() ); observeADependency(); }, Flags.READ_ONLY ); } ); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); } @Test public void isWriteTransactionActive() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); context.action( () -> { assertTrue( context.isTransactionActive() ); assertTrue( context.isReadWriteTransactionActive() ); observeADependency(); context.action( () -> { assertTrue( context.isTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); observeADependency(); }, Flags.READ_ONLY ); } ); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); } @Test public void isTrackingTransactionActive() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); assertFalse( context.isTrackingTransactionActive() ); context.action( () -> { assertTrue( context.isTransactionActive() ); assertFalse( 
context.isReadOnlyTransactionActive() ); assertTrue( context.isReadWriteTransactionActive() ); observeADependency(); } ); final Observer tracker = context.tracker( () -> assertFalse( context.isTrackingTransactionActive() ) ); context.observe( tracker, () -> { observeADependency(); assertTrue( context.isTransactionActive() ); assertTrue( context.isReadOnlyTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); assertTrue( context.isTrackingTransactionActive() ); } ); assertFalse( context.isTransactionActive() ); assertFalse( context.isReadOnlyTransactionActive() ); assertFalse( context.isReadWriteTransactionActive() ); assertFalse( context.isTrackingTransactionActive() ); } @SuppressWarnings( "unused" ) @Test public void requireNewTransaction_false() throws Throwable { final ArezContext context = Arez.context(); context.action( () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); context.action( () -> assertNotEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); final int result1 = context.action( () -> { assertNotEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); context.safeAction( () -> assertNotEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); final int result2 = context.safeAction( () -> { assertNotEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); context.action( () -> assertEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED ); final int result3 = context.action( () -> { assertEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED ); context.safeAction( () -> assertEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED ); final int result4 = context.safeAction( () -> { assertEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED ); }, Flags.NO_VERIFY_ACTION_REQUIRED ); } @SuppressWarnings( "unused" ) @Test public void nestedAction_allowed() throws Throwable { final ArezContext context = Arez.context(); final AtomicInteger updateCalled = new AtomicInteger(); final Observer tracker = context.tracker( updateCalled::incrementAndGet, Flags.READ_WRITE | Flags.NESTED_ACTIONS_ALLOWED ); context.observe( tracker, () -> { observeADependency(); assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); context.action( () -> assertNotEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); final int result1 = context.action( () -> { assertNotEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); context.safeAction( () -> assertNotEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); final int result2 = context.safeAction( () -> { assertNotEquals( context.getTransaction(), transaction ); return 0; }, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ); context.action( () -> assertEquals( context.getTransaction(), transaction ), Flags.NO_VERIFY_ACTION_REQUIRED ); } ); } @SuppressWarnings( "unused" ) @Test public void nestedAction_notAllowed() throws Throwable { 
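// The tracker below is created with NESTED_ACTIONS_DISALLOWED, so every attempt to nest an
// action (or safeAction) inside its observe block should fail the Arez-0187 invariant.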
final ArezContext context = Arez.context(); final AtomicInteger updateCalled = new AtomicInteger(); final Observer tracker = context.tracker( updateCalled::incrementAndGet, Flags.READ_WRITE | Flags.NESTED_ACTIONS_DISALLOWED ); context.observe( tracker, () -> { observeADependency(); assertInvariantFailure( () -> context.action( "A1", AbstractArezTest::observeADependency, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0187: Attempting to nest action named 'A1' inside transaction named 'Observer@1' created by an observer that does not allow nested actions." ); assertInvariantFailure( () -> context.action( "A2", () -> 1, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0187: Attempting to nest action named 'A2' inside transaction named 'Observer@1' created by an observer that does not allow nested actions." ); assertInvariantFailure( () -> context.safeAction( "A3", AbstractArezTest::observeADependency, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0187: Attempting to nest action named 'A3' inside transaction named 'Observer@1' created by an observer that does not allow nested actions." ); assertInvariantFailure( () -> context.safeAction( "A4", () -> 1, Flags.NO_VERIFY_ACTION_REQUIRED | Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0187: Attempting to nest action named 'A4' inside transaction named 'Observer@1' created by an observer that does not allow nested actions." ); } ); } @Test public void verifyActionFlags() { final Procedure executable = () -> { }; assertInvariantFailure( () -> Arez.context().action( executable, Flags.DEACTIVATE_ON_UNOBSERVE ), "Arez-0212: Flags passed to action 'Action@1' include some " + "unexpected flags set: " + Flags.DEACTIVATE_ON_UNOBSERVE ); } @Test public void verifyActionFlags_badEnvironmentFlags() { final Procedure executable = () -> { }; assertInvariantFailure( () -> Arez.context() .action( executable, Flags.ENVIRONMENT_REQUIRED | Flags.ENVIRONMENT_NOT_REQUIRED ), "Arez-0125: Flags passed to action 'Action@1' include both ENVIRONMENT_REQUIRED and ENVIRONMENT_NOT_REQUIRED." ); } @Test public void verifyActionFlags_badTransactionFlags() { final Procedure executable = () -> { }; assertInvariantFailure( () -> Arez.context() .action( executable, Flags.READ_ONLY | Flags.READ_WRITE ), "Arez-0126: Flags passed to action 'Action@1' include both READ_ONLY and READ_WRITE." ); } @Test public void verifyActionFlags_badVerifyAction() { final Procedure executable = () -> { }; assertInvariantFailure( () -> Arez.context() .action( executable, Flags.VERIFY_ACTION_REQUIRED | Flags.NO_VERIFY_ACTION_REQUIRED ), "Arez-0127: Flags passed to action 'Action@1' include both VERIFY_ACTION_REQUIRED and NO_VERIFY_ACTION_REQUIRED." ); } @Test public void action_function() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final String expectedValue = ValueUtil.randomString(); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String v0 = context.action( name, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), name ); assertNull( transaction.getPrevious() ); assertEquals( transaction.getContext(), context ); assertEquals( transaction.getId(), nextNodeId ); assertFalse( transaction.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); //Not tracking so no state updated assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); return expectedValue; }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); //ObservableValue still not updated assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); assertEquals( observableValue.getObservers().size(), 0 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), v0 ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_NO_REPORT_RESULT() throws Throwable { final ArezContext context = Arez.context(); final ObservableValue<?> observableValue = context.observable(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.action( () -> { observableValue.reportObserved(); return ValueUtil.randomString(); }, Flags.NO_REPORT_RESULT ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class ); handler.assertNextEvent( TransactionStartEvent.class ); handler.assertNextEvent( TransactionCompleteEvent.class ); handler.assertNextEvent( ActionCompleteEvent.class, e -> assertNull( e.getResult() ) ); } @Test public void safeAction_NO_REPORT_RESULT() { final ArezContext context = Arez.context(); final ObservableValue<?> observableValue = context.observable(); final TestSpyEventHandler handler = new 
      TestSpyEventHandler();
    context.getSpy().addSpyEventHandler( handler );

    context.safeAction( () -> {
      observableValue.reportObserved();
      return ValueUtil.randomString();
    }, Flags.NO_REPORT_RESULT );

    handler.assertEventCount( 4 );
    handler.assertNextEvent( ActionStartEvent.class );
    handler.assertNextEvent( TransactionStartEvent.class );
    handler.assertNextEvent( TransactionCompleteEvent.class );
    handler.assertNextEvent( ActionCompleteEvent.class, e -> assertNull( e.getResult() ) );
  }

  @Test
  public void action_function_throwsException()
  {
    final ArezContext context = Arez.context();

    assertFalse( context.isTransactionActive() );
    assertInvariantFailure( context::getTransaction,
                            "Arez-0117: Attempting to get current transaction but no transaction is active." );

    final String name = ValueUtil.randomString();
    final IOException ioException = new IOException();

    final String param1 = "";
    final Object param2 = null;
    final int param3 = 3;

    final TestSpyEventHandler handler = new TestSpyEventHandler();
    context.getSpy().addSpyEventHandler( handler );

    assertThrows( IOException.class,
                  () -> context.action( name, () -> {
                    throw ioException;
                  }, 0, new Object[]{ param1, param2, param3 } ) );

    assertFalse( context.isTransactionActive() );

    handler.assertEventCount( 4 );
    handler.assertNextEvent( ActionStartEvent.class, e -> {
      assertEquals( e.getName(), name );
      assertFalse( e.isTracked() );
      final Object[] parameters = e.getParameters();
      assertEquals( parameters.length, 3 );
      assertEquals( parameters[ 0 ], param1 );
      assertEquals( parameters[ 1 ], param2 );
      assertEquals( parameters[ 2 ], param3 );
    } );
    handler.assertNextEvent( TransactionStartEvent.class, e -> {
      assertEquals( e.getName(), name );
      assertTrue( e.isMutation() );
      assertNull( e.getTracker() );
    } );
    handler.assertNextEvent( TransactionCompleteEvent.class, e -> {
      assertEquals( e.getName(), name );
      assertTrue( e.isMutation() );
      assertNull( e.getTracker() );
    } );
    handler.assertNextEvent( ActionCompleteEvent.class, e -> {
      assertEquals( e.getName(), name );
      assertEquals( e.getThrowable(), ioException );
      assertTrue( e.returnsResult() );
      assertNull( e.getResult() );
      assertFalse( e.isTracked() );
      final Object[] parameters = e.getParameters();
      assertEquals( parameters.length, 3 );
      assertEquals( parameters[ 0 ], param1 );
      assertEquals( parameters[ 1 ], param2 );
      assertEquals( parameters[ 2 ], param3 );
    } );
  }

  @Test
  public void action_Environment_Required()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    // Scheduler paused otherwise reactions will run in environment and upset our environment call count
    context.pauseScheduler();

    final AtomicInteger inEnvironmentCallCount = new AtomicInteger();
    context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) );

    assertEquals( inEnvironmentCallCount.get(), 0 );

    context.action( AbstractArezTest::observeADependency, Flags.ENVIRONMENT_REQUIRED );

    assertEquals( inEnvironmentCallCount.get(), 1 );
  }

  @Test
  public void action_Environment_Not_Required()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    // Scheduler paused otherwise reactions will run in environment and upset our environment call count
    context.pauseScheduler();

    final AtomicInteger inEnvironmentCallCount = new AtomicInteger();
    context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) );

    assertEquals( inEnvironmentCallCount.get(), 0 );

    context.action( AbstractArezTest::observeADependency, Flags.ENVIRONMENT_NOT_REQUIRED );

    assertEquals( inEnvironmentCallCount.get(), 0 );
  }

  @Test
  public void action_Environment_Default()
throws Throwable { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.action( AbstractArezTest::observeADependency ); assertEquals( inEnvironmentCallCount.get(), 0 ); } @Test public void safeAction_Environment_Required() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( AbstractArezTest::observeADependency, Flags.ENVIRONMENT_REQUIRED ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void safeAction_Environment_Not_Required() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( AbstractArezTest::observeADependency, Flags.ENVIRONMENT_NOT_REQUIRED ); assertEquals( inEnvironmentCallCount.get(), 0 ); } @Test public void safeAction_Environment_Default() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( AbstractArezTest::observeADependency ); assertEquals( inEnvironmentCallCount.get(), 0 ); } @Test public void action_function_NameButNoMutationVariant() throws Throwable { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); context.action( name, () -> { observeADependency(); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), name ); return ValueUtil.randomString(); } ); } @Test public void action_procedure_verifyActionRequired_false() throws Throwable { final Procedure executable = ValueUtil::randomString; Arez.context().action( executable, Flags.NO_VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void action_procedure_verifyActionRequired_true_butInvariantsDisabled() throws Throwable { ArezTestUtil.noCheckInvariants(); final Procedure executable = ValueUtil::randomString; Arez.context().action( executable, Flags.VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void action_procedure_verifyActionRequired_true() { final Procedure procedure = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().action( "X", procedure, Flags.VERIFY_ACTION_REQUIRED ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." 
); } @Test public void action_procedure_verifyActionRequired_true_is_default() { assertInvariantFailure( () -> Arez.context().action( "X", (Procedure) ValueUtil::randomString ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void action_function_verifyActionRequired_false() throws Throwable { Arez.context().action( (Function<String>) ValueUtil::randomString, Flags.NO_VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void action_function_verifyActionRequired_true_butInvariantsDisabled() throws Throwable { ArezTestUtil.noCheckInvariants(); Arez.context().action( (Function<String>) ValueUtil::randomString, Flags.VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void action_function_verifyActionRequired_true() { final Function<String> function = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().action( "X", function, Flags.VERIFY_ACTION_REQUIRED ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void action_function_verifyActionRequired_true_is_default() { final Function<String> function = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().action( "X", function ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void safeAction_procedure_verifyActionRequired_false() { final SafeProcedure procedure = ValueUtil::randomString; Arez.context().safeAction( ValueUtil.randomString(), procedure, Flags.NO_VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void safeAction_procedure_verifyActionRequired_true_butInvariantsDisabled() { ArezTestUtil.noCheckInvariants(); final SafeProcedure executable = ValueUtil::randomString; Arez.context().safeAction( ValueUtil.randomString(), executable, Flags.VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void safeAction_procedure_verifyActionRequired_true() { final SafeProcedure procedure = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().safeAction( "X", procedure, Flags.VERIFY_ACTION_REQUIRED ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void safeAction_procedure_verifyActionRequired_true_is_default() { assertInvariantFailure( () -> Arez.context().safeAction( "X", (SafeProcedure) ValueUtil::randomString ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." 
); } @Test public void safeAction_function_verifyActionRequired_false() { Arez.context().safeAction( (SafeFunction<String>) ValueUtil::randomString, Flags.NO_VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void safeAction_function_verifyActionRequired_true_butInvariantsDisabled() { ArezTestUtil.noCheckInvariants(); Arez.context().safeAction( (SafeFunction<String>) ValueUtil::randomString, Flags.VERIFY_ACTION_REQUIRED ); // If we get to here then we performed an action where no read or write occurred } @Test public void safeAction_function_verifyActionRequired_true() { final SafeFunction<String> function = ValueUtil::randomString; assertInvariantFailure( () -> Arez.context().safeAction( "X", function, Flags.VERIFY_ACTION_REQUIRED ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void safeAction_function_verifyActionRequired_true_is_default() { assertInvariantFailure( () -> Arez.context().safeAction( "X", (SafeFunction<String>) ValueUtil::randomString ), "Arez-0185: Action named 'X' completed but no reads, writes, schedules, reportStales or reportPossiblyChanged occurred within the scope of the action." ); } @Test public void action_function_minimalParameters() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final String expectedValue = ValueUtil.randomString(); final ObservableValue<Object> observableValue = context.observable(); final int nextNodeId = context.getNextNodeId(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String v0 = context.action( () -> { observableValue.reportObserved(); assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), "Action@" + nextNodeId ); assertTrue( transaction.isMutation() ); return expectedValue; } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 0 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), v0 ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 0 ); } ); } @Test public void track_function() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final String expectedValue = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer tracker = context.tracker( callCount::incrementAndGet, Flags.READ_WRITE ); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new 
TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String v0 = context.observe( tracker, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), tracker.getName() ); assertEquals( transaction.isMutation(), tracker.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); // Tracking so state updated final ArrayList<ObservableValue<?>> observableValues = transaction.getObservableValues(); assertNotNull( observableValues ); assertEquals( observableValues.size(), 1 ); assertEquals( observableValue.getObservers().size(), 0 ); assertEquals( observableValue.getLastTrackerTransactionId(), nextNodeId ); return expectedValue; }, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); context.getSpy().removeSpyEventHandler( handler ); assertEquals( v0, expectedValue ); assertEquals( observableValue.getLastTrackerTransactionId(), 0 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); // Reaction not called as the function sets up initial tracking assertEquals( callCount.get(), 0 ); context.action( observableValue::reportChanged ); assertEquals( callCount.get(), 1 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertTrue( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertTrue( e.isMutation() ); final ObserverInfo info = e.getTracker(); assertNotNull( info ); assertEquals( info.getName(), tracker.getName() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertTrue( e.isMutation() ); final ObserverInfo info = e.getTracker(); assertNotNull( info ); assertEquals( info.getName(), tracker.getName() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), v0 ); assertTrue( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void observe_function_no_parameters() throws Throwable { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger callCount = new AtomicInteger(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); final Observer observer = context.tracker( callCount::incrementAndGet, Flags.AREZ_OR_NO_DEPENDENCIES | Flags.READ_WRITE ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final int result = context.observe( observer, () -> { final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), 
observer.getName() ); return 23; } ); assertEquals( inEnvironmentCallCount.get(), 0 ); assertEquals( result, 23 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertTrue( e.isTracked() ); assertEquals( e.getParameters().length, 0 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertTrue( e.isMutation() ); final ObserverInfo info = e.getTracker(); assertNotNull( info ); assertEquals( info.getName(), observer.getName() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertTrue( e.isMutation() ); final ObserverInfo info = e.getTracker(); assertNotNull( info ); assertEquals( info.getName(), observer.getName() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), result ); assertTrue( e.isTracked() ); assertEquals( e.getParameters().length, 0 ); } ); } @Test public void observe_NO_REPORT_RESULT() throws Throwable { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.tracker( callCount::incrementAndGet, Flags.AREZ_OR_NO_DEPENDENCIES | Flags.NO_REPORT_RESULT ); assertTrue( observer.noReportResults() ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final int result = context.observe( observer, () -> 23 ); assertEquals( result, 23 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class ); handler.assertNextEvent( TransactionStartEvent.class ); handler.assertNextEvent( TransactionCompleteEvent.class ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertTrue( e.returnsResult() ); assertNull( e.getResult() ); } ); } @Test public void observe_environment_Required() throws Throwable { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger callCount = new AtomicInteger(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); final Observer observer = context.tracker( callCount::incrementAndGet, Flags.AREZ_OR_NO_DEPENDENCIES | Flags.ENVIRONMENT_REQUIRED ); context.observe( observer, () -> 23 ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void track_function_passingNonTracker() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertInvariantFailure( () -> context.observe( observer, callCount::incrementAndGet ), "Arez-0017: Attempted to invoke observe(..) on observer named '" + observer.getName() + "' but observer is not configured to use an application executor." ); assertEquals( callCount.get(), 0 ); } @Test public void action_safeFunction() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final String expectedValue = ValueUtil.randomString(); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String v0 = context.safeAction( name, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), name ); assertNull( transaction.getPrevious() ); assertEquals( transaction.getContext(), context ); assertEquals( transaction.getId(), nextNodeId ); assertFalse( transaction.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); //Not tracking so no state updated assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); return expectedValue; }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); //ObservableValue still not updated assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); assertEquals( observableValue.getObservers().size(), 0 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); assertTrue( e.returnsResult() ); assertEquals( e.getResult(), v0 ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_safeFunction_throws_Exception() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final AccessControlException secException = new AccessControlException( "" ); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); assertThrows( AccessControlException.class, () -> context.safeAction( name, () -> { throw secException; }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ) ); assertFalse( context.isTransactionActive() ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertEquals( e.getThrowable(), secException ); assertTrue( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_safeFunction_minimalParameters() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final int nextNodeId = context.currentNextTransactionId(); final String expectedValue = ValueUtil.randomString(); final String v0 = context.safeAction( () -> { observeADependency(); assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), "Action@" + nextNodeId ); assertTrue( transaction.isMutation() ); return expectedValue; } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); } @Test public void action_safeFunction_NameButNoMutationVariant() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); context.safeAction( name, () -> { observeADependency(); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), name ); return ValueUtil.randomString(); } ); } @Test public void track_safeFunction() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final String expectedValue = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer tracker = context.tracker( callCount::incrementAndGet ); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String v0 = context.safeObserve( tracker, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), tracker.getName() ); assertEquals( transaction.isMutation(), tracker.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( 
nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); // Tracking so state updated final ArrayList<ObservableValue<?>> observableValues = transaction.getObservableValues(); assertNotNull( observableValues ); assertEquals( observableValues.size(), 1 ); assertEquals( observableValue.getObservers().size(), 0 ); assertEquals( observableValue.getLastTrackerTransactionId(), nextNodeId ); return expectedValue; } ); assertFalse( context.isTransactionActive() ); assertEquals( v0, expectedValue ); assertEquals( observableValue.getLastTrackerTransactionId(), 0 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); // Reaction not called as the function sets up initial tracking assertEquals( callCount.get(), 0 ); context.action( observableValue::reportChanged ); assertEquals( callCount.get(), 1 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); } @Test public void track_safeFunction_passingNonTracker() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertInvariantFailure( () -> context.safeObserve( observer, callCount::incrementAndGet ), "Arez-0018: Attempted to invoke safeObserve(..) on observer named '" + observer.getName() + "' but observer is not configured to use an application executor." ); assertEquals( callCount.get(), 0 ); } @Test public void safeAction_safeProcedure_minimalParameters() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final int nextNodeId = context.getNextNodeId(); context.safeAction( () -> { observeADependency(); assertTrue( context.isTransactionActive() ); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), "Action@" + nextNodeId ); } ); assertFalse( context.isTransactionActive() ); } @Test public void safeAction_safeProcedure_NameButNoMutationVariant() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); context.safeAction( name, () -> { observeADependency(); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), name ); } ); } @Test public void action_safeProcedure_throws_Exception() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final AccessControlException secException = new AccessControlException( "" ); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final SafeProcedure procedure = () -> { throw secException; }; assertThrows( AccessControlException.class, () -> context.safeAction( name, procedure, 0, new Object[]{ param1, param2, param3 } ) ); assertFalse( context.isTransactionActive() ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertTrue( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertEquals( e.getThrowable(), secException ); assertFalse( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void track_safeProcedure() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final AtomicInteger callCount = new AtomicInteger(); final Observer tracker = context.tracker( callCount::incrementAndGet ); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); context.safeObserve( tracker, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), tracker.getName() ); assertEquals( transaction.isMutation(), tracker.isMutation() ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); // Tracking so state updated final ArrayList<ObservableValue<?>> observableValues = transaction.getObservableValues(); assertNotNull( observableValues ); assertEquals( observableValues.size(), 1 ); assertEquals( observableValue.getObservers().size(), 0 ); assertEquals( observableValue.getLastTrackerTransactionId(), nextNodeId ); } ); assertFalse( context.isTransactionActive() ); assertEquals( observableValue.getLastTrackerTransactionId(), 0 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); // Reaction not called as the function sets up initial tracking assertEquals( callCount.get(), 0 ); context.action( observableValue::reportChanged ); assertEquals( callCount.get(), 1 ); assertEquals( observableValue.getObservers().size(), 1 ); assertEquals( tracker.getDependencies().size(), 1 ); } @Test public void track_safeProcedure_passingNonTracker() { final ArezContext context = Arez.context(); final 
AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( new CountAndObserveProcedure() ); final SafeProcedure procedure = callCount::incrementAndGet; assertInvariantFailure( () -> context.safeObserve( observer, procedure ), "Arez-0020: Attempted to invoke safeObserve(..) on observer named '" + observer.getName() + "' but observer is not configured to use an application executor." ); assertEquals( callCount.get(), 0 ); } @Test public void action_procedure_NameButNoMutationVariant() throws Throwable { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); context.action( name, () -> { observeADependency(); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), name ); } ); } @Test public void action_procedure_minimalParameters() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); final int nextNodeId = context.currentNextTransactionId(); context.action( () -> { observeADependency(); assertTrue( context.isTransactionActive() ); assertTrue( context.getTransaction().isMutation() ); assertEquals( context.getTransaction().getName(), "Action@" + nextNodeId ); } ); assertFalse( context.isTransactionActive() ); } @Test public void track_procedure_passingNonTracker() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( new CountAndObserveProcedure() ); final Procedure procedure = callCount::incrementAndGet; assertInvariantFailure( () -> context.observe( observer, procedure ), "Arez-0019: Attempted to invoke observe(..) on observer named '" + observer.getName() + "' but observer is not configured to use an application executor." 
    );
    assertEquals( callCount.get(), 0 );
  }

  @Test
  public void track_procedure()
    throws Throwable
  {
    final ArezContext context = Arez.context();

    assertFalse( context.isTransactionActive() );

    final AtomicInteger callCount = new AtomicInteger();
    final Observer tracker = context.tracker( callCount::incrementAndGet );

    final ObservableValue<?> observableValue = context.observable();
    assertEquals( observableValue.getObservers().size(), 0 );

    final int nextNodeId = context.currentNextTransactionId();

    context.observe( tracker, () -> {
      assertTrue( context.isTransactionActive() );
      final Transaction transaction = context.getTransaction();
      assertEquals( transaction.getName(), tracker.getName() );
      assertEquals( transaction.isMutation(), tracker.isMutation() );

      assertEquals( observableValue.getObservers().size(), 0 );
      assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() );

      observableValue.reportObserved();

      // Tracking so state updated
      final ArrayList<ObservableValue<?>> observableValues = transaction.getObservableValues();
      assertNotNull( observableValues );
      assertEquals( observableValues.size(), 1 );
      assertEquals( observableValue.getObservers().size(), 0 );
      assertEquals( observableValue.getLastTrackerTransactionId(), nextNodeId );
    } );

    assertFalse( context.isTransactionActive() );

    assertEquals( observableValue.getLastTrackerTransactionId(), 0 );
    assertEquals( observableValue.getObservers().size(), 1 );
    assertEquals( tracker.getDependencies().size(), 1 );

    // Reaction not called as the function sets up initial tracking
    assertEquals( callCount.get(), 0 );

    context.action( observableValue::reportChanged );

    assertEquals( callCount.get(), 1 );

    assertEquals( observableValue.getObservers().size(), 1 );
    assertEquals( tracker.getDependencies().size(), 1 );
  }

  @Test
  public void nonTrackingSafeProcedureObservingSingleObservable()
  {
    final ArezContext context = Arez.context();

    assertFalse( context.isTransactionActive() );
    assertInvariantFailure( context::getTransaction,
                            "Arez-0117: Attempting to get current transaction but no transaction is active."
); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.safeAction( name, () -> { assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), name ); assertNull( transaction.getPrevious() ); assertEquals( transaction.getContext(), context ); assertEquals( transaction.getId(), nextNodeId ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); //Not tracking so no state updated assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); //ObservableValue still not updated assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); assertEquals( observableValue.getObservers().size(), 0 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); assertFalse( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_procedure() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final ObservableValue<?> observableValue = context.observable(); assertEquals( observableValue.getObservers().size(), 0 ); final int nextNodeId = context.currentNextTransactionId(); final String name = ValueUtil.randomString(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final ObservableValue<Object> observableValue1 = Arez.context().observable(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.action( name, () -> { observableValue1.reportObserved(); assertTrue( context.isTransactionActive() ); final Transaction transaction = context.getTransaction(); assertEquals( transaction.getName(), name ); assertNull( transaction.getPrevious() ); assertEquals( transaction.getContext(), context ); assertEquals( transaction.getId(), nextNodeId ); assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); observableValue.reportObserved(); //Not tracking so no state updated assertEquals( observableValue.getObservers().size(), 0 ); assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); }, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ); assertFalse( context.isTransactionActive() ); //ObservableValue still not updated assertNotEquals( nextNodeId, observableValue.getLastTrackerTransactionId() ); assertEquals( observableValue.getObservers().size(), 0 ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); assertFalse( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void action_procedure_throwsException() { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertInvariantFailure( context::getTransaction, "Arez-0117: Attempting to get current transaction but no transaction is active." 
); final String name = ValueUtil.randomString(); final IOException ioException = new IOException(); final String param1 = ""; final Object param2 = null; final int param3 = 3; final ObservableValue observableValue = Arez.context().observable(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final Procedure procedure = () -> { observableValue.reportObserved(); throw ioException; }; assertThrows( IOException.class, () -> context.action( name, procedure, Flags.READ_ONLY, new Object[]{ param1, param2, param3 } ) ); assertFalse( context.isTransactionActive() ); handler.assertEventCount( 4 ); handler.assertNextEvent( ActionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); handler.assertNextEvent( TransactionStartEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertFalse( e.isMutation() ); assertNull( e.getTracker() ); } ); handler.assertNextEvent( ActionCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertEquals( e.getThrowable(), ioException ); assertFalse( e.returnsResult() ); assertNull( e.getResult() ); assertFalse( e.isTracked() ); final Object[] parameters = e.getParameters(); assertEquals( parameters.length, 3 ); assertEquals( parameters[ 0 ], param1 ); assertEquals( parameters[ 1 ], param2 ); assertEquals( parameters[ 2 ], param3 ); } ); } @Test public void nestedProceduresAccessingSameObservable() throws Throwable { final ArezContext context = Arez.context(); assertFalse( context.isTransactionActive() ); assertThrows( context::getTransaction ); final int nextNodeId = context.currentNextTransactionId(); final String name = ValueUtil.randomString(); final String name2 = ValueUtil.randomString(); context.action( name, () -> { observeADependency(); assertTrue( context.isTransactionActive() ); final Transaction transaction1 = context.getTransaction(); assertEquals( transaction1.getName(), name ); assertNull( transaction1.getPrevious() ); assertEquals( transaction1.getContext(), context ); assertEquals( transaction1.getId(), nextNodeId ); assertTrue( transaction1.isRootTransaction() ); assertEquals( transaction1.getRootTransaction(), transaction1 ); context.action( name2, () -> { observeADependency(); assertTrue( context.isTransactionActive() ); final Transaction transaction2 = context.getTransaction(); assertEquals( transaction2.getName(), name2 ); assertEquals( transaction2.getPrevious(), transaction1 ); assertEquals( transaction2.getContext(), context ); assertEquals( transaction2.getId(), nextNodeId + 1 ); assertFalse( transaction2.isRootTransaction() ); assertEquals( transaction2.getRootTransaction(), transaction1 ); }, Flags.REQUIRE_NEW_TRANSACTION ); final Transaction transaction1b = context.getTransaction(); assertEquals( transaction1b.getName(), name ); assertNull( transaction1b.getPrevious() ); assertEquals( transaction1b.getContext(), context ); assertEquals( transaction1b.getId(), nextNodeId ); assertTrue( transaction1b.isRootTransaction() ); assertEquals( transaction1b.getRootTransaction(), transaction1b ); } ); assertFalse( context.isTransactionActive() ); } @Test public void 
nextNodeId() { final ArezContext context = Arez.context(); assertEquals( context.currentNextTransactionId(), 1 ); assertEquals( context.nextTransactionId(), 1 ); assertEquals( context.currentNextTransactionId(), 2 ); } @Test public void observer_with_onDepsUpdated() { final ArezContext context = Arez.context(); final ObservableValue<Object> observable = context.observable(); final AtomicInteger observedCallCount = new AtomicInteger(); final AtomicInteger onDepsChangeCallCount = new AtomicInteger(); final String name = ValueUtil.randomString(); context.observer( name, () -> { observedCallCount.incrementAndGet(); observable.reportObserved(); assertEquals( context.getTransaction().getName(), name ); }, onDepsChangeCallCount::incrementAndGet ); assertEquals( onDepsChangeCallCount.get(), 0 ); context.safeAction( observable::reportChanged ); assertEquals( onDepsChangeCallCount.get(), 1 ); } @Test public void observer_withComponent_and_onDepsUpdated() { final ArezContext context = Arez.context(); final ObservableValue<Object> observable = context.observable(); final AtomicInteger observeCallCount = new AtomicInteger(); final AtomicInteger onDepsChangeCallCount = new AtomicInteger(); final Component component = context.component( ValueUtil.randomString(), 22 ); final String name = ValueUtil.randomString(); final Observer observer = context.observer( component, name, () -> { observeCallCount.incrementAndGet(); observable.reportObserved(); assertEquals( context.getTransaction().getName(), name ); }, onDepsChangeCallCount::incrementAndGet ); assertEquals( onDepsChangeCallCount.get(), 0 ); final ComponentInfo componentInfo = observer.asInfo().getComponent(); assertNotNull( componentInfo ); assertEquals( componentInfo.getName(), component.getName() ); context.safeAction( observable::reportChanged ); assertEquals( onDepsChangeCallCount.get(), 1 ); } @Test public void observerErrorHandler() { final ArezContext context = Arez.context(); // Clear out handler added as part of test infrastructure context.getObserverErrorHandlerSupport().getObserverErrorHandlers().clear(); final ObserverError observerError = ObserverError.REACTION_ERROR; final Throwable throwable = new Throwable(); final Procedure action = new NoopProcedure(); final Observer observer = context.observer( ValueUtil.randomString(), action, Flags.READ_WRITE ); final AtomicInteger callCount = new AtomicInteger(); final ObserverErrorHandler handler = ( o, e, t ) -> { callCount.incrementAndGet(); assertEquals( o, observer ); assertEquals( e, observerError ); assertEquals( t, throwable ); }; context.addObserverErrorHandler( handler ); assertEquals( context.getObserverErrorHandlerSupport().getObserverErrorHandlers().size(), 1 ); assertTrue( context.getObserverErrorHandlerSupport().getObserverErrorHandlers().contains( handler ) ); assertEquals( callCount.get(), 0 ); context.reportObserverError( observer, observerError, throwable ); assertEquals( callCount.get(), 1 ); context.removeObserverErrorHandler( handler ); assertEquals( context.getObserverErrorHandlerSupport().getObserverErrorHandlers().size(), 0 ); context.reportObserverError( observer, observerError, throwable ); assertEquals( callCount.get(), 1 ); } @Test public void reportObserverError_when_spyEventHandler_present() { final ArezContext context = Arez.context(); // Clear out handler added as part of test infrastructure context.getObserverErrorHandlerSupport().getObserverErrorHandlers().clear(); final ObserverError observerError = ObserverError.REACTION_ERROR; final Throwable throwable = new 
Throwable(); final Procedure action = new NoopProcedure(); final Observer observer = context.observer( action ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.reportObserverError( observer, observerError, throwable ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObserverErrorEvent.class, event -> { assertEquals( event.getObserver().getName(), observer.getName() ); assertEquals( event.getError(), observerError ); assertEquals( event.getThrowable(), throwable ); } ); } @Test public void addObserverErrorHandler_whenDisabled() { ArezTestUtil.disableObserverErrorHandlers(); final ObserverErrorHandler handler = ( o, e, t ) -> { }; assertInvariantFailure( () -> Arez.context().addObserverErrorHandler( handler ), "Arez-0182: ArezContext.addObserverErrorHandler() invoked when Arez.areObserverErrorHandlersEnabled() returns false." ); } @Test public void removeObserverErrorHandler_whenDisabled() { ArezTestUtil.disableObserverErrorHandlers(); final ArezContext context = Arez.context(); final ObserverErrorHandler handler = ( o, e, t ) -> { }; assertInvariantFailure( () -> context.removeObserverErrorHandler( handler ), "Arez-0181: ArezContext.removeObserverErrorHandler() invoked when Arez.areObserverErrorHandlersEnabled() returns false." ); } @Test public void getSpy_whenSpiesDisabled() { ArezTestUtil.disableSpies(); final ArezContext context = Arez.context(); assertInvariantFailure( context::getSpy, "Arez-0021: Attempting to get Spy but spies are not enabled." ); } @Test public void scheduleReaction() { final ArezContext context = Arez.context(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); context.scheduleReaction( observer ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 1L ); assertTrue( context.getTaskQueue().getOrderedTasks().anyMatch( o -> o == observer.getTask() ) ); } @Test public void scheduleReaction_shouldAbortInReadOnlyTransaction() { final ArezContext context = Arez.context(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); assertInvariantFailure( () -> { final Procedure executable = () -> context.scheduleReaction( observer ); context.action( executable, Flags.READ_ONLY ); }, "Arez-0013: Observer named '" + observer.getName() + "' attempted to be scheduled " + "during read-only transaction." ); } @Test public void scheduleReaction_shouldAbortInReadWriteOwnedTransaction() { final ArezContext context = Arez.context(); final Observer derivation = context.computable( () -> "" ).getObserver(); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); setCurrentTransaction( derivation ); assertInvariantFailure( () -> context.scheduleReaction( derivation ), "Arez-0014: Observer named '" + derivation.getName() + "' attempted to schedule itself " + "during read-only tracking transaction. Observers that are supporting ComputableValue " + "instances must not schedule self." 
); } @Test public void scheduleReaction_generates_spyEvent() { final ArezContext context = Arez.context(); final Observer observer = context.observer( new CountAndObserveProcedure() ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.scheduleReaction( observer ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 1L ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObserveScheduleEvent.class, event -> assertEquals( event.getObserver().getName(), observer.getName() ) ); } @Test public void computableValue() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final Procedure onActivate = ValueUtil::randomString; final Procedure onDeactivate = ValueUtil::randomString; final Procedure onStale = ValueUtil::randomString; final ComputableValue<String> computableValue = context.computable( null, name, function, onActivate, onDeactivate, onStale, Flags.PRIORITY_HIGH ); assertEquals( computableValue.getName(), name ); assertEquals( computableValue.getContext(), context ); assertFalse( computableValue.getObserver().isKeepAlive() ); assertTrue( computableValue.getObserver().areArezDependenciesRequired() ); assertFalse( computableValue.getObserver().isEnvironmentRequired() ); assertEquals( computableValue.getObservableValue().getName(), name ); assertEquals( computableValue.getOnActivate(), onActivate ); assertEquals( computableValue.getOnDeactivate(), onDeactivate ); assertEquals( computableValue.getOnStale(), onStale ); assertEquals( computableValue.getObserver().getName(), name ); assertEquals( computableValue.getObserver().getTask().getPriority(), Priority.HIGH ); assertFalse( computableValue.getObserver().canObserveLowerPriorityDependencies() ); } @Test public void computable_with_NO_REPORT_RESULT() { final ArezContext context = Arez.context(); final ObservableValue<Object> observable = Arez.context().observable(); final SafeFunction<String> function = () -> { observable.reportObserved(); return ""; }; final ComputableValue<String> computableValue = context.computable( function, Flags.NO_REPORT_RESULT ); assertTrue( computableValue.getObserver().noReportResults() ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.safeAction( computableValue::get ); handler.assertEventCount( 9 ); handler.assertNextEvent( ActionStartEvent.class ); handler.assertNextEvent( TransactionStartEvent.class ); handler.assertNextEvent( ComputeStartEvent.class ); handler.assertNextEvent( TransactionStartEvent.class ); handler.assertNextEvent( ObservableValueChangeEvent.class ); handler.assertNextEvent( TransactionCompleteEvent.class ); handler.assertNextEvent( ComputeCompleteEvent.class, e -> assertNull( e.getResult() ) ); handler.assertNextEvent( TransactionCompleteEvent.class ); handler.assertNextEvent( ActionCompleteEvent.class ); } @Test public void computableValue_withComponent() { final ArezContext context = Arez.context(); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); final String name = ValueUtil.randomString(); final ComputableValue<String> computableValue = context.computable( component, name, () -> "", null, null, null ); assertEquals( computableValue.getName(), name ); assertEquals( 
computableValue.getComponent(), component ); } @Test public void computableValue_Environment_Required() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final ComputableValue<String> computableValue = context.computable( function, Flags.ENVIRONMENT_REQUIRED ); assertTrue( computableValue.getObserver().isEnvironmentRequired() ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( computableValue::get ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void computableValue_Environment_NotRequired() { final ArezContext context = Arez.context(); // Scheduler paused otherwise reactions will run in environment and upset our environment call count context.pauseScheduler(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final ComputableValue<String> computableValue = context.computable( function, Flags.ENVIRONMENT_NOT_REQUIRED ); assertFalse( computableValue.getObserver().isEnvironmentRequired() ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeAction( computableValue::get ); assertEquals( inEnvironmentCallCount.get(), 0 ); } @Test public void computableValue_canObserveLowerPriorityDependencies() { final ComputableValue<String> computableValue = Arez.context().computable( () -> "", Flags.OBSERVE_LOWER_PRIORITY_DEPENDENCIES ); assertTrue( computableValue.getObserver().canObserveLowerPriorityDependencies() ); } @Test public void computableValue_mayNotAccessArezState() { final ArezContext context = Arez.context(); assertFalse( context.computable( () -> "", Flags.AREZ_OR_NO_DEPENDENCIES ) .getObserver() .areArezDependenciesRequired() ); assertFalse( context.computable( () -> "", Flags.AREZ_OR_EXTERNAL_DEPENDENCIES ) .getObserver() .areArezDependenciesRequired() ); } @Test public void computableValue_withKeepAliveAndRunImmediately() { final ArezContext context = Arez.context(); final AtomicInteger calls = new AtomicInteger(); final SafeFunction<String> action = () -> { observeADependency(); calls.incrementAndGet(); return ""; }; final ComputableValue<String> computableValue = context.computable( action, Flags.KEEPALIVE | Flags.RUN_NOW ); assertTrue( computableValue.getObserver().isKeepAlive() ); assertEquals( calls.get(), 1 ); } @Test public void computableValue_withKeepAliveAndNoRunImmediately() { final ArezContext context = Arez.context(); final AtomicInteger calls = new AtomicInteger(); final SafeFunction<String> action = () -> { observeADependency(); calls.incrementAndGet(); return ""; }; final ComputableValue<String> computableValue = context.computable( action, Flags.KEEPALIVE | Flags.RUN_LATER ); assertTrue( computableValue.getObserver().isKeepAlive() ); assertEquals( calls.get(), 0 ); context.triggerScheduler(); assertEquals( calls.get(), 1 ); } @Test public void computableValue_pass_no_hooks() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final ComputableValue<String> computableValue = context.computable( 
name, function ); assertEquals( computableValue.getName(), name ); assertEquals( computableValue.getContext(), context ); assertEquals( computableValue.getObserver().getName(), name ); assertEquals( computableValue.getObservableValue().getName(), name ); assertNull( computableValue.getOnActivate() ); assertNull( computableValue.getOnDeactivate() ); assertNull( computableValue.getOnStale() ); assertEquals( computableValue.getObserver().getTask().getPriority(), Priority.NORMAL ); } @Test public void computableValue_minimumParameters() { final ArezContext context = Arez.context(); context.setNextNodeId( 22 ); final SafeFunction<String> function = () -> { observeADependency(); return ""; }; final ComputableValue<String> computableValue = context.computable( function ); final String name = "ComputableValue@22"; assertEquals( computableValue.getName(), name ); assertEquals( computableValue.getContext(), context ); assertEquals( computableValue.getObserver().getName(), name ); assertEquals( computableValue.getObservableValue().getName(), name ); assertNull( computableValue.getOnActivate() ); assertNull( computableValue.getOnDeactivate() ); assertNull( computableValue.getOnStale() ); assertEquals( computableValue.getObserver().getTask().getPriority(), Priority.NORMAL ); assertFalse( computableValue.getObserver().canObserveLowerPriorityDependencies() ); } @Test public void computableValue_generates_spyEvent() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final ComputableValue<String> computableValue = context.computable( ValueUtil.randomString(), () -> { observeADependency(); return ""; } ); handler.assertEventCount( 1 ); handler.assertNextEvent( ComputableValueCreateEvent.class, event -> assertEquals( event.getComputableValue().getName(), computableValue.getName() ) ); } @Test public void observer_noObservers() { setIgnoreObserverErrors( true ); Arez.context().setNextNodeId( 22 ); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = Arez.context().observer( callCount::incrementAndGet ); assertEquals( observer.getName(), "Observer@22" ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_UP_TO_DATE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertEquals( callCount.get(), 1 ); assertEquals( getObserverErrors().size(), 1 ); assertEquals( getObserverErrors().get( 0 ), "Observer: Observer@22 Error: REACTION_ERROR java.lang.IllegalStateException: Arez-0172: Observer named 'Observer@22' that does not use an external executor completed observe function but is not observing any properties. As a result the observer will never be rescheduled." 
); } @Test public void autorun_noObservers_manualReportStaleAllowed() { setIgnoreObserverErrors( true ); final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); context.observer( callCount::incrementAndGet, Flags.AREZ_OR_EXTERNAL_DEPENDENCIES ); assertEquals( callCount.get(), 1 ); // No observer errors even though the executable accesses no arez dependencies assertEquals( getObserverErrors().size(), 0 ); } @Test public void observer_minimumParameters() { final ArezContext context = Arez.context(); context.setNextNodeId( 22 ); final AtomicInteger callCount = new AtomicInteger(); final Procedure observe = () -> { observeADependency(); callCount.incrementAndGet(); }; final Observer observer = context.observer( observe ); assertNull( observer.getComponent() ); assertEquals( observer.getName(), "Observer@22" ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_UP_TO_DATE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.isComputableValue() ); assertFalse( observer.canObserveLowerPriorityDependencies() ); assertTrue( observer.isKeepAlive() ); assertFalse( observer.nestedActionsAllowed() ); assertNull( observer.getOnDepsChange() ); assertFalse( observer.isApplicationExecutor() ); assertEquals( observer.getObserve(), observe ); assertEquals( callCount.get(), 1 ); } @Test public void autorun_withComponent() { final ArezContext context = Arez.context(); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); final String name = ValueUtil.randomString(); final Observer observer = context.observer( component, name, AbstractArezTest::observeADependency ); assertEquals( observer.getName(), name ); assertEquals( observer.getComponent(), component ); } @Test public void autorun_minimumParametersForMutation() { final ArezContext context = Arez.context(); context.setNextNodeId( 22 ); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.READ_WRITE ); assertEquals( observer.getName(), "Observer@22" ); assertTrue( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_UP_TO_DATE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.nestedActionsAllowed() ); assertFalse( observer.supportsManualSchedule() ); assertEquals( callCount.get(), 1 ); } @SuppressWarnings( "ConstantConditions" ) @Test public void autorun_runImmediately() { final ArezContext context = Arez.context(); final ObservableValue<Object> observableValue = Arez.context().observable(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String name = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( name, () -> { observableValue.reportObserved(); callCount.incrementAndGet(); }, Flags.READ_WRITE ); assertEquals( observer.getName(), name ); assertTrue( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_UP_TO_DATE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.isApplicationExecutor() ); assertEquals( callCount.get(), 1 ); handler.assertEventCount( 8 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), name ) ); handler.assertNextEvent( 
ObserveScheduleEvent.class, e -> assertEquals( e.getObserver().getName(), name ) ); assertObserverReaction( handler, name ); } @SuppressWarnings( "ConstantConditions" ) @Test public void autorun_runImmediately_will_obeyNormalSchedulingPriorities() { final ArezContext context = Arez.context(); final ObservableValue<Object> observableValue = Arez.context().observable(); final Observer observer1 = context.observer( "O1", observableValue::reportObserved ); final Observer observer2 = context.observer( "O2", observableValue::reportObserved, Flags.PRIORITY_HIGH ); final Disposable schedulerLock = context.pauseScheduler(); // Trigger change that should schedule above observers context.safeAction( observableValue::reportChanged ); final Observer observer3 = context.observer( "O3", observableValue::reportObserved ); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); schedulerLock.dispose(); handler.assertEventCount( 6 * 3 ); assertObserverReaction( handler, observer2.getName() ); assertObserverReaction( handler, observer1.getName() ); assertObserverReaction( handler, observer3.getName() ); } private void assertObserverReaction( @Nonnull final TestSpyEventHandler handler, @Nonnull final String name ) { handler.assertNextEvent( ObserveStartEvent.class, e -> assertEquals( e.getObserver().getName(), name ) ); handler.assertNextEvent( ActionStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TransactionStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TransactionCompleteEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( ActionCompleteEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( ObserveCompleteEvent.class, e -> assertEquals( e.getObserver().getName(), name ) ); } @Test public void autorun_highPriority() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency, Flags.PRIORITY_HIGH ); assertEquals( observer.getTask().getPriority(), Priority.HIGH ); } @Test public void autorun_canObserveLowerPriorityDependencies() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency, Flags.OBSERVE_LOWER_PRIORITY_DEPENDENCIES ); assertTrue( observer.canObserveLowerPriorityDependencies() ); } @Test public void autorun_nestedActionsAllowed() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency, Flags.NESTED_ACTIONS_ALLOWED ); assertTrue( observer.nestedActionsAllowed() ); } @Test public void observer_areArezDependenciesRequired() { final ArezContext context = Arez.context(); final Procedure observe = AbstractArezTest::observeADependency; assertFalse( context.observer( observe, Flags.AREZ_OR_EXTERNAL_DEPENDENCIES ).areArezDependenciesRequired() ); assertFalse( context.observer( observe, Flags.AREZ_OR_NO_DEPENDENCIES ).areArezDependenciesRequired() ); assertTrue( context.observer( observe, Flags.AREZ_DEPENDENCIES ).areArezDependenciesRequired() ); } @Test public void autorun_supportsManualSchedule() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency, ValueUtil::randomString ); assertTrue( observer.supportsManualSchedule() ); } @Test public void autorun_notRunImmediately() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = 
new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String name = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.observer( name, () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.RUN_LATER ); assertEquals( observer.getName(), name ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_INACTIVE ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.isApplicationExecutor() ); assertEquals( callCount.get(), 0 ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 1L ); handler.assertEventCount( 2 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); handler.assertNextEvent( ObserveScheduleEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); } @Test public void tracker() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String name = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.tracker( null, name, callCount::incrementAndGet, Flags.PRIORITY_HIGH | Flags.OBSERVE_LOWER_PRIORITY_DEPENDENCIES | Flags.NESTED_ACTIONS_ALLOWED | Flags.AREZ_OR_NO_DEPENDENCIES ); assertEquals( observer.getName(), name ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_INACTIVE ); assertNull( observer.getComponent() ); assertEquals( observer.getTask().getPriority(), Priority.HIGH ); assertTrue( observer.canObserveLowerPriorityDependencies() ); assertTrue( observer.isApplicationExecutor() ); assertTrue( observer.nestedActionsAllowed() ); assertFalse( observer.areArezDependenciesRequired() ); assertFalse( observer.supportsManualSchedule() ); assertEquals( callCount.get(), 0 ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); } @Test public void tracker_withComponent() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final AtomicInteger callCount = new AtomicInteger(); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); final Observer observer = context.tracker( component, name, callCount::incrementAndGet ); assertEquals( observer.getName(), name ); assertEquals( observer.getComponent(), component ); assertEquals( observer.getTask().getPriority(), Priority.NORMAL ); assertFalse( observer.canObserveLowerPriorityDependencies() ); assertTrue( observer.isApplicationExecutor() ); } @Test public void tracker_minimalParameters() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final int nextNodeId = context.getNextNodeId(); final AtomicInteger callCount = new AtomicInteger(); final Observer observer = context.tracker( callCount::incrementAndGet ); assertEquals( observer.getName(), "Observer@" + nextNodeId ); assertFalse( observer.isMutation() ); assertEquals( observer.getState(), Flags.STATE_INACTIVE ); assertFalse( observer.canObserveLowerPriorityDependencies() ); assertTrue( observer.isApplicationExecutor() ); assertFalse( 
observer.nestedActionsAllowed() ); assertTrue( observer.areArezDependenciesRequired() ); assertFalse( observer.supportsManualSchedule() ); assertEquals( callCount.get(), 0 ); assertEquals( context.getTaskQueue().getOrderedTasks().count(), 0L ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); } @Test public void observer_generates_spyEvent() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); context.pauseScheduler(); final Observer observer = context.observer( new CountingProcedure() ); handler.assertEventCount( 2 ); handler.assertNextEvent( ObserverCreateEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); handler.assertNextEvent( ObserveScheduleEvent.class, e -> assertEquals( e.getObserver().getName(), observer.getName() ) ); } @Test public void createObservable_no_parameters() { final ArezContext context = Arez.context(); context.setNextNodeId( 22 ); final ObservableValue<?> observableValue = context.observable(); assertNotNull( observableValue.getName() ); assertEquals( observableValue.getName(), "ObservableValue@22" ); assertNull( observableValue.getAccessor() ); assertNull( observableValue.getMutator() ); } @Test public void createObservable() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final ObservableValue<?> observableValue = context.observable( name ); assertEquals( observableValue.getName(), name ); assertNull( observableValue.getAccessor() ); assertNull( observableValue.getMutator() ); } @Test public void createObservable_withIntrospectors() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final PropertyAccessor<String> accessor = () -> ""; final PropertyMutator<String> mutator = v -> { }; final ObservableValue<?> observableValue = context.observable( name, accessor, mutator ); assertEquals( observableValue.getName(), name ); assertEquals( observableValue.getAccessor(), accessor ); assertEquals( observableValue.getMutator(), mutator ); } @Test public void createObservable_withComponent() { final ArezContext context = Arez.context(); final String name = ValueUtil.randomString(); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); final ObservableValue<String> observableValue = context.observable( component, name ); assertEquals( observableValue.getName(), name ); assertEquals( observableValue.getComponent(), component ); } @Test public void createObservable_spyEventHandlerPresent() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String name = ValueUtil.randomString(); final ObservableValue<?> observableValue = context.observable( name ); assertEquals( observableValue.getName(), name ); handler.assertEventCount( 1 ); handler.assertNextEvent( ObservableValueCreateEvent.class, e -> assertEquals( e.getObservableValue().getName(), observableValue.getName() ) ); } @Test public void createObservable_name_Null() { ArezTestUtil.disableNames(); final ArezContext context = Arez.context(); final ObservableValue<?> observableValue = context.observable( null ); assertNotNull( observableValue ); } @Test public void pauseScheduler() { final ArezContext context = Arez.context(); 
assertFalse( context.isSchedulerPaused() ); assertEquals( context.getSchedulerLockCount(), 0 ); final Disposable lock1 = context.pauseScheduler(); assertEquals( context.getSchedulerLockCount(), 1 ); assertTrue( context.isSchedulerPaused() ); final AtomicInteger callCount = new AtomicInteger(); // This would normally be scheduled and run now but scheduler should be paused context.observer( () -> { observeADependency(); callCount.incrementAndGet(); }, Flags.RUN_LATER ); context.triggerScheduler(); assertEquals( callCount.get(), 0 ); final Disposable lock2 = context.pauseScheduler(); assertEquals( context.getSchedulerLockCount(), 2 ); assertTrue( context.isSchedulerPaused() ); lock2.dispose(); assertEquals( context.getSchedulerLockCount(), 1 ); // Already disposed so this is a noop lock2.dispose(); assertEquals( context.getSchedulerLockCount(), 1 ); assertTrue( context.isSchedulerPaused() ); assertEquals( callCount.get(), 0 ); lock1.dispose(); assertEquals( context.getSchedulerLockCount(), 0 ); assertEquals( callCount.get(), 1 ); assertFalse( context.isSchedulerPaused() ); } @Test public void releaseSchedulerLock_whenNoLock() { assertInvariantFailure( () -> Arez.context().releaseSchedulerLock(), "Arez-0016: releaseSchedulerLock() reduced schedulerLockCount below 0." ); } @Test public void createComponent() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); final String name = ValueUtil.randomString(); assertFalse( context.isComponentPresent( type, id ) ); final Component component = context.component( type, id, name ); assertTrue( context.isComponentPresent( type, id ) ); assertEquals( component.getType(), type ); assertEquals( component.getId(), id ); assertEquals( component.getName(), name ); assertNull( component.getPreDispose() ); assertNull( component.getPostDispose() ); } @Test public void createComponent_includeDisposeHooks() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); final String name = ValueUtil.randomString(); assertFalse( context.isComponentPresent( type, id ) ); final SafeProcedure preDispose = () -> { }; final SafeProcedure postDispose = () -> { }; final Component component = context.component( type, id, name, preDispose, postDispose ); assertTrue( context.isComponentPresent( type, id ) ); assertEquals( component.getType(), type ); assertEquals( component.getId(), id ); assertEquals( component.getName(), name ); assertEquals( component.getPreDispose(), preDispose ); assertEquals( component.getPostDispose(), postDispose ); } @Test public void createComponent_synthesizeNameIfRequired() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); assertFalse( context.isComponentPresent( type, id ) ); final Component component = context.component( type, id ); assertTrue( context.isComponentPresent( type, id ) ); assertEquals( component.getType(), type ); assertEquals( component.getId(), id ); assertEquals( component.getName(), type + "@" + id ); } @Test public void createComponent_spyEventHandlerPresent() { final ArezContext context = Arez.context(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final Component component = context.component( ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString() ); handler.assertEventCount( 1 ); 
handler.assertNextEvent( ComponentCreateStartEvent.class, event -> assertEquals( event.getComponentInfo().getName(), component.getName() ) ); } @Test public void createComponent_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); final String name = ValueUtil.randomString(); assertInvariantFailure( () -> context.component( type, id, name ), "Arez-0008: ArezContext.component() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void createComponent_duplicateComponent() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); context.component( type, id, ValueUtil.randomString() ); assertTrue( context.isComponentPresent( type, id ) ); assertInvariantFailure( () -> context.component( type, id, ValueUtil.randomString() ), "Arez-0009: ArezContext.component() invoked for type '" + type + "' and id '" + id + "' but a component already exists for specified type+id." ); } @Test public void isComponentPresent_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); assertInvariantFailure( () -> context.isComponentPresent( type, id ), "Arez-0135: ArezContext.isComponentPresent() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void deregisterComponent_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final Component component = new Component( context, ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString(), null, null ); assertInvariantFailure( () -> context.deregisterComponent( component ), "Arez-0006: ArezContext.deregisterComponent() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void deregisterComponent_componentMisalignment() { final ArezContext context = Arez.context(); final Component component = new Component( context, ValueUtil.randomString(), ValueUtil.randomString(), ValueUtil.randomString(), null, null ); final Component component2 = context.component( component.getType(), component.getId(), ValueUtil.randomString() ); assertInvariantFailure( () -> context.deregisterComponent( component ), "Arez-0007: ArezContext.deregisterComponent() invoked for '" + component + "' but was unable to remove specified component from registry. 
" + "Actual component removed: " + component2 ); } @Test public void deregisterComponent_removesTypeIfLastOfType() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final Component component = context.component( type, ValueUtil.randomString(), ValueUtil.randomString() ); final Component component2 = context.component( type, ValueUtil.randomString(), ValueUtil.randomString() ); assertEquals( context.findAllComponentTypes().size(), 1 ); assertTrue( context.findAllComponentTypes().contains( type ) ); context.deregisterComponent( component ); assertEquals( context.findAllComponentTypes().size(), 1 ); assertTrue( context.findAllComponentTypes().contains( type ) ); context.deregisterComponent( component2 ); assertEquals( context.findAllComponentTypes().size(), 0 ); } @Test public void component_finders() { final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id1 = ValueUtil.randomString(); final String id2 = ValueUtil.randomString(); assertEquals( context.findAllComponentTypes().size(), 0 ); assertEquals( context.findAllComponentsByType( type ).size(), 0 ); final Component component = context.component( type, id1, ValueUtil.randomString() ); assertEquals( context.findAllComponentTypes().size(), 1 ); assertTrue( context.findAllComponentTypes().contains( type ) ); assertEquals( context.findAllComponentsByType( ValueUtil.randomString() ).size(), 0 ); assertEquals( context.findAllComponentsByType( type ).size(), 1 ); assertTrue( context.findAllComponentsByType( type ).contains( component ) ); final Component component2 = context.component( type, id2, ValueUtil.randomString() ); assertEquals( context.findAllComponentTypes().size(), 1 ); assertTrue( context.findAllComponentTypes().contains( type ) ); assertEquals( context.findAllComponentsByType( ValueUtil.randomString() ).size(), 0 ); assertEquals( context.findAllComponentsByType( type ).size(), 2 ); assertTrue( context.findAllComponentsByType( type ).contains( component ) ); assertTrue( context.findAllComponentsByType( type ).contains( component2 ) ); assertEquals( context.findComponent( type, id1 ), component ); assertEquals( context.findComponent( type, id2 ), component2 ); assertNull( context.findComponent( type, ValueUtil.randomString() ) ); assertNull( context.findComponent( ValueUtil.randomString(), id2 ) ); } @Test public void findComponent_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); final String id = ValueUtil.randomString(); assertInvariantFailure( () -> context.findComponent( type, id ), "Arez-0010: ArezContext.findComponent() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void findAllComponentsByType_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); final String type = ValueUtil.randomString(); assertInvariantFailure( () -> context.findAllComponentsByType( type ), "Arez-0011: ArezContext.findAllComponentsByType() invoked when Arez.areNativeComponentsEnabled() returns false." ); } @Test public void findAllComponentTypes_nativeComponentsDisabled() { ArezTestUtil.disableNativeComponents(); final ArezContext context = Arez.context(); assertInvariantFailure( context::findAllComponentTypes, "Arez-0012: ArezContext.findAllComponentTypes() invoked when Arez.areNativeComponentsEnabled() returns false." 
); } @Test public void registryAccessWhenDisabled() { ArezTestUtil.disableRegistries(); final ArezContext context = Arez.context(); final ObservableValue<Object> observableValue = context.observable(); final ComputableValue<String> computableValue = context.computable( () -> "" ); final Observer observer = context.observer( AbstractArezTest::observeADependency ); final Task task = context.task( ValueUtil::randomString ); assertInvariantFailure( () -> context.registerObservableValue( observableValue ), "Arez-0022: ArezContext.registerObservableValue invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.deregisterObservableValue( observableValue ), "Arez-0024: ArezContext.deregisterObservableValue invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( context::getTopLevelObservables, "Arez-0026: ArezContext.getTopLevelObservables() invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.registerObserver( observer ), "Arez-0027: ArezContext.registerObserver invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.deregisterObserver( observer ), "Arez-0029: ArezContext.deregisterObserver invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( context::getTopLevelObservers, "Arez-0031: ArezContext.getTopLevelObservers() invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.registerComputableValue( computableValue ), "Arez-0032: ArezContext.registerComputableValue invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.deregisterComputableValue( computableValue ), "Arez-0034: ArezContext.deregisterComputableValue invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( context::getTopLevelComputableValues, "Arez-0036: ArezContext.getTopLevelComputableValues() invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.registerTask( task ), "Arez-0214: ArezContext.registerTask invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( () -> context.deregisterTask( task ), "Arez-0226: ArezContext.deregisterTask invoked when Arez.areRegistriesEnabled() returns false." ); assertInvariantFailure( context::getTopLevelTasks, "Arez-0228: ArezContext.getTopLevelTasks() invoked when Arez.areRegistriesEnabled() returns false." ); } @Test public void observableRegistry() { final ArezContext context = Arez.context(); final ObservableValue<Object> observableValue = context.observable(); assertEquals( context.getTopLevelObservables().size(), 1 ); assertEquals( context.getTopLevelObservables().get( observableValue.getName() ), observableValue ); assertInvariantFailure( () -> context.registerObservableValue( observableValue ), "Arez-0023: ArezContext.registerObservableValue invoked with observableValue named '" + observableValue.getName() + "' but an existing observableValue with that name is " + "already registered." ); assertEquals( context.getTopLevelObservables().size(), 1 ); context.getTopLevelObservables().clear(); assertEquals( context.getTopLevelObservables().size(), 0 ); assertInvariantFailure( () -> context.deregisterObservableValue( observableValue ), "Arez-0025: ArezContext.deregisterObservableValue invoked with observableValue named '" + observableValue.getName() + "' but no observableValue with that name is registered." 
); } @Test public void observerRegistry() { final ArezContext context = Arez.context(); final Observer observer = context.observer( AbstractArezTest::observeADependency ); assertEquals( context.getTopLevelObservers().size(), 1 ); assertEquals( context.getTopLevelObservers().get( observer.getName() ), observer ); assertInvariantFailure( () -> context.registerObserver( observer ), "Arez-0028: ArezContext.registerObserver invoked with observer named '" + observer.getName() + "' but an existing observer with that name is " + "already registered." ); assertEquals( context.getTopLevelObservers().size(), 1 ); context.getTopLevelObservers().clear(); assertEquals( context.getTopLevelObservers().size(), 0 ); assertInvariantFailure( () -> context.deregisterObserver( observer ), "Arez-0030: ArezContext.deregisterObserver invoked with observer named '" + observer.getName() + "' but no observer with that name is registered." ); } @Test public void computableValueRegistry() { final ArezContext context = Arez.context(); final ComputableValue computableValue = context.computable( () -> "" ); assertEquals( context.getTopLevelComputableValues().size(), 1 ); assertEquals( context.getTopLevelComputableValues().get( computableValue.getName() ), computableValue ); assertInvariantFailure( () -> context.registerComputableValue( computableValue ), "Arez-0033: ArezContext.registerComputableValue invoked with ComputableValue " + "named '" + computableValue.getName() + "' but an existing ComputableValue with that " + "name is already registered." ); assertEquals( context.getTopLevelComputableValues().size(), 1 ); context.getTopLevelComputableValues().clear(); assertEquals( context.getTopLevelComputableValues().size(), 0 ); assertInvariantFailure( () -> context.deregisterComputableValue( computableValue ), "Arez-0035: ArezContext.deregisterComputableValue invoked with " + "ComputableValue named '" + computableValue.getName() + "' but no ComputableValue " + "with that name is registered." ); } @Test public void taskRegistry() { final ArezContext context = Arez.context(); final Task task = context.task( ValueUtil::randomString ); assertEquals( context.getTopLevelTasks().size(), 1 ); assertEquals( context.getTopLevelTasks().get( task.getName() ), task ); assertInvariantFailure( () -> context.registerTask( task ), "Arez-0225: ArezContext.registerTask invoked with Task named '" + task.getName() + "' but an existing Task with that name is already registered." ); assertEquals( context.getTopLevelTasks().size(), 1 ); context.getTopLevelTasks().clear(); assertEquals( context.getTopLevelTasks().size(), 0 ); assertInvariantFailure( () -> context.deregisterTask( task ), "Arez-0227: ArezContext.deregisterTask invoked with Task named '" + task.getName() + "' but no Task with that name is registered." 
); } @Test public void computedValueNotPopulateOtherTopLevelRegistries() { final ArezContext context = Arez.context(); final ComputableValue computableValue = context.computable( () -> "" ); assertEquals( context.getTopLevelComputableValues().size(), 1 ); assertEquals( context.getTopLevelComputableValues().get( computableValue.getName() ), computableValue ); assertEquals( context.getTopLevelTasks().size(), 0 ); assertEquals( context.getTopLevelObservers().size(), 0 ); assertEquals( context.getTopLevelObservables().size(), 0 ); } @Test public void observersNotPopulateOtherTopLevelRegistries() { final ArezContext context = Arez.context(); final Observer observer = context.observer( ValueUtil::randomString, Flags.AREZ_OR_NO_DEPENDENCIES ); assertEquals( context.getTopLevelObservers().size(), 1 ); assertEquals( context.getTopLevelObservers().get( observer.getName() ), observer ); assertEquals( context.getTopLevelTasks().size(), 0 ); assertEquals( context.getTopLevelComputableValues().size(), 0 ); assertEquals( context.getTopLevelObservables().size(), 0 ); } @Test public void scheduleDispose() { final ArezContext context = Arez.context(); final MultiPriorityTaskQueue queue = context.getTaskQueue(); final Observer observer = Arez.context().observer( new CountAndObserveProcedure() ); assertEquals( queue.getOrderedTasks().count(), 0L ); // Pause scheduler so that the task is not invoked immediately final Disposable schedulerLock = context.pauseScheduler(); final String name = observer.getName() + ".dispose"; context.scheduleDispose( name, observer ); assertEquals( queue.getOrderedTasks().count(), 1L ); final CircularBuffer<Task> buffer = queue.getBufferByPriority( 0 ); assertEquals( buffer.size(), 1 ); final Task task = buffer.get( 0 ); assertNotNull( task ); assertEquals( task.getName(), name ); assertFalse( task.isDisposed() ); assertFalse( observer.isDisposed() ); schedulerLock.dispose(); assertTrue( task.isDisposed() ); assertTrue( observer.isDisposed() ); } @Test public void scheduleDispose_withNoNameWhenNamesEnabled() { final ArezContext context = Arez.context(); final MultiPriorityTaskQueue queue = context.getTaskQueue(); final Observer observer = Arez.context().observer( new CountAndObserveProcedure() ); assertEquals( queue.getOrderedTasks().count(), 0L ); // Pause scheduler so that the task stays in the queue context.pauseScheduler(); context.scheduleDispose( null, observer ); assertEquals( queue.getOrderedTasks().count(), 1L ); final CircularBuffer<Task> buffer = queue.getBufferByPriority( 0 ); assertEquals( buffer.size(), 1 ); final Task task = buffer.get( 0 ); assertNotNull( task ); assertEquals( task.getName(), "Dispose@3" ); } @Test public void locator() { final ArezContext context = Arez.context(); final Locator locator = context.locator(); assertNotNull( locator ); assertNull( locator.findById( String.class, "21" ) ); final TypeBasedLocator worker = new TypeBasedLocator(); worker.registerLookup( String.class, String::valueOf ); final Disposable disposable = context.registerLocator( worker ); assertEquals( locator.findById( String.class, "21" ), "21" ); disposable.dispose(); assertNull( locator.findById( String.class, "21" ) ); } @Test public void locator_referencesDisabled() { ArezTestUtil.disableReferences(); ArezTestUtil.resetState(); assertInvariantFailure( () -> Arez.context().locator(), "Arez-0192: ArezContext.locator() invoked but Arez.areReferencesEnabled() returned false." 
); } @Test public void registerLocator_referencesDisabled() { ArezTestUtil.disableReferences(); ArezTestUtil.resetState(); assertInvariantFailure( () -> Arez.context().registerLocator( new TypeBasedLocator() ), "Arez-0191: ArezContext.registerLocator invoked but Arez.areReferencesEnabled() returned false." ); } @Test public void runInEnvironment() { final ArezContext context = Arez.context(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); final ObservableValue<Object> observable = context.observable(); final AtomicInteger observerCallCount = new AtomicInteger(); context.observer( () -> { observerCallCount.incrementAndGet(); observable.reportObserved(); } ); assertEquals( inEnvironmentCallCount.get(), 1 ); assertEquals( observerCallCount.get(), 1 ); } @Test public void runInEnvironment_nestedCallIgnored() throws Throwable { final ArezContext context = Arez.context(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); final ObservableValue<Object> observable = context.observable(); final AtomicInteger observer1CallCount = new AtomicInteger(); final AtomicInteger observer2CallCount = new AtomicInteger(); context.runInEnvironment( () -> { context.observer( () -> { observer1CallCount.incrementAndGet(); observable.reportObserved(); } ); context.observer( () -> { observer2CallCount.incrementAndGet(); observable.reportObserved(); } ); return null; } ); assertEquals( inEnvironmentCallCount.get(), 1 ); assertEquals( observer1CallCount.get(), 1 ); assertEquals( observer2CallCount.get(), 1 ); } @Test public void runInEnvironment_directNested() throws Throwable { final ArezContext context = Arez.context(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.runInEnvironment( () -> context.runInEnvironment( () -> context.runInEnvironment( () -> "" ) ) ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void setEnvironment_whenEnvironmentsDisabled() { ArezTestUtil.disableEnvironments(); final ArezContext context = Arez.context(); assertInvariantFailure( () -> context.setEnvironment( new CountingEnvironment( new AtomicInteger() ) ), "Arez-0124: ArezContext.setEnvironment() invoked but Arez.areEnvironmentsEnabled() returned false." 
); } @Test public void safeRunInEnvironment_directNested() { final ArezContext context = Arez.context(); final AtomicInteger inEnvironmentCallCount = new AtomicInteger(); context.setEnvironment( new CountingEnvironment( inEnvironmentCallCount ) ); assertEquals( inEnvironmentCallCount.get(), 0 ); context.safeRunInEnvironment( () -> context.safeRunInEnvironment( () -> context.safeRunInEnvironment( () -> "" ) ) ); assertEquals( inEnvironmentCallCount.get(), 1 ); } @Test public void runInEnvironment_noEnvironment() throws Throwable { final ArezContext context = Arez.context(); context.runInEnvironment( () -> context.runInEnvironment( () -> context.runInEnvironment( () -> "" ) ) ); } @Test public void safeRunInEnvironment_noEnvironment() { final ArezContext context = Arez.context(); context.safeRunInEnvironment( () -> context.safeRunInEnvironment( () -> context.safeRunInEnvironment( () -> "" ) ) ); } @Test public void task() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final String name = ValueUtil.randomString(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final Disposable task = context.task( name, callCount::incrementAndGet, 0 ); assertEquals( ( (Task) task ).getName(), name ); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 2 ); handler.assertNextEvent( TaskStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TaskCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNull( e.getThrowable() ); } ); handler.reset(); // This does nothing but just to make sure task.dispose(); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertTrue( task.isDisposed() ); handler.assertEventCount( 0 ); } @Test public void task_throwsException() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final String name = ValueUtil.randomString(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final String errorMessage = "Blah Error!"; final SafeProcedure work = () -> { callCount.incrementAndGet(); throw new RuntimeException( errorMessage ); }; final Disposable task = context.task( name, work, 0 ); assertEquals( ( (Task) task ).getName(), name ); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 2 ); handler.assertNextEvent( TaskStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TaskCompleteEvent.class, e -> { assertEquals( e.getName(), name ); assertNotNull( e.getThrowable() ); assertEquals( e.getThrowable().getMessage(), errorMessage ); } ); } @Test public void task_minimalParameters() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final Disposable task = context.task( callCount::incrementAndGet ); final String name = "Task@1"; assertEquals( ( (Task) task ).getName(), name ); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 2 ); handler.assertNextEvent( TaskStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( 
TaskCompleteEvent.class, e -> assertEquals( e.getName(), name ) ); } @Test public void task_RUN_LATER() { final ArezContext context = Arez.context(); final AtomicInteger callCount = new AtomicInteger(); final TestSpyEventHandler handler = new TestSpyEventHandler(); context.getSpy().addSpyEventHandler( handler ); final Disposable task = context.task( null, callCount::incrementAndGet, Flags.RUN_LATER ); final String name = "Task@1"; assertEquals( ( (Task) task ).getName(), name ); assertEquals( callCount.get(), 0 ); assertTrue( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 0 ); // Trigger scheduler and allow task to run context.triggerScheduler(); assertEquals( callCount.get(), 1 ); assertFalse( ( (Task) task ).isQueued() ); assertFalse( task.isDisposed() ); handler.assertEventCount( 2 ); handler.assertNextEvent( TaskStartEvent.class, e -> assertEquals( e.getName(), name ) ); handler.assertNextEvent( TaskCompleteEvent.class, e -> assertEquals( e.getName(), name ) ); } @Test public void task_different_PRIORITY() { final ArezContext context = Arez.context(); final ArrayList<String> calls = new ArrayList<>(); context.task( null, () -> calls.add( "1" ), Flags.RUN_LATER | Flags.PRIORITY_LOW ); context.task( null, () -> calls.add( "2" ), Flags.RUN_LATER | Flags.PRIORITY_HIGH ); context.task( null, () -> calls.add( "3" ), Flags.RUN_LATER ); context.task( null, () -> calls.add( "4" ), Flags.RUN_LATER | Flags.PRIORITY_HIGH ); context.task( null, () -> calls.add( "5" ), Flags.RUN_LATER | Flags.PRIORITY_HIGHEST ); context.task( null, () -> calls.add( "6" ), Flags.RUN_LATER | Flags.PRIORITY_LOWEST ); context.task( null, () -> calls.add( "7" ), Flags.RUN_LATER | Flags.PRIORITY_NORMAL ); // Trigger scheduler and allow tasks to run according to priority context.triggerScheduler(); assertEquals( String.join( ",", calls ), "5,2,4,3,7,1,6" ); } @Test public void task_bad_flags() { final ArezContext context = Arez.context(); assertInvariantFailure( () -> context.task( "MyTask", ValueUtil::randomString, Flags.REQUIRE_NEW_TRANSACTION ), "Arez-0224: Task named 'MyTask' passed invalid flags: " + Flags.REQUIRE_NEW_TRANSACTION ); } }
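// The tests above exercise the ArezContext API end to end. Below is a minimal, hypothetical
// sketch of the observe/react cycle they cover; it is not part of the original test file, and
// the class name and the observable's name are illustrative assumptions. It assumes it sits
// alongside the test in package arez, so no extra imports are needed.
class ArezContextUsageSketch
{
  static void run()
  {
    final ArezContext context = Arez.context();
    // An observable value that observers can depend upon.
    final ObservableValue<Object> counter = context.observable( "counter" );
    // The observer runs once immediately and is rescheduled whenever "counter" reports a change.
    final Observer observer = context.observer( counter::reportObserved );
    // Mutations happen inside an action; reportChanged() reschedules the observer.
    context.safeAction( counter::reportChanged );
    observer.dispose();
  }
}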
Make sure scheduledDisposes interaction with registry is as expected
core/src/test/java/arez/ArezContextTest.java
Make sure scheduledDisposes interaction with registry is as expected
Java
apache-2.0
145cd770df5c37a76a67faab497729bebd60a085
0
riversand963/alluxio,maboelhassan/alluxio,Alluxio/alluxio,calvinjia/tachyon,maboelhassan/alluxio,Reidddddd/alluxio,maobaolong/alluxio,bf8086/alluxio,yuluo-ding/alluxio,ChangerYoung/alluxio,apc999/alluxio,apc999/alluxio,calvinjia/tachyon,WilliamZapata/alluxio,aaudiber/alluxio,madanadit/alluxio,Alluxio/alluxio,PasaLab/tachyon,EvilMcJerkface/alluxio,maboelhassan/alluxio,WilliamZapata/alluxio,Reidddddd/alluxio,Reidddddd/mo-alluxio,apc999/alluxio,uronce-cc/alluxio,maobaolong/alluxio,maboelhassan/alluxio,wwjiang007/alluxio,bf8086/alluxio,jswudi/alluxio,ChangerYoung/alluxio,jsimsa/alluxio,riversand963/alluxio,madanadit/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,apc999/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,Reidddddd/alluxio,ShailShah/alluxio,Reidddddd/alluxio,Alluxio/alluxio,wwjiang007/alluxio,aaudiber/alluxio,wwjiang007/alluxio,maboelhassan/alluxio,uronce-cc/alluxio,jsimsa/alluxio,yuluo-ding/alluxio,ChangerYoung/alluxio,madanadit/alluxio,riversand963/alluxio,jswudi/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,maobaolong/alluxio,PasaLab/tachyon,PasaLab/tachyon,uronce-cc/alluxio,madanadit/alluxio,Alluxio/alluxio,Reidddddd/alluxio,jswudi/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,bf8086/alluxio,yuluo-ding/alluxio,aaudiber/alluxio,ShailShah/alluxio,apc999/alluxio,jswudi/alluxio,wwjiang007/alluxio,PasaLab/tachyon,madanadit/alluxio,WilliamZapata/alluxio,apc999/alluxio,maobaolong/alluxio,WilliamZapata/alluxio,wwjiang007/alluxio,bf8086/alluxio,wwjiang007/alluxio,riversand963/alluxio,ShailShah/alluxio,calvinjia/tachyon,calvinjia/tachyon,madanadit/alluxio,yuluo-ding/alluxio,calvinjia/tachyon,maboelhassan/alluxio,bf8086/alluxio,EvilMcJerkface/alluxio,Alluxio/alluxio,maobaolong/alluxio,Reidddddd/mo-alluxio,PasaLab/tachyon,aaudiber/alluxio,maobaolong/alluxio,maobaolong/alluxio,calvinjia/tachyon,Alluxio/alluxio,Reidddddd/alluxio,Alluxio/alluxio,ShailShah/alluxio,madanadit/alluxio,PasaLab/tachyon,Alluxio/alluxio,calvinjia/tachyon,uronce-cc/alluxio,riversand963/alluxio,Alluxio/alluxio,Reidddddd/mo-alluxio,wwjiang007/alluxio,Reidddddd/alluxio,maobaolong/alluxio,jsimsa/alluxio,jsimsa/alluxio,bf8086/alluxio,uronce-cc/alluxio,jsimsa/alluxio,WilliamZapata/alluxio,ShailShah/alluxio,ChangerYoung/alluxio,bf8086/alluxio,bf8086/alluxio,maobaolong/alluxio,jswudi/alluxio,uronce-cc/alluxio,WilliamZapata/alluxio,wwjiang007/alluxio,PasaLab/tachyon,aaudiber/alluxio,yuluo-ding/alluxio,apc999/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,ChangerYoung/alluxio,jsimsa/alluxio,Reidddddd/mo-alluxio,Reidddddd/mo-alluxio,Reidddddd/mo-alluxio,yuluo-ding/alluxio,aaudiber/alluxio,maboelhassan/alluxio,ChangerYoung/alluxio,jswudi/alluxio,riversand963/alluxio,aaudiber/alluxio,madanadit/alluxio,calvinjia/tachyon,ShailShah/alluxio
/*
 * Licensed to the University of California, Berkeley under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package tachyon;

/**
 * Different storage level alias for StorageTier.
 */
public enum StorageLevelAlias {
  /**
   * Memory Layer
   */
  MEM(1),
  /**
   * SSD Layer
   */
  SSD(2),
  /**
   * HDD Layer
   */
  HDD(3);

  private int mValue;

  private StorageLevelAlias(int value) {
    mValue = value;
  }

  /**
   * Gets value of the storage level alias
   *
   * @return value of the storage level alias
   */
  public int getValue() {
    return mValue;
  }

  /**
   * Gets StorageLevelAlias from a given value.
   *
   * @param value the value of the storage level alias
   * @return the StorageLevelAlias
   */
  public static StorageLevelAlias getAlias(int value) {
    if (value > SIZE || value < 1) {
      throw new IllegalArgumentException("non existing storage level");
    }
    return StorageLevelAlias.values()[value - 1];
  }

  public static final int SIZE = StorageLevelAlias.values().length;
}
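// A minimal, hypothetical usage sketch of the enum above; the example class is an illustrative
// assumption and not part of the Tachyon sources (it assumes it sits alongside StorageLevelAlias
// in package tachyon).
class StorageLevelAliasExample {
  public static void main(String[] args) {
    StorageLevelAlias alias = StorageLevelAlias.getAlias(1);  // value 1 maps to MEM
    int value = alias.getValue();                             // back to 1
    // getAlias() and getValue() round-trip for every defined level.
    assert StorageLevelAlias.getAlias(value) == alias;
    System.out.println(alias + " => " + value);
  }
}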
servers/src/main/java/tachyon/StorageLevelAlias.java
/*
 * Licensed to the University of California, Berkeley under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package tachyon;

/**
 * Different storage level alias for StorageTier.
 */
public enum StorageLevelAlias {
  /**
   * Memory Layer
   */
  MEM(1),
  /**
   * SSD Layer
   */
  SSD(2),
  /**
   * HDD Layer
   */
  HDD(3);

  private int mValue;

  private StorageLevelAlias(int value) {
    mValue = value;
  }

  /**
   * Gets value of the storage level alias
   *
   * @return value of the storage level alias
   */
  public int getValue() {
    return mValue;
  }

  /**
   * Gets StorageLevelAlias from a given value.
   *
   * @param value the value of the storage level alias
   * @return the StorageLevelAlias
   */
  public static StorageLevelAlias getAlias(int value) {
    if (value > SIZE || value < 1) {
      throw new IllegalArgumentException("non existing storage level");
    }
    return StorageLevelAlias.values()[value - 1];
  }

  public static final int SIZE = StorageLevelAlias.values().length;
}
Fix formatting in StorageLevelAlias
servers/src/main/java/tachyon/StorageLevelAlias.java
Fix formatting in StorageLevelAlias
Java
apache-2.0
10265b61cf860dc6493388c1cf4d6a53edb128c1
0
dimbleby/JGroups,rpelisse/JGroups,pferraro/JGroups,dimbleby/JGroups,belaban/JGroups,deepnarsay/JGroups,kedzie/JGroups,Sanne/JGroups,TarantulaTechnology/JGroups,Sanne/JGroups,kedzie/JGroups,ligzy/JGroups,vjuranek/JGroups,TarantulaTechnology/JGroups,tristantarrant/JGroups,rhusar/JGroups,pruivo/JGroups,danberindei/JGroups,kedzie/JGroups,slaskawi/JGroups,rhusar/JGroups,vjuranek/JGroups,vjuranek/JGroups,danberindei/JGroups,pruivo/JGroups,belaban/JGroups,deepnarsay/JGroups,dimbleby/JGroups,pferraro/JGroups,slaskawi/JGroups,ibrahimshbat/JGroups,rvansa/JGroups,danberindei/JGroups,ibrahimshbat/JGroups,ligzy/JGroups,TarantulaTechnology/JGroups,belaban/JGroups,rpelisse/JGroups,tristantarrant/JGroups,rhusar/JGroups,pruivo/JGroups,ibrahimshbat/JGroups,deepnarsay/JGroups,slaskawi/JGroups,pferraro/JGroups,rvansa/JGroups,rpelisse/JGroups,ligzy/JGroups,Sanne/JGroups,ibrahimshbat/JGroups
// $Id: DistributedHashtable.java,v 1.30 2007/04/19 18:45:06 vlada Exp $ package org.jgroups.blocks; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jgroups.*; import org.jgroups.persistence.CannotPersistException; import org.jgroups.persistence.CannotRemoveException; import org.jgroups.persistence.PersistenceFactory; import org.jgroups.persistence.PersistenceManager; import org.jgroups.util.Promise; import org.jgroups.util.Util; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; import java.util.*; /** * Provides the abstraction of a java.util.Hashtable that is replicated at several * locations. Any change to the hashtable (clear, put, remove etc) will transparently be * propagated to all replicas in the group. All read-only methods will always access the * local replica.<p> * Both keys and values added to the hashtable <em>must be serializable</em>, the reason * being that they will be sent across the network to all replicas of the group. Having said * this, it is now for example possible to add RMI remote objects to the hashtable as they * are derived from <code>java.rmi.server.RemoteObject</code> which in turn is serializable. * This allows to lookup shared distributed objects by their name and invoke methods on them, * regardless of one's onw location. A <code>DistributedHashtable</code> thus allows to * implement a distributed naming service in just a couple of lines.<p> * An instance of this class will contact an existing member of the group to fetch its * initial state (using the state exchange funclet <code>StateExchangeFunclet</code>. * @author Bela Ban * @author <a href="mailto:aolias@yahoo.com">Alfonso Olias-Sanz</a> * @version $Id: DistributedHashtable.java,v 1.30 2007/04/19 18:45:06 vlada Exp $ */ public class DistributedHashtable extends Hashtable implements ExtendedMessageListener, ExtendedMembershipListener { public interface Notification { void entrySet(Object key, Object value); void entryRemoved(Object key); void viewChange(Vector new_mbrs, Vector old_mbrs); void contentsSet(Map new_entries); void contentsCleared(); } private transient Channel channel; protected transient RpcDispatcher disp=null; private transient String groupname=null; private final transient Vector notifs=new Vector(); // to be notified when mbrship changes private final transient Vector members=new Vector(); // keeps track of all DHTs private transient Class[] put_signature=null; private transient Class[] putAll_signature=null; private transient Class[] clear_signature=null; private transient Class[] remove_signature=null; private transient boolean persistent=false; // whether to use PersistenceManager to save state private transient PersistenceManager persistence_mgr=null; /** Determines when the updates have to be sent across the network, avoids sending unnecessary * messages when there are no member in the group */ private transient boolean send_message = false; protected final transient Promise state_promise=new Promise(); protected final Log log=LogFactory.getLog(this.getClass()); /** * Creates a DistributedHashtable * @param groupname The name of the group to join * @param factory The ChannelFactory which will be used to create a channel * @param properties The property string to be used to define the channel. This will override the properties of * the factory. 
If null, then the factory properties will be used * @param state_timeout The time to wait until state is retrieved in milliseconds. A value of 0 means wait forever. */ public DistributedHashtable(String groupname, ChannelFactory factory, String properties, long state_timeout) throws ChannelException { this.groupname=groupname; initSignatures(); if(factory != null) { channel=properties != null? factory.createChannel(properties) : factory.createChannel(); } else { channel=new JChannel(properties); } disp=new RpcDispatcher(channel, this, this, this); channel.connect(groupname); start(state_timeout); } /** * Creates a DisttributedHashtable. Optionally the contents can be saved to * persistemt storage using the {@link PersistenceManager}. * @param groupname Name of the group to join * @param factory Instance of a ChannelFactory to create the channel * @param properties Protocol stack properties. This will override the properties of the factory. If * null, then the factory properties will be used * @param persistent Whether the contents should be persisted * @param state_timeout Max number of milliseconds to wait until state is * retrieved */ public DistributedHashtable(String groupname, ChannelFactory factory, String properties, boolean persistent, long state_timeout) throws ChannelException { this.groupname=groupname; this.persistent=persistent; initSignatures(); if(factory != null) { channel=properties != null? factory.createChannel(properties) : factory.createChannel(); } else { channel=new JChannel(properties); } disp=new RpcDispatcher(channel, this, this, this); channel.connect(groupname); start(state_timeout); } public DistributedHashtable(Channel channel, long state_timeout) { this(channel, false, state_timeout); } public DistributedHashtable(Channel channel, boolean persistent, long state_timeout) { this.groupname = channel.getClusterName(); this.channel = channel; this.persistent=persistent; init(state_timeout); } /** * Uses a user-provided PullPushAdapter to create the dispatcher rather than a Channel. If id is non-null, it will be * used to register under that id. This is typically used when another building block is already using * PullPushAdapter, and we want to add this building block in addition. The id is the used to discriminate * between messages for the various blocks on top of PullPushAdapter. If null, we will assume we are the * first block created on PullPushAdapter. * @param adapter The PullPushAdapter which to use as underlying transport * @param id A serializable object (e.g. an Integer) used to discriminate (multiplex/demultiplex) between * requests/responses for different building blocks on top of PullPushAdapter. 
* @param state_timeout Max number of milliseconds to wait until state is * retrieved */ public DistributedHashtable(PullPushAdapter adapter, Serializable id, long state_timeout) throws ChannelNotConnectedException, ChannelClosedException { initSignatures(); this.channel = (Channel)adapter.getTransport(); this.groupname = this.channel.getClusterName(); disp=new RpcDispatcher(adapter, id, this, this, this); start(state_timeout); } public DistributedHashtable(PullPushAdapter adapter, Serializable id) { initSignatures(); this.channel = (Channel)adapter.getTransport(); this.groupname = this.channel.getClusterName(); disp=new RpcDispatcher(adapter, id, this, this, this); } protected final void init(long state_timeout) { initSignatures(); disp = new RpcDispatcher(channel, this, this, this); // Changed by bela (jan 20 2003): start() has to be called by user (only when providing // own channel). First, Channel.connect() has to be called, then start(). // start(state_timeout); } /** * Fetches the state * @param state_timeout * @throws ChannelClosedException * @throws ChannelNotConnectedException */ public final void start(long state_timeout) throws ChannelClosedException, ChannelNotConnectedException { boolean rc; if(persistent) { if(log.isInfoEnabled()) log.info("fetching state from database"); try { persistence_mgr=PersistenceFactory.getInstance().createManager(); } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("failed creating PersistenceManager, " + "turning persistency off. Exception: " + Util.printStackTrace(ex)); persistent=false; } } state_promise.reset(); rc=channel.getState(null, state_timeout); if(rc) { if(log.isInfoEnabled()) log.info("state was retrieved successfully, waiting for setState()"); Boolean result=(Boolean)state_promise.getResult(state_timeout); if(result == null) { if(log.isErrorEnabled()) log.error("setState() never got called"); } else { if(log.isInfoEnabled()) log.info("setState() was called"); } } else { if(log.isInfoEnabled()) log.info("state could not be retrieved (first member)"); if(persistent) { if(log.isInfoEnabled()) log.info("fetching state from database"); try { Map m=persistence_mgr.retrieveAll(); if(m != null) { Map.Entry entry; Object key, val; for(Iterator it=m.entrySet().iterator(); it.hasNext();) { entry=(Map.Entry)it.next(); key=entry.getKey(); val=entry.getValue(); if(log.isInfoEnabled()) log.info("inserting " + key + " --> " + val); put(key, val); // will replicate key and value } } } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("failed creating PersistenceManager, " + "turning persistency off. Exception: " + Util.printStackTrace(ex)); persistent=false; } } } } public Address getLocalAddress() {return channel != null ? channel.getLocalAddress() : null;} public String getGroupName() {return groupname;} public Channel getChannel() {return channel;} public boolean getPersistent() {return persistent;} public void setPersistent(boolean p) {persistent=p;} public void setDeadlockDetection(boolean flag) { if(disp != null) disp.setDeadlockDetection(flag); } public void addNotifier(Notification n) { if(!notifs.contains(n)) notifs.addElement(n); } public void removeNotifier(Notification n) { if(notifs.contains(n)) notifs.removeElement(n); } public void stop() { if(disp != null) { disp.stop(); disp=null; } if(channel != null) { channel.close(); channel=null; } } /** * Maps the specified key to the specified value in the hashtable. 
Neither of both parameters can be null * @param key - the hashtable key * @param value - the value * @return the previous value of the specified key in this hashtable, or null if it did not have one */ public Object put(Object key, Object value) { Object prev_val=get(key); //Changes done by <aos> //if true, propagate action to the group if(send_message == true) { try { disp.callRemoteMethods( null, "_put", new Object[]{key,value}, put_signature, GroupRequest.GET_ALL, 0); } catch(Exception e) { //return null; } } else { _put(key, value); //don't have to do prev_val = super.put(..) as is done at the beginning } return prev_val; } /** * Copies all of the mappings from the specified Map to this Hashtable These mappings will replace any mappings that this Hashtable had for any of the keys currently in the specified Map. * @param m - Mappings to be stored in this map */ public void putAll(Map m) { //Changes done by <aos> //if true, propagate action to the group if(send_message == true) { try { disp.callRemoteMethods( null, "_putAll", new Object[]{m}, putAll_signature, GroupRequest.GET_ALL, 0); } catch(Throwable t) { } } else { _putAll(m); } } /** * Clears this hashtable so that it contains no keys */ public void clear() { //Changes done by <aos> //if true, propagate action to the group if(send_message == true) { try { disp.callRemoteMethods( null, "_clear", null, clear_signature, GroupRequest.GET_ALL, 0); } catch(Exception e) { if(log.isErrorEnabled()) log.error("exception=" + e); } } else { _clear(); } } /** * Removes the key (and its corresponding value) from the Hashtable. * @param key - the key to be removed. * @return the value to which the key had been mapped in this hashtable, or null if the key did not have a mapping. */ public Object remove(Object key) { Object retval = get(key); //Changes done by <aos> //if true, propagate action to the group if(send_message == true) { try { disp.callRemoteMethods( null, "_remove", new Object[]{key}, remove_signature, GroupRequest.GET_ALL, 0); //return retval; } catch(Exception e) { //return null; } } else { _remove(key); //don't have to do retval = super.remove(..) as is done at the beginning } return retval; } /*------------------------ Callbacks -----------------------*/ public Object _put(Object key, Object value) { Object retval=super.put(key, value); if(persistent) { try { persistence_mgr.save((Serializable)key, (Serializable)value); } catch(CannotPersistException cannot_persist_ex) { if(log.isErrorEnabled()) log.error("failed persisting " + key + " + " + value + ", exception=" + cannot_persist_ex); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed persisting " + key + " + " + value + ", exception=" + Util.printStackTrace(t)); } } for(int i=0; i < notifs.size(); i++) ((Notification)notifs.elementAt(i)).entrySet(key, value); return retval; } /** * @see java.util.Map#putAll(java.util.Map) */ public void _putAll(Map m) { if (m == null) return; // Calling the method below seems okay, but would result in ... deadlock ! // The reason is that Map.putAll() calls put(), which we override, which results in // lock contention for the map. 
// ---> super.putAll(m); <--- CULPRIT !!!@#$%$ // That said let's do it the stupid way: Map.Entry entry; for(Iterator it=m.entrySet().iterator(); it.hasNext();) { entry=(Map.Entry)it.next(); super.put(entry.getKey(), entry.getValue()); } if (persistent) { try { persistence_mgr.saveAll(m); } catch (CannotPersistException persist_ex) { if(log.isErrorEnabled()) log.error("failed persisting contents: " + persist_ex); } catch (Throwable t) { if(log.isErrorEnabled()) log.error("failed persisting contents: " + t); } } for(int i=0; i < notifs.size(); i++) ((Notification)notifs.elementAt(i)).contentsSet(m); } public void _clear() { super.clear(); if(persistent) { try { persistence_mgr.clear(); } catch(CannotRemoveException cannot_remove_ex) { if(log.isErrorEnabled()) log.error("failed clearing contents, exception=" + cannot_remove_ex); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed clearing contents, exception=" + t); } } for(int i=0; i < notifs.size(); i++) ((Notification)notifs.elementAt(i)).contentsCleared(); } public Object _remove(Object key) { Object retval=super.remove(key); if(persistent) { try { persistence_mgr.remove((Serializable)key); } catch(CannotRemoveException cannot_remove_ex) { if(log.isErrorEnabled()) log.error("failed clearing contents, exception=" + cannot_remove_ex); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed clearing contents, exception=" + t); } } for(int i=0; i < notifs.size(); i++) ((Notification)notifs.elementAt(i)).entryRemoved(key); return retval; } /*----------------------------------------------------------*/ /*-------------------- State Exchange ----------------------*/ public void receive(Message msg) { } public byte[] getState() { Object key, val; Hashtable copy=new Hashtable(); for(Enumeration e=keys(); e.hasMoreElements();) { key=e.nextElement(); val=get(key); copy.put(key, val); } try { return Util.objectToByteBuffer(copy); } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("exception marshalling state: " + ex); return null; } } public void setState(byte[] new_state) { Hashtable new_copy; try { new_copy=(Hashtable)Util.objectFromByteBuffer(new_state); if(new_copy == null) return; } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("exception unmarshalling state: " + ex); return; } _putAll(new_copy); state_promise.setResult(Boolean.TRUE); } /*------------------- Membership Changes ----------------------*/ public void viewAccepted(View new_view) { Vector new_mbrs=new_view.getMembers(); if(new_mbrs != null) { sendViewChangeNotifications(new_mbrs, members); // notifies observers (joined, left) members.removeAllElements(); for(int i=0; i < new_mbrs.size(); i++) members.addElement(new_mbrs.elementAt(i)); } //if size is bigger than one, there are more peers in the group //otherwise there is only one server. send_message=members.size() > 1; } /** Called when a member is suspected */ public void suspect(Address suspected_mbr) { ; } /** Block sending and receiving of messages until ViewAccepted is called */ public void block() {} void sendViewChangeNotifications(Vector new_mbrs, Vector old_mbrs) { Vector joined, left; Object mbr; Notification n; if(notifs.size() == 0 || old_mbrs == null || new_mbrs == null || old_mbrs.size() == 0 || new_mbrs.size() == 0) return; // 1. Compute set of members that joined: all that are in new_mbrs, but not in old_mbrs joined=new Vector(); for(int i=0; i < new_mbrs.size(); i++) { mbr=new_mbrs.elementAt(i); if(!old_mbrs.contains(mbr)) joined.addElement(mbr); } // 2. 
Compute set of members that left: all that were in old_mbrs, but not in new_mbrs left=new Vector(); for(int i=0; i < old_mbrs.size(); i++) { mbr=old_mbrs.elementAt(i); if(!new_mbrs.contains(mbr)) { left.addElement(mbr); } } for(int i=0; i < notifs.size(); i++) { n=(Notification)notifs.elementAt(i); n.viewChange(joined, left); } } final void initSignatures() { try { if(put_signature == null) { put_signature=new Class[] {Object.class,Object.class}; } if(putAll_signature == null) { putAll_signature=new Class[] {Map.class}; } if(clear_signature == null) clear_signature=new Class[0]; if(remove_signature == null) { remove_signature=new Class[] {Object.class}; } } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("exception=" + ex); } } public static void main(String[] args) { try { // The setup here is kind of weird: // 1. Create a channel // 2. Create a DistributedHashtable (on the channel) // 3. Connect the channel (so the HT gets a VIEW_CHANGE) // 4. Start the HT // // A simpler setup is // DistributedHashtable ht = new DistributedHashtable("demo", null, // "file://c:/JGroups-2.0/conf/state_transfer.xml", 5000); JChannel c = new JChannel("file:/c:/JGroups-2.0/conf/state_transfer.xml"); DistributedHashtable ht = new DistributedHashtable(c, false, 5000); c.connect("demo"); ht.start(5000); ht.put("name", "Michelle Ban"); Object old_key = ht.remove("name"); System.out.println("old key was " + old_key); ht.put("newkey", "newvalue"); Map m = new HashMap(); m.put("k1", "v1"); m.put("k2", "v2"); ht.putAll(m); System.out.println("hashmap is " + ht); } catch (Throwable t) { t.printStackTrace(); } } public byte[] getState(String state_id) { // not implemented return null; } public void getState(OutputStream ostream) { Object key, val; Hashtable copy=new Hashtable(); ObjectOutputStream oos = null; for(Enumeration e=keys(); e.hasMoreElements();) { key=e.nextElement(); val=get(key); copy.put(key, val); } try { oos = new ObjectOutputStream(ostream); oos.writeObject(copy); } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("exception marshalling state: " + ex); } finally{ Util.close(oos); } } public void getState(String state_id, OutputStream ostream) { } public void setState(String state_id, byte[] state) { } public void setState(InputStream istream) { Hashtable new_copy = null; ObjectInputStream ois = null; try{ ois = new ObjectInputStream(istream); new_copy = (Hashtable) ois.readObject(); ois.close(); }catch(Throwable e){ e.printStackTrace(); if(log.isErrorEnabled()) log.error("exception marshalling state: " + e); }finally{ Util.close(ois); } if(new_copy != null) _putAll(new_copy); state_promise.setResult(Boolean.TRUE); } public void setState(String state_id, InputStream istream) { } public void unblock() { } }
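The class javadoc above describes a Hashtable whose updates (put, putAll, remove, clear) are replicated to every group member, and the nested Notification interface is how callers observe those replicated updates and membership changes. A minimal, hedged usage sketch follows; the NotifierExample class name and the default-stack JChannel() are illustrative, and only the DistributedHashtable/Notification API shown in the file above is taken from the source.

import java.util.Map;
import java.util.Vector;
import org.jgroups.JChannel;
import org.jgroups.blocks.DistributedHashtable;

public class NotifierExample {
    public static void main(String[] args) throws Exception {
        JChannel ch = new JChannel();                                   // default protocol stack; a props file could be passed instead
        DistributedHashtable ht = new DistributedHashtable(ch, false, 5000);
        ch.connect("demo");                                             // connect first, then start(), as the init() comment requires
        ht.start(5000);

        ht.addNotifier(new DistributedHashtable.Notification() {
            public void entrySet(Object key, Object value)            { System.out.println("set " + key + "=" + value); }
            public void entryRemoved(Object key)                      { System.out.println("removed " + key); }
            public void viewChange(Vector joined, Vector left)        { System.out.println("joined=" + joined + " left=" + left); }
            public void contentsSet(Map new_entries)                  { System.out.println("state received: " + new_entries.size() + " entries"); }
            public void contentsCleared()                             { System.out.println("contents cleared"); }
        });

        ht.put("name", "value");   // replicated to all members; the notifier also fires locally via _put()
        ht.stop();                 // stop() closes the underlying channel as well
    }
}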
src/org/jgroups/blocks/DistributedHashtable.java
// $Id: DistributedHashtable.java,v 1.29 2007/03/15 15:35:12 vlada Exp $ package org.jgroups.blocks; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jgroups.*; import org.jgroups.persistence.CannotPersistException; import org.jgroups.persistence.CannotRemoveException; import org.jgroups.persistence.PersistenceFactory; import org.jgroups.persistence.PersistenceManager; import org.jgroups.util.Promise; import org.jgroups.util.Util; import java.io.Serializable; import java.util.*; /** * Provides the abstraction of a java.util.Hashtable that is replicated at several * locations. Any change to the hashtable (clear, put, remove etc) will transparently be * propagated to all replicas in the group. All read-only methods will always access the * local replica.<p> * Both keys and values added to the hashtable <em>must be serializable</em>, the reason * being that they will be sent across the network to all replicas of the group. Having said * this, it is now for example possible to add RMI remote objects to the hashtable as they * are derived from <code>java.rmi.server.RemoteObject</code> which in turn is serializable. * This allows to lookup shared distributed objects by their name and invoke methods on them, * regardless of one's onw location. A <code>DistributedHashtable</code> thus allows to * implement a distributed naming service in just a couple of lines.<p> * An instance of this class will contact an existing member of the group to fetch its * initial state (using the state exchange funclet <code>StateExchangeFunclet</code>. * @author Bela Ban * @author <a href="mailto:aolias@yahoo.com">Alfonso Olias-Sanz</a> * @version $Id: DistributedHashtable.java,v 1.29 2007/03/15 15:35:12 vlada Exp $ */ public class DistributedHashtable extends Hashtable implements MessageListener, MembershipListener { public interface Notification { void entrySet(Object key, Object value); void entryRemoved(Object key); void viewChange(Vector new_mbrs, Vector old_mbrs); void contentsSet(Map new_entries); void contentsCleared(); } private transient Channel channel; protected transient RpcDispatcher disp=null; private transient String groupname=null; private final transient Vector notifs=new Vector(); // to be notified when mbrship changes private final transient Vector members=new Vector(); // keeps track of all DHTs private transient Class[] put_signature=null; private transient Class[] putAll_signature=null; private transient Class[] clear_signature=null; private transient Class[] remove_signature=null; private transient boolean persistent=false; // whether to use PersistenceManager to save state private transient PersistenceManager persistence_mgr=null; /** Determines when the updates have to be sent across the network, avoids sending unnecessary * messages when there are no member in the group */ private transient boolean send_message = false; protected final transient Promise state_promise=new Promise(); protected final Log log=LogFactory.getLog(this.getClass()); /** * Creates a DistributedHashtable * @param groupname The name of the group to join * @param factory The ChannelFactory which will be used to create a channel * @param properties The property string to be used to define the channel. This will override the properties of * the factory. If null, then the factory properties will be used * @param state_timeout The time to wait until state is retrieved in milliseconds. A value of 0 means wait forever. 
*/ public DistributedHashtable(String groupname, ChannelFactory factory, String properties, long state_timeout) throws ChannelException { this.groupname=groupname; initSignatures(); if(factory != null) { channel=properties != null? factory.createChannel(properties) : factory.createChannel(); } else { channel=new JChannel(properties); } disp=new RpcDispatcher(channel, this, this, this); channel.connect(groupname); start(state_timeout); } /** * Creates a DisttributedHashtable. Optionally the contents can be saved to * persistemt storage using the {@link PersistenceManager}. * @param groupname Name of the group to join * @param factory Instance of a ChannelFactory to create the channel * @param properties Protocol stack properties. This will override the properties of the factory. If * null, then the factory properties will be used * @param persistent Whether the contents should be persisted * @param state_timeout Max number of milliseconds to wait until state is * retrieved */ public DistributedHashtable(String groupname, ChannelFactory factory, String properties, boolean persistent, long state_timeout) throws ChannelException { this.groupname=groupname; this.persistent=persistent; initSignatures(); if(factory != null) { channel=properties != null? factory.createChannel(properties) : factory.createChannel(); } else { channel=new JChannel(properties); } disp=new RpcDispatcher(channel, this, this, this); channel.connect(groupname); start(state_timeout); } public DistributedHashtable(Channel channel, long state_timeout) { this(channel, false, state_timeout); } public DistributedHashtable(Channel channel, boolean persistent, long state_timeout) { this.groupname = channel.getClusterName(); this.channel = channel; this.persistent=persistent; init(state_timeout); } /** * Uses a user-provided PullPushAdapter to create the dispatcher rather than a Channel. If id is non-null, it will be * used to register under that id. This is typically used when another building block is already using * PullPushAdapter, and we want to add this building block in addition. The id is the used to discriminate * between messages for the various blocks on top of PullPushAdapter. If null, we will assume we are the * first block created on PullPushAdapter. * @param adapter The PullPushAdapter which to use as underlying transport * @param id A serializable object (e.g. an Integer) used to discriminate (multiplex/demultiplex) between * requests/responses for different building blocks on top of PullPushAdapter. * @param state_timeout Max number of milliseconds to wait until state is * retrieved */ public DistributedHashtable(PullPushAdapter adapter, Serializable id, long state_timeout) throws ChannelNotConnectedException, ChannelClosedException { initSignatures(); this.channel = (Channel)adapter.getTransport(); this.groupname = this.channel.getClusterName(); disp=new RpcDispatcher(adapter, id, this, this, this); start(state_timeout); } public DistributedHashtable(PullPushAdapter adapter, Serializable id) { initSignatures(); this.channel = (Channel)adapter.getTransport(); this.groupname = this.channel.getClusterName(); disp=new RpcDispatcher(adapter, id, this, this, this); } protected final void init(long state_timeout) { initSignatures(); disp = new RpcDispatcher(channel, this, this, this); // Changed by bela (jan 20 2003): start() has to be called by user (only when providing // own channel). First, Channel.connect() has to be called, then start(). 
// start(state_timeout); } /** * Fetches the state * @param state_timeout * @throws ChannelClosedException * @throws ChannelNotConnectedException */ public final void start(long state_timeout) throws ChannelClosedException, ChannelNotConnectedException { boolean rc; if(persistent) { if(log.isInfoEnabled()) log.info("fetching state from database"); try { persistence_mgr=PersistenceFactory.getInstance().createManager(); } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("failed creating PersistenceManager, " + "turning persistency off. Exception: " + Util.printStackTrace(ex)); persistent=false; } } state_promise.reset(); rc=channel.getState(null, state_timeout); if(rc) { if(log.isInfoEnabled()) log.info("state was retrieved successfully, waiting for setState()"); Boolean result=(Boolean)state_promise.getResult(state_timeout); if(result == null) { if(log.isErrorEnabled()) log.error("setState() never got called"); } else { if(log.isInfoEnabled()) log.info("setState() was called"); } } else { if(log.isInfoEnabled()) log.info("state could not be retrieved (first member)"); if(persistent) { if(log.isInfoEnabled()) log.info("fetching state from database"); try { Map m=persistence_mgr.retrieveAll(); if(m != null) { Map.Entry entry; Object key, val; for(Iterator it=m.entrySet().iterator(); it.hasNext();) { entry=(Map.Entry)it.next(); key=entry.getKey(); val=entry.getValue(); if(log.isInfoEnabled()) log.info("inserting " + key + " --> " + val); put(key, val); // will replicate key and value } } } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("failed creating PersistenceManager, " + "turning persistency off. Exception: " + Util.printStackTrace(ex)); persistent=false; } } } } public Address getLocalAddress() {return channel != null ? channel.getLocalAddress() : null;} public String getGroupName() {return groupname;} public Channel getChannel() {return channel;} public boolean getPersistent() {return persistent;} public void setPersistent(boolean p) {persistent=p;} public void setDeadlockDetection(boolean flag) { if(disp != null) disp.setDeadlockDetection(flag); } public void addNotifier(Notification n) { if(!notifs.contains(n)) notifs.addElement(n); } public void removeNotifier(Notification n) { if(notifs.contains(n)) notifs.removeElement(n); } public void stop() { if(disp != null) { disp.stop(); disp=null; } if(channel != null) { channel.close(); channel=null; } } /** * Maps the specified key to the specified value in the hashtable. Neither of both parameters can be null * @param key - the hashtable key * @param value - the value * @return the previous value of the specified key in this hashtable, or null if it did not have one */ public Object put(Object key, Object value) { Object prev_val=get(key); //Changes done by <aos> //if true, propagate action to the group if(send_message == true) { try { disp.callRemoteMethods( null, "_put", new Object[]{key,value}, put_signature, GroupRequest.GET_ALL, 0); } catch(Exception e) { //return null; } } else { _put(key, value); //don't have to do prev_val = super.put(..) as is done at the beginning } return prev_val; } /** * Copies all of the mappings from the specified Map to this Hashtable These mappings will replace any mappings that this Hashtable had for any of the keys currently in the specified Map. 
* @param m - Mappings to be stored in this map */ public void putAll(Map m) { //Changes done by <aos> //if true, propagate action to the group if(send_message == true) { try { disp.callRemoteMethods( null, "_putAll", new Object[]{m}, putAll_signature, GroupRequest.GET_ALL, 0); } catch(Throwable t) { } } else { _putAll(m); } } /** * Clears this hashtable so that it contains no keys */ public void clear() { //Changes done by <aos> //if true, propagate action to the group if(send_message == true) { try { disp.callRemoteMethods( null, "_clear", null, clear_signature, GroupRequest.GET_ALL, 0); } catch(Exception e) { if(log.isErrorEnabled()) log.error("exception=" + e); } } else { _clear(); } } /** * Removes the key (and its corresponding value) from the Hashtable. * @param key - the key to be removed. * @return the value to which the key had been mapped in this hashtable, or null if the key did not have a mapping. */ public Object remove(Object key) { Object retval = get(key); //Changes done by <aos> //if true, propagate action to the group if(send_message == true) { try { disp.callRemoteMethods( null, "_remove", new Object[]{key}, remove_signature, GroupRequest.GET_ALL, 0); //return retval; } catch(Exception e) { //return null; } } else { _remove(key); //don't have to do retval = super.remove(..) as is done at the beginning } return retval; } /*------------------------ Callbacks -----------------------*/ public Object _put(Object key, Object value) { Object retval=super.put(key, value); if(persistent) { try { persistence_mgr.save((Serializable)key, (Serializable)value); } catch(CannotPersistException cannot_persist_ex) { if(log.isErrorEnabled()) log.error("failed persisting " + key + " + " + value + ", exception=" + cannot_persist_ex); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed persisting " + key + " + " + value + ", exception=" + Util.printStackTrace(t)); } } for(int i=0; i < notifs.size(); i++) ((Notification)notifs.elementAt(i)).entrySet(key, value); return retval; } /** * @see java.util.Map#putAll(java.util.Map) */ public void _putAll(Map m) { if (m == null) return; // Calling the method below seems okay, but would result in ... deadlock ! // The reason is that Map.putAll() calls put(), which we override, which results in // lock contention for the map. 
// ---> super.putAll(m); <--- CULPRIT !!!@#$%$ // That said let's do it the stupid way: Map.Entry entry; for(Iterator it=m.entrySet().iterator(); it.hasNext();) { entry=(Map.Entry)it.next(); super.put(entry.getKey(), entry.getValue()); } if (persistent) { try { persistence_mgr.saveAll(m); } catch (CannotPersistException persist_ex) { if(log.isErrorEnabled()) log.error("failed persisting contents: " + persist_ex); } catch (Throwable t) { if(log.isErrorEnabled()) log.error("failed persisting contents: " + t); } } for(int i=0; i < notifs.size(); i++) ((Notification)notifs.elementAt(i)).contentsSet(m); } public void _clear() { super.clear(); if(persistent) { try { persistence_mgr.clear(); } catch(CannotRemoveException cannot_remove_ex) { if(log.isErrorEnabled()) log.error("failed clearing contents, exception=" + cannot_remove_ex); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed clearing contents, exception=" + t); } } for(int i=0; i < notifs.size(); i++) ((Notification)notifs.elementAt(i)).contentsCleared(); } public Object _remove(Object key) { Object retval=super.remove(key); if(persistent) { try { persistence_mgr.remove((Serializable)key); } catch(CannotRemoveException cannot_remove_ex) { if(log.isErrorEnabled()) log.error("failed clearing contents, exception=" + cannot_remove_ex); } catch(Throwable t) { if(log.isErrorEnabled()) log.error("failed clearing contents, exception=" + t); } } for(int i=0; i < notifs.size(); i++) ((Notification)notifs.elementAt(i)).entryRemoved(key); return retval; } /*----------------------------------------------------------*/ /*-------------------- State Exchange ----------------------*/ public void receive(Message msg) { } public byte[] getState() { Object key, val; Hashtable copy=new Hashtable(); for(Enumeration e=keys(); e.hasMoreElements();) { key=e.nextElement(); val=get(key); copy.put(key, val); } try { return Util.objectToByteBuffer(copy); } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("exception marshalling state: " + ex); return null; } } public void setState(byte[] new_state) { Hashtable new_copy; try { new_copy=(Hashtable)Util.objectFromByteBuffer(new_state); if(new_copy == null) return; } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("exception unmarshalling state: " + ex); return; } _putAll(new_copy); state_promise.setResult(Boolean.TRUE); } /*------------------- Membership Changes ----------------------*/ public void viewAccepted(View new_view) { Vector new_mbrs=new_view.getMembers(); if(new_mbrs != null) { sendViewChangeNotifications(new_mbrs, members); // notifies observers (joined, left) members.removeAllElements(); for(int i=0; i < new_mbrs.size(); i++) members.addElement(new_mbrs.elementAt(i)); } //if size is bigger than one, there are more peers in the group //otherwise there is only one server. send_message=members.size() > 1; } /** Called when a member is suspected */ public void suspect(Address suspected_mbr) { ; } /** Block sending and receiving of messages until ViewAccepted is called */ public void block() {} void sendViewChangeNotifications(Vector new_mbrs, Vector old_mbrs) { Vector joined, left; Object mbr; Notification n; if(notifs.size() == 0 || old_mbrs == null || new_mbrs == null || old_mbrs.size() == 0 || new_mbrs.size() == 0) return; // 1. Compute set of members that joined: all that are in new_mbrs, but not in old_mbrs joined=new Vector(); for(int i=0; i < new_mbrs.size(); i++) { mbr=new_mbrs.elementAt(i); if(!old_mbrs.contains(mbr)) joined.addElement(mbr); } // 2. 
Compute set of members that left: all that were in old_mbrs, but not in new_mbrs left=new Vector(); for(int i=0; i < old_mbrs.size(); i++) { mbr=old_mbrs.elementAt(i); if(!new_mbrs.contains(mbr)) { left.addElement(mbr); } } for(int i=0; i < notifs.size(); i++) { n=(Notification)notifs.elementAt(i); n.viewChange(joined, left); } } final void initSignatures() { try { if(put_signature == null) { put_signature=new Class[] {Object.class,Object.class}; } if(putAll_signature == null) { putAll_signature=new Class[] {Map.class}; } if(clear_signature == null) clear_signature=new Class[0]; if(remove_signature == null) { remove_signature=new Class[] {Object.class}; } } catch(Throwable ex) { if(log.isErrorEnabled()) log.error("exception=" + ex); } } public static void main(String[] args) { try { // The setup here is kind of weird: // 1. Create a channel // 2. Create a DistributedHashtable (on the channel) // 3. Connect the channel (so the HT gets a VIEW_CHANGE) // 4. Start the HT // // A simpler setup is // DistributedHashtable ht = new DistributedHashtable("demo", null, // "file://c:/JGroups-2.0/conf/state_transfer.xml", 5000); JChannel c = new JChannel("file:/c:/JGroups-2.0/conf/state_transfer.xml"); DistributedHashtable ht = new DistributedHashtable(c, false, 5000); c.connect("demo"); ht.start(5000); ht.put("name", "Michelle Ban"); Object old_key = ht.remove("name"); System.out.println("old key was " + old_key); ht.put("newkey", "newvalue"); Map m = new HashMap(); m.put("k1", "v1"); m.put("k2", "v2"); ht.putAll(m); System.out.println("hashmap is " + ht); } catch (Throwable t) { t.printStackTrace(); } } }
implement streaming state transfer
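The change this record captures: DistributedHashtable now implements ExtendedMessageListener/ExtendedMembershipListener and adds getState(OutputStream)/setState(InputStream), so the replicated table is serialized through a stream supplied by the state-transfer layer instead of being marshalled into one byte[] with Util.objectToByteBuffer(). Below is a minimal sketch of that round trip using only JDK streams; the ByteArrayOutputStream/ByteArrayInputStream pair merely stands in for the streams JGroups hands to the callbacks, and the class and variable names are illustrative.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Hashtable;

public class StreamingStateSketch {
    public static void main(String[] args) throws Exception {
        Hashtable<String, String> state = new Hashtable<>();
        state.put("k1", "v1");
        state.put("k2", "v2");

        // Provider side: what getState(OutputStream) does - serialize a copy of the table onto the stream.
        ByteArrayOutputStream sink = new ByteArrayOutputStream();       // stands in for the stream passed to getState()
        ObjectOutputStream oos = new ObjectOutputStream(sink);
        oos.writeObject(state);
        oos.close();

        // Requester side: what setState(InputStream) does - read the table back and merge it via _putAll().
        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(sink.toByteArray()));
        @SuppressWarnings("unchecked")
        Hashtable<String, String> received = (Hashtable<String, String>) ois.readObject();
        ois.close();

        System.out.println("received state: " + received);              // prints both entries
    }
}

The practical benefit of the streaming variant is that the state no longer has to exist as a single in-memory byte[] on either side, which matters when the replicated table is large.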
src/org/jgroups/blocks/DistributedHashtable.java
implement streaming state transfer
Java
bsd-2-clause
57e1fb79f799aa0fbc34e79bcb94b532572b0f5f
0
ratan12/Atarashii,ratan12/Atarashii,AnimeNeko/Atarashii,AnimeNeko/Atarashii
package net.somethingdreadful.MAL; import android.annotation.SuppressLint; import android.app.Activity; import android.app.Fragment; import android.content.Context; import android.content.IntentFilter; import android.os.AsyncTask; import android.os.Bundle; import android.support.v4.content.ContextCompat; import android.support.v4.content.LocalBroadcastManager; import android.support.v4.widget.SwipeRefreshLayout; import android.util.Log; import android.view.LayoutInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.AbsListView; import android.widget.AbsListView.OnScrollListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ArrayAdapter; import android.widget.GridView; import android.widget.ImageView; import android.widget.PopupMenu; import android.widget.TextView; import android.widget.ViewFlipper; import com.crashlytics.android.Crashlytics; import com.crashlytics.android.answers.Answers; import com.crashlytics.android.answers.ContentViewEvent; import com.squareup.picasso.Callback; import com.squareup.picasso.Picasso; import net.somethingdreadful.MAL.account.AccountService; import net.somethingdreadful.MAL.api.APIHelper; import net.somethingdreadful.MAL.api.BaseModels.AnimeManga.Anime; import net.somethingdreadful.MAL.api.BaseModels.AnimeManga.GenericRecord; import net.somethingdreadful.MAL.api.BaseModels.AnimeManga.Manga; import net.somethingdreadful.MAL.api.MALApi.ListType; import net.somethingdreadful.MAL.broadcasts.RecordStatusUpdatedReceiver; import net.somethingdreadful.MAL.tasks.NetworkTask; import net.somethingdreadful.MAL.tasks.TaskJob; import net.somethingdreadful.MAL.tasks.WriteDetailTask; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import butterknife.BindView; import butterknife.ButterKnife; import lombok.Getter; import lombok.Setter; public class IGF extends Fragment implements OnScrollListener, OnItemClickListener, NetworkTask.NetworkTaskListener, RecordStatusUpdatedReceiver.RecordStatusUpdatedListener { private ListType listType = ListType.ANIME; // just to have it proper initialized private Context context; public TaskJob taskjob; private Activity activity; private NetworkTask networkTask; private IGFCallbackListener callback; private ListViewAdapter<GenericRecord> ga; private boolean popupEnabled = true; private ArrayList<GenericRecord> gl = new ArrayList<>(); private ArrayList<GenericRecord> backGl = new ArrayList<>(); @BindView(R.id.gridview) GridView Gridview; @BindView(R.id.viewFlipper) ViewFlipper viewflipper; @BindView(R.id.swiperefresh) SwipeRefreshLayout swipeRefresh; private RecordStatusUpdatedReceiver recordStatusReceiver; private int page = 1; public int list = -1; private int resource; private int height = 0; private int sortType = 1; @Getter private boolean isAnime = true; @Getter private boolean isList = true; private boolean inverse = false; @Getter private boolean loading = true; private boolean useSecondaryAmounts; private boolean hasmorepages = false; private boolean clearAfterLoading = false; private boolean details = false; private boolean numberList = false; /* setSwipeRefreshEnabled() may be called before swipeRefresh exists (before onCreateView() is * called), so save it and apply it in onCreateView() */ private boolean swipeRefreshEnabled = true; private String query; @Setter @Getter private String username = null; 
@Override public void onSaveInstanceState(Bundle state) { state.putSerializable("gl", gl); state.putSerializable("backGl", backGl); state.putSerializable("listType", listType); state.putSerializable("taskjob", taskjob); state.putInt("page", page); state.putInt("list", list); state.putInt("sortType", sortType); state.putInt("resource", resource); state.putBoolean("inverse", inverse); state.putBoolean("hasmorepages", hasmorepages); state.putBoolean("popupEnabled", popupEnabled); state.putBoolean("swipeRefreshEnabled", swipeRefreshEnabled); state.putBoolean("useSecondaryAmounts", useSecondaryAmounts); state.putBoolean("details", details); state.putBoolean("numberList", numberList); state.putBoolean("isAnime", isAnime); state.putBoolean("isList", isList); state.putString("query", query); state.putString("username", username); super.onSaveInstanceState(state); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle state) { View view = inflater.inflate(R.layout.record_igf_layout, container, false); view.setBackgroundColor(ContextCompat.getColor(getActivity(), R.color.bg_dark)); ButterKnife.bind(this, view); Gridview.setOnItemClickListener(this); Gridview.setOnScrollListener(this); if (state != null) { backGl = (ArrayList<GenericRecord>) state.getSerializable("backGl"); gl = (ArrayList<GenericRecord>) state.getSerializable("gl"); listType = (ListType) state.getSerializable("listType"); taskjob = (TaskJob) state.getSerializable("taskjob"); page = state.getInt("page"); list = state.getInt("list"); resource = state.getInt("resource"); hasmorepages = state.getBoolean("hasmorepages"); swipeRefreshEnabled = state.getBoolean("swipeRefreshEnabled"); query = state.getString("query"); username = state.getString("username"); details = state.getBoolean("details"); isAnime = state.getBoolean("isAnime"); numberList = state.getBoolean("numberList"); useSecondaryAmounts = state.getBoolean("useSecondaryAmounts"); isList = state.getBoolean("isList"); sortType = state.getInt("sortType"); inverse = state.getBoolean("inverse"); popupEnabled = state.getBoolean("popupEnabled"); } else { resource = PrefManager.getTraditionalListEnabled() ? R.layout.record_igf_listview : R.layout.record_igf_gridview; useSecondaryAmounts = PrefManager.getUseSecondaryAmountsEnabled(); } activity = getActivity(); context = activity; setColumns(); if (activity instanceof Home) swipeRefresh.setOnRefreshListener((Home) getActivity()); if (activity instanceof IGFCallbackListener) callback = (IGFCallbackListener) activity; swipeRefresh.setColorSchemeResources(android.R.color.holo_blue_bright, android.R.color.holo_green_light, android.R.color.holo_orange_light, android.R.color.holo_red_light); swipeRefresh.setEnabled(swipeRefreshEnabled); recordStatusReceiver = new RecordStatusUpdatedReceiver(this); IntentFilter filter = new IntentFilter(RecordStatusUpdatedReceiver.RECV_IDENT); LocalBroadcastManager.getInstance(activity).registerReceiver(recordStatusReceiver, filter); if (gl.size() > 0) // there are already records, fragment has been rotated refresh(); if (callback != null) callback.onIGFReady(this); return view; } @Override public void onDetach() { if (recordStatusReceiver != null) LocalBroadcastManager.getInstance(activity).unregisterReceiver(recordStatusReceiver); super.onDetach(); } /** * Set the numbers columns for the best overview. 
*/ @SuppressLint("InlinedApi") private void setColumns() { int screenWidth = Theme.convert(context.getResources().getConfiguration().screenWidthDp); if (PrefManager.getTraditionalListEnabled()) { Gridview.setNumColumns(1); //remain in the listview mode } else if (PrefManager.getIGFColumns() == 0) { int columns = (int) Math.ceil(screenWidth / Theme.floatConvert(225)); int width = screenWidth / columns; height = (int) (width / 0.7); Gridview.setNumColumns(columns); PrefManager.setIGFColumns(columns); PrefManager.commitChanges(); } else { height = (int) (screenWidth / PrefManager.getIGFColumns() / 0.7); Gridview.setNumColumns(PrefManager.getIGFColumns()); } } /** * Set listType, boolean isAnime */ public void setListType(ListType listType) { Crashlytics.log(Log.INFO, "Atarashii", "IGF.sort(): listType=" + listType); this.listType = listType; isAnime = listType.equals(ListType.ANIME); } /** * Init the list other than the user. * * @param listType ListType, boolean isAnime * @return The fragment */ public IGF setFriendList(ListType listType) { setListType(listType); this.popupEnabled = false; return this; } /** * Filter the list by status type. */ public void filter(int statusType) { switch (statusType) { case 1: gl = backGl; refresh(); break; case 2: filterStatus(isAnime() ? "watching" : "reading"); break; case 3: filterStatus("completed"); break; case 4: filterStatus("on-hold"); break; case 5: filterStatus("dropped"); break; case 6: filterStatus(isAnime() ? "plan to watch" : "plan to read"); break; default: gl = backGl; refresh(); break; } } /** * Filter the status by the provided String. * * @param status The status of the record */ private void filterStatus(String status) { ArrayList<GenericRecord> gr = new ArrayList<>(); if (backGl != null && backGl.size() > 0) { if (isAnime()) for (GenericRecord record : backGl) { if (((Anime) record).getWatchedStatus().equals(status)) gr.add(record); } else for (GenericRecord record : backGl) { if (((Manga) record).getReadStatus().equals(status)) gr.add(record); } } gl = gr; sortList(sortType); } /** * Sort records by the sortType ID. * * @param sortType The sort ID */ public void sort(int sortType) { Crashlytics.log(Log.INFO, "Atarashii", "IGF.sort(" + listType + "): sortType=" + sortType); this.sortType = sortType; if (taskjob.equals(TaskJob.GETFRIENDLIST) && !isLoading()) { sortList(sortType); } else { getRecords(true, taskjob, list); } } /** * Instead of reloading we just sort them. * <p/> * note: do not change only this part but also the DatabaseManager part! * * @param sortType The sort type */ private void sortList(final int sortType) { Collections.sort(gl != null && gl.size() > 0 ? 
gl : new ArrayList<GenericRecord>(), new Comparator<GenericRecord>() { @Override public int compare(GenericRecord GR1, GenericRecord GR2) { switch (sortType) { case 1: return GR1.getTitle().toLowerCase().compareTo(GR2.getTitle().toLowerCase()); case 2: return compareCheck(((Integer) GR2.getScore()).compareTo(GR1.getScore()), GR1, GR2); case 3: return compareCheck(GR1.getType().toLowerCase().compareTo(GR2.getType().toLowerCase()), GR1, GR2); case 4: return compareCheck(GR1.getStatus().toLowerCase().compareTo(GR2.getStatus().toLowerCase()), GR1, GR2); case 5: return compareCheck(((Integer) ((Anime) GR1).getWatchedEpisodes()).compareTo(((Anime) GR2).getWatchedEpisodes()), GR1, GR2); case -5: return compareCheck(((Integer) ((Manga) GR1).getChaptersRead()).compareTo(((Manga) GR2).getChaptersRead()), GR1, GR2); default: return GR1.getTitle().toLowerCase().compareTo(GR2.getTitle().toLowerCase()); } } }); if (inverse) Collections.reverse(gl); refresh(); } /** * Used to sort records also on title if they are in the same x. * * @param x The x passed by compareTo * @param GR1 The first record to compare * @param GR2 The second record to compare * @return int X the x sorting value */ private int compareCheck(int x, GenericRecord GR1, GenericRecord GR2) { if (x != 0) return x; else return GR1.getTitle().toLowerCase().compareTo(GR2.getTitle().toLowerCase()); } /** * Show details on covers. */ public void details() { this.details = !details; if (details) resource = R.layout.record_igf_details; else resource = PrefManager.getTraditionalListEnabled() ? R.layout.record_igf_listview : R.layout.record_igf_gridview; refresh(); } /** * Get the details status. */ public boolean getDetails() { return details; } /** * Get the amount of columns. * * @param portrait The orientation of the screen. * @return int The amount of columns */ public static int getColumns(boolean portrait) { int screen; if (Theme.isPortrait() && portrait || !Theme.isPortrait() && !portrait) screen = Theme.convert(Theme.context.getResources().getConfiguration().screenWidthDp); else screen = Theme.convert(Theme.context.getResources().getConfiguration().screenHeightDp); return (int) Math.ceil(screen / Theme.floatConvert(225)); } /** * Get the max amount of columns before the design breaks. * * @param portrait The orientation of the screen. * @return int The amount of max columns */ public static int getMaxColumns(boolean portrait) { int screen; if (Theme.isPortrait() && portrait || !Theme.isPortrait() && !portrait) screen = Theme.convert(Theme.context.getResources().getConfiguration().screenWidthDp); else screen = Theme.convert(Theme.context.getResources().getConfiguration().screenHeightDp); return (int) Math.ceil(screen / Theme.convert(225)) + 2; } /** * Add +1 episode/volume/chapters to the anime/manga. * <p/> * Use null if the other record isn't available * * @param anime The Anime record that should increase by one * @param manga The manga record that should increase by one */ private void setProgressPlusOne(Anime anime, Manga manga) { if (isAnime()) { anime.setWatchedEpisodes(anime.getWatchedEpisodes() + 1); new WriteDetailTask(listType, activity).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, anime); } else { manga.setProgress(useSecondaryAmounts, manga.getProgress(useSecondaryAmounts) + 1); new WriteDetailTask(listType, activity).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, manga); } refresh(); } /** * Mark the anime/manga as completed. 
* <p/> * Use null if the other record isn't available * * @param anime The Anime record that should be marked as complete * @param manga The manga record that should be marked as complete */ private void setMarkAsComplete(Anime anime, Manga manga) { if (isAnime()) { anime.setWatchedStatus(GenericRecord.STATUS_COMPLETED); gl.remove(anime); new WriteDetailTask(listType, activity).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, anime); } else { manga.setReadStatus(GenericRecord.STATUS_COMPLETED); if (manga.getChapters() > 0) manga.setChaptersRead(manga.getChapters()); if (manga.getVolumes() > 0) manga.setVolumesRead(manga.getVolumes()); gl.remove(manga); new WriteDetailTask(listType, activity).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, manga); } refresh(); } /** * Handle the loading indicator. * * @param show If true then the IGF will show the indiacator */ private void toggleLoadingIndicator(boolean show) { if (viewflipper != null) viewflipper.setDisplayedChild(show ? 1 : 0); } /** * Handle the SwipeRefresh animantion. * * @param show If true then the IGF will show the animation */ public void toggleSwipeRefreshAnimation(boolean show) { if (swipeRefresh != null) swipeRefresh.setRefreshing(show); } /** * Handle the SwipeRefreshView. * * @param enabled If true then the SwipeRefreshView will be enabled */ public void setSwipeRefreshEnabled(boolean enabled) { swipeRefreshEnabled = enabled; if (swipeRefresh != null) swipeRefresh.setEnabled(enabled); } /** * Check of task contains any other taskjob. * * @param taskJob1 The first Taskjob to compare * @param taskJob2 The second Taskjob to compare * @return boolean True if they contain the taskjob */ private boolean containsTask(TaskJob taskJob1, TaskJob taskJob2) { return taskJob1.toString().contains(taskJob2.toString()); } /** * Browse trough the anime/manga lists. */ public void getBrowse(HashMap<String, String> query, boolean clear) { taskjob = TaskJob.BROWSE; isList = false; if (clear) { resetPage(); gl.clear(); if (ga == null) setAdapter(); ga.clear(); } boolean isEmpty = gl.isEmpty(); toggleLoadingIndicator((page == 1 && !isList()) || (taskjob.equals(TaskJob.FORCESYNC) && isEmpty)); toggleSwipeRefreshAnimation(page > 1 && !isList() || taskjob.equals(TaskJob.FORCESYNC)); loading = true; try { new NetworkTask(activity,listType, query, this).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR); } catch (Exception e) { Crashlytics.log(Log.ERROR, "Atarashii", "IGF.getBrowse(): " + e.getMessage()); Crashlytics.logException(e); } } /** * Get the anime/manga lists. * * @param clear If true then the whole list will be cleared and loaded * @param task Which list should be shown (top, popular, upcoming...) * @param list Which list type should be shown (completed, dropped, in progress...) */ public void getRecords(boolean clear, TaskJob task, int list) { if (task != null) taskjob = task; if (task != TaskJob.GETLIST && task != TaskJob.FORCESYNC && task != TaskJob.GETFRIENDLIST) { details = false; numberList = containsTask(taskjob, TaskJob.GETMOSTPOPULAR) || containsTask(taskjob, TaskJob.GETTOPRATED); resource = PrefManager.getTraditionalListEnabled() ? 
R.layout.record_igf_listview : R.layout.record_igf_gridview; isList = false; } else { isList = true; } if (list != this.list) this.list = list; /* only show loading indicator if * - is not own list and on page 1 * - force sync and list is empty (only show swipe refresh animation if not empty) * - clear is set */ boolean isEmpty = gl.isEmpty(); toggleLoadingIndicator((page == 1 && !isList()) || (taskjob.equals(TaskJob.FORCESYNC) && isEmpty) || clear); /* show swipe refresh animation if * - loading more pages * - forced update * - clear is unset */ toggleSwipeRefreshAnimation((page > 1 && !isList() || taskjob.equals(TaskJob.FORCESYNC)) && !clear); loading = true; try { if (clear) { resetPage(); gl.clear(); if (ga == null) setAdapter(); ga.clear(); } Bundle data = new Bundle(); data.putInt("page", page); networkTask = new NetworkTask(taskjob, listType, activity, data, this); ArrayList<String> args = new ArrayList<>(); if (taskjob.equals(TaskJob.GETFRIENDLIST)) { args.add(username); setSwipeRefreshEnabled(false); } else if (isList()) { setSwipeRefreshEnabled(true); args.add(ContentManager.listSortFromInt(list, listType)); args.add(String.valueOf(sortType)); args.add(String.valueOf(inverse)); } else { args.add(query); } networkTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, args.toArray(new String[args.size()])); } catch (Exception e) { Crashlytics.log(Log.ERROR, "Atarashii", "IGF.getRecords(): " + e.getMessage()); Crashlytics.logException(e); } } /** * Get the search results of the query. * * @param search The query that should be searched for */ public void searchRecords(String search) { if (search != null && !search.equals(query) && !search.isEmpty()) { // no need for searching the same again or empty string query = search; page = 1; setSwipeRefreshEnabled(false); getRecords(true, TaskJob.SEARCH, 0); } } /** * Reset the page number of anime/manga lists. */ private void resetPage() { page = 1; if (Gridview != null) { Gridview.requestFocusFromTouch(); Gridview.post(new Runnable() { @Override public void run() { Gridview.setSelection(0); } }); } } /** * Set the adapter anime/manga. */ private void setAdapter() { ga = new ListViewAdapter<>(context, resource); ga.setNotifyOnChange(true); } /** * Refresh all the covers. */ private void refresh() { try { if (ga == null) setAdapter(); ga.clear(); ga.supportAddAll(gl); if (Gridview.getAdapter() == null) Gridview.setAdapter(ga); } catch (Exception e) { if (APIHelper.isNetworkAvailable(context)) { Crashlytics.log(Log.ERROR, "Atarashii", "IGF.refresh(): " + e.getMessage()); Crashlytics.logException(e); if (taskjob.equals(TaskJob.SEARCH)) { Theme.Snackbar(activity, R.string.toast_error_Search); } else { if (isAnime()) Theme.Snackbar(activity, R.string.toast_error_Anime_Sync); else Theme.Snackbar(activity, R.string.toast_error_Manga_Sync); } } else { Theme.Snackbar(activity, R.string.toast_error_noConnectivity); } } loading = false; } /** * Inverse the list and refresh it. */ public void inverse() { this.inverse = !inverse; if (taskjob.equals(TaskJob.GETFRIENDLIST)) { if (gl != null && gl.size() > 0) Collections.reverse(gl); refresh(); } else { getRecords(true, taskjob, list); } } /** * Set the list with the new page/list. 
*/ @SuppressWarnings("unchecked") // Don't panic, we handle possible class cast exceptions @Override public void onNetworkTaskFinished(Object result, TaskJob job, ListType type) { ArrayList resultList; try { if (type == ListType.ANIME) resultList = (ArrayList<Anime>) result; else resultList = (ArrayList<Manga>) result; } catch (Exception e) { Crashlytics.log(Log.ERROR, "Atarashii", "IGF.onNetworkTaskFinished(): " + result.getClass().toString()); Crashlytics.logException(e); resultList = null; } if (resultList != null) { if (resultList.size() == 0 && taskjob.equals(TaskJob.SEARCH)) { if (this.page == 1) doRecordsLoadedCallback(job); } else { if (job.equals(TaskJob.FORCESYNC)) doRecordsLoadedCallback(job); if (clearAfterLoading || job.equals(TaskJob.FORCESYNC) || job.equals(TaskJob.GETFRIENDLIST)) { // a forced sync always reloads all data, so clear the list gl.clear(); clearAfterLoading = false; } hasmorepages = resultList.size() > 0; gl.addAll(resultList); if (taskjob.equals(TaskJob.GETFRIENDLIST)) { backGl.addAll(resultList); sortList(sortType); } else { refresh(); } } } else { doRecordsLoadedCallback(job); // no resultList ? something went wrong } networkTask = null; toggleSwipeRefreshAnimation(false); toggleLoadingIndicator(false); } @Override public void onNetworkTaskError(TaskJob job) { doRecordsLoadedCallback(job); activity.runOnUiThread(new Runnable() { @Override public void run() { toggleSwipeRefreshAnimation(false); toggleLoadingIndicator(false); } }); } /** * Trigger to the parent activity that the records are loaded. * * @param job Which list should be shown (top, popular, upcoming...) */ private void doRecordsLoadedCallback(TaskJob job) { if (callback != null) callback.onRecordsLoadingFinished(job); } @Override public void onScrollStateChanged(AbsListView view, int scrollState) { } /** * Load more pages if we are almost on the bottom. */ @Override public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { // don't do anything if there is nothing in the list if (!isList()) { if (firstVisibleItem == 0 && visibleItemCount == 0 && totalItemCount == 0) return; if (totalItemCount - firstVisibleItem <= (visibleItemCount * 2) && !loading && hasmorepages) { loading = true; page++; getRecords(false, null, list); } } } // user updated record on DetailsView, so update the list if necessary @Override public void onRecordStatusUpdated(ListType type) { // broadcast received if (type != null && type.equals(listType) && isList()) { clearAfterLoading = true; getRecords(false, TaskJob.GETLIST, list); } } /** * Handle the gridview click by navigating to the detailview. */ @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { Answers.getInstance().logContentView(new ContentViewEvent() .putContentName("DetailView") .putContentType(String.valueOf(listType)) .putContentId(String.valueOf(listType).charAt(0) + String.valueOf(gl.get(position).getId()))); callback.onItemClick(gl.get(position).getId(), listType, username); } static class ViewHolder { TextView label; TextView progressCount; TextView flavourText; ImageView cover; ImageView actionButton; TextView scoreCount; TextView typeCount; TextView statusCount; } /** * The custom adapter for the covers anime/manga. 
*/ public class ListViewAdapter<T> extends ArrayAdapter<T> { final LayoutInflater inflater; final boolean listView; final String StatusWatching; final String StatusReading; final String StatusCompleted; final String StatusOnHold; final String StatusDropped; final String StatusPlanningToWatch; final String StatusPlanningToRead; final String Number; final boolean isMAL; public ListViewAdapter(Context context, int resource) { super(context, resource); // Get the string to make the scrolling smoother StatusWatching = getString(R.string.cover_Watching); StatusReading = getString(R.string.cover_Reading); StatusCompleted = getString(R.string.cover_Completed); StatusOnHold = getString(R.string.cover_OnHold); StatusDropped = getString(R.string.cover_Dropped); StatusPlanningToWatch = getString(R.string.cover_PlanningToWatch); StatusPlanningToRead = getString(R.string.cover_PlanningToRead); Number = getString(R.string.label_Number); isMAL = AccountService.isMAL(); inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE); listView = resource != R.layout.record_igf_listview; } @SuppressWarnings("deprecation") public View getView(int position, View view, ViewGroup parent) { final GenericRecord record = gl.get(position); Anime animeRecord; Manga mangaRecord; ViewHolder viewHolder = null; String status; int progress; if (isAnime()) { animeRecord = (Anime) record; status = animeRecord.getWatchedStatus(); progress = animeRecord.getWatchedEpisodes(); } else { mangaRecord = (Manga) record; status = mangaRecord.getReadStatus(); progress = useSecondaryAmounts ? mangaRecord.getVolumesRead() : mangaRecord.getChaptersRead(); } if (view != null) viewHolder = (ViewHolder) view.getTag(); if (view == null || (details && viewHolder.scoreCount == null) || (!details && viewHolder.scoreCount != null)) { view = inflater.inflate(resource, parent, false); viewHolder = new ViewHolder(); viewHolder.label = (TextView) view.findViewById(R.id.animeName); viewHolder.progressCount = (TextView) view.findViewById(R.id.watchedCount); viewHolder.cover = (ImageView) view.findViewById(R.id.coverImage); viewHolder.actionButton = (ImageView) view.findViewById(R.id.popUpButton); viewHolder.flavourText = (TextView) view.findViewById(R.id.stringWatched); viewHolder.scoreCount = (TextView) view.findViewById(R.id.scoreCount); viewHolder.typeCount = (TextView) view.findViewById(R.id.typeCount); viewHolder.statusCount = (TextView) view.findViewById(R.id.statusCount); view.setTag(viewHolder); if (listView) view.getLayoutParams().height = height; } try { viewHolder.label.setText(record.getTitle()); if (details) { viewHolder.scoreCount.setText(String.valueOf(record.getScore())); viewHolder.typeCount.setText(record.getType()); viewHolder.statusCount.setText(record.getStatus()); } if (isList() && status != null) { viewHolder.progressCount.setText(String.valueOf(progress)); switch (status) { case "watching": viewHolder.flavourText.setText(StatusWatching); viewHolder.progressCount.setVisibility(View.VISIBLE); if (popupEnabled) { viewHolder.actionButton.setVisibility(View.VISIBLE); viewHolder.actionButton.setOnClickListener(new ABOnClickListener(record)); } else { viewHolder.actionButton.setVisibility(View.GONE); } break; case "reading": viewHolder.flavourText.setText(StatusReading); viewHolder.progressCount.setVisibility(View.VISIBLE); if (popupEnabled) { viewHolder.actionButton.setVisibility(View.VISIBLE); viewHolder.actionButton.setOnClickListener(new ABOnClickListener(record)); } else { 
viewHolder.actionButton.setVisibility(View.GONE); } break; case "completed": viewHolder.flavourText.setText(StatusCompleted); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; case "on-hold": viewHolder.flavourText.setText(StatusOnHold); viewHolder.progressCount.setVisibility(View.VISIBLE); viewHolder.actionButton.setVisibility(View.GONE); break; case "dropped": viewHolder.flavourText.setText(StatusDropped); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; case "plan to watch": viewHolder.flavourText.setText(StatusPlanningToWatch); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; case "plan to read": viewHolder.flavourText.setText(StatusPlanningToRead); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; default: viewHolder.flavourText.setText(""); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; } } else { viewHolder.actionButton.setVisibility(View.GONE); if (isMAL && numberList) { viewHolder.progressCount.setVisibility(View.VISIBLE); viewHolder.progressCount.setText(String.valueOf(position + 1)); viewHolder.flavourText.setText(Number); } else { viewHolder.progressCount.setVisibility(View.GONE); viewHolder.flavourText.setText(getString(R.string.unknown)); } } Picasso.with(context) .load(record.getImageUrl()) .error(R.drawable.cover_error) .placeholder(R.drawable.cover_loading) .into(viewHolder.cover); } catch (Exception e) { Theme.logTaskCrash("IGF", "ListViewAdapter()", e); } return view; } public void supportAddAll(Collection<? extends T> collection) { for (T record : collection) { this.add(record); } } /** * Custom grid clicker for passing the right record */ public class ABOnClickListener implements View.OnClickListener { final GenericRecord record; public ABOnClickListener(GenericRecord record) { this.record = record; } @Override public void onClick(View view) { PopupMenu popup = new PopupMenu(context, view); popup.getMenuInflater().inflate(R.menu.record_popup, popup.getMenu()); if (!isAnime()) popup.getMenu().findItem(R.id.plusOne).setTitle(R.string.action_PlusOneRead); popup.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { switch (item.getItemId()) { case R.id.plusOne: if (isAnime()) setProgressPlusOne((Anime) record, null); else setProgressPlusOne(null, (Manga) record); break; case R.id.markCompleted: if (isAnime()) setMarkAsComplete((Anime) record, null); else setMarkAsComplete(null, (Manga) record); break; } return true; } }); popup.show(); } } } public interface IGFCallbackListener { void onIGFReady(IGF igf); void onRecordsLoadingFinished(TaskJob job); void onItemClick(int id, ListType listType, String username); } }
Atarashii/src/net/somethingdreadful/MAL/IGF.java
package net.somethingdreadful.MAL; import android.annotation.SuppressLint; import android.app.Activity; import android.app.Fragment; import android.content.Context; import android.content.IntentFilter; import android.os.AsyncTask; import android.os.Bundle; import android.support.v4.content.ContextCompat; import android.support.v4.content.LocalBroadcastManager; import android.support.v4.widget.SwipeRefreshLayout; import android.util.Log; import android.view.LayoutInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.AbsListView; import android.widget.AbsListView.OnScrollListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ArrayAdapter; import android.widget.GridView; import android.widget.ImageView; import android.widget.PopupMenu; import android.widget.TextView; import android.widget.ViewFlipper; import com.crashlytics.android.Crashlytics; import com.crashlytics.android.answers.Answers; import com.crashlytics.android.answers.ContentViewEvent; import com.squareup.picasso.Picasso; import net.somethingdreadful.MAL.account.AccountService; import net.somethingdreadful.MAL.api.APIHelper; import net.somethingdreadful.MAL.api.BaseModels.AnimeManga.Anime; import net.somethingdreadful.MAL.api.BaseModels.AnimeManga.GenericRecord; import net.somethingdreadful.MAL.api.BaseModels.AnimeManga.Manga; import net.somethingdreadful.MAL.api.MALApi.ListType; import net.somethingdreadful.MAL.broadcasts.RecordStatusUpdatedReceiver; import net.somethingdreadful.MAL.tasks.NetworkTask; import net.somethingdreadful.MAL.tasks.TaskJob; import net.somethingdreadful.MAL.tasks.WriteDetailTask; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import butterknife.BindView; import butterknife.ButterKnife; import lombok.Getter; import lombok.Setter; public class IGF extends Fragment implements OnScrollListener, OnItemClickListener, NetworkTask.NetworkTaskListener, RecordStatusUpdatedReceiver.RecordStatusUpdatedListener { private ListType listType = ListType.ANIME; // just to have it proper initialized private Context context; public TaskJob taskjob; private Activity activity; private NetworkTask networkTask; private IGFCallbackListener callback; private ListViewAdapter<GenericRecord> ga; private boolean popupEnabled = true; private ArrayList<GenericRecord> gl = new ArrayList<>(); private ArrayList<GenericRecord> backGl = new ArrayList<>(); @BindView(R.id.gridview) GridView Gridview; @BindView(R.id.viewFlipper) ViewFlipper viewflipper; @BindView(R.id.swiperefresh) SwipeRefreshLayout swipeRefresh; private RecordStatusUpdatedReceiver recordStatusReceiver; private int page = 1; public int list = -1; private int resource; private int height = 0; private int sortType = 1; @Getter private boolean isAnime = true; @Getter private boolean isList = true; private boolean inverse = false; @Getter private boolean loading = true; private boolean useSecondaryAmounts; private boolean hasmorepages = false; private boolean clearAfterLoading = false; private boolean details = false; private boolean numberList = false; /* setSwipeRefreshEnabled() may be called before swipeRefresh exists (before onCreateView() is * called), so save it and apply it in onCreateView() */ private boolean swipeRefreshEnabled = true; private String query; @Setter @Getter private String username = null; @Override public void 
onSaveInstanceState(Bundle state) { state.putSerializable("gl", gl); state.putSerializable("backGl", backGl); state.putSerializable("listType", listType); state.putSerializable("taskjob", taskjob); state.putInt("page", page); state.putInt("list", list); state.putInt("sortType", sortType); state.putInt("resource", resource); state.putBoolean("inverse", inverse); state.putBoolean("hasmorepages", hasmorepages); state.putBoolean("popupEnabled", popupEnabled); state.putBoolean("swipeRefreshEnabled", swipeRefreshEnabled); state.putBoolean("useSecondaryAmounts", useSecondaryAmounts); state.putBoolean("details", details); state.putBoolean("numberList", numberList); state.putBoolean("isAnime", isAnime); state.putBoolean("isList", isList); state.putString("query", query); state.putString("username", username); super.onSaveInstanceState(state); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle state) { View view = inflater.inflate(R.layout.record_igf_layout, container, false); ButterKnife.bind(this, view); Gridview.setOnItemClickListener(this); Gridview.setOnScrollListener(this); Gridview.setBackgroundColor(ContextCompat.getColor(getActivity(), R.color.bg_dark)); if (state != null) { backGl = (ArrayList<GenericRecord>) state.getSerializable("backGl"); gl = (ArrayList<GenericRecord>) state.getSerializable("gl"); listType = (ListType) state.getSerializable("listType"); taskjob = (TaskJob) state.getSerializable("taskjob"); page = state.getInt("page"); list = state.getInt("list"); resource = state.getInt("resource"); hasmorepages = state.getBoolean("hasmorepages"); swipeRefreshEnabled = state.getBoolean("swipeRefreshEnabled"); query = state.getString("query"); username = state.getString("username"); details = state.getBoolean("details"); isAnime = state.getBoolean("isAnime"); numberList = state.getBoolean("numberList"); useSecondaryAmounts = state.getBoolean("useSecondaryAmounts"); isList = state.getBoolean("isList"); sortType = state.getInt("sortType"); inverse = state.getBoolean("inverse"); popupEnabled = state.getBoolean("popupEnabled"); } else { resource = PrefManager.getTraditionalListEnabled() ? R.layout.record_igf_listview : R.layout.record_igf_gridview; useSecondaryAmounts = PrefManager.getUseSecondaryAmountsEnabled(); } activity = getActivity(); context = activity; setColumns(); if (activity instanceof Home) swipeRefresh.setOnRefreshListener((Home) getActivity()); if (activity instanceof IGFCallbackListener) callback = (IGFCallbackListener) activity; swipeRefresh.setColorSchemeResources(android.R.color.holo_blue_bright, android.R.color.holo_green_light, android.R.color.holo_orange_light, android.R.color.holo_red_light); swipeRefresh.setEnabled(swipeRefreshEnabled); recordStatusReceiver = new RecordStatusUpdatedReceiver(this); IntentFilter filter = new IntentFilter(RecordStatusUpdatedReceiver.RECV_IDENT); LocalBroadcastManager.getInstance(activity).registerReceiver(recordStatusReceiver, filter); if (gl.size() > 0) // there are already records, fragment has been rotated refresh(); if (callback != null) callback.onIGFReady(this); return view; } @Override public void onDetach() { if (recordStatusReceiver != null) LocalBroadcastManager.getInstance(activity).unregisterReceiver(recordStatusReceiver); super.onDetach(); } /** * Set the numbers columns for the best overview. 
*/ @SuppressLint("InlinedApi") private void setColumns() { int screenWidth = Theme.convert(context.getResources().getConfiguration().screenWidthDp); if (PrefManager.getTraditionalListEnabled()) { Gridview.setNumColumns(1); //remain in the listview mode } else if (PrefManager.getIGFColumns() == 0) { int columns = (int) Math.ceil(screenWidth / Theme.floatConvert(225)); int width = screenWidth / columns; height = (int) (width / 0.7); Gridview.setNumColumns(columns); PrefManager.setIGFColumns(columns); PrefManager.commitChanges(); } else { height = (int) (screenWidth / PrefManager.getIGFColumns() / 0.7); Gridview.setNumColumns(PrefManager.getIGFColumns()); } } /** * Set listType, boolean isAnime */ public void setListType(ListType listType) { Crashlytics.log(Log.INFO, "Atarashii", "IGF.sort(): listType=" + listType); this.listType = listType; isAnime = listType.equals(ListType.ANIME); } /** * Init the list other than the user. * * @param listType ListType, boolean isAnime * @return The fragment */ public IGF setFriendList(ListType listType) { setListType(listType); this.popupEnabled = false; return this; } /** * Filter the list by status type. */ public void filter(int statusType) { switch (statusType) { case 1: gl = backGl; refresh(); break; case 2: filterStatus(isAnime() ? "watching" : "reading"); break; case 3: filterStatus("completed"); break; case 4: filterStatus("on-hold"); break; case 5: filterStatus("dropped"); break; case 6: filterStatus(isAnime() ? "plan to watch" : "plan to read"); break; default: gl = backGl; refresh(); break; } } /** * Filter the status by the provided String. * * @param status The status of the record */ private void filterStatus(String status) { ArrayList<GenericRecord> gr = new ArrayList<>(); if (backGl != null && backGl.size() > 0) { if (isAnime()) for (GenericRecord record : backGl) { if (((Anime) record).getWatchedStatus().equals(status)) gr.add(record); } else for (GenericRecord record : backGl) { if (((Manga) record).getReadStatus().equals(status)) gr.add(record); } } gl = gr; sortList(sortType); } /** * Sort records by the sortType ID. * * @param sortType The sort ID */ public void sort(int sortType) { Crashlytics.log(Log.INFO, "Atarashii", "IGF.sort(" + listType + "): sortType=" + sortType); this.sortType = sortType; if (taskjob.equals(TaskJob.GETFRIENDLIST) && !isLoading()) { sortList(sortType); } else { getRecords(true, taskjob, list); } } /** * Instead of reloading we just sort them. * <p/> * note: do not change only this part but also the DatabaseManager part! * * @param sortType The sort type */ private void sortList(final int sortType) { Collections.sort(gl != null && gl.size() > 0 ? 
gl : new ArrayList<GenericRecord>(), new Comparator<GenericRecord>() { @Override public int compare(GenericRecord GR1, GenericRecord GR2) { switch (sortType) { case 1: return GR1.getTitle().toLowerCase().compareTo(GR2.getTitle().toLowerCase()); case 2: return compareCheck(((Integer) GR2.getScore()).compareTo(GR1.getScore()), GR1, GR2); case 3: return compareCheck(GR1.getType().toLowerCase().compareTo(GR2.getType().toLowerCase()), GR1, GR2); case 4: return compareCheck(GR1.getStatus().toLowerCase().compareTo(GR2.getStatus().toLowerCase()), GR1, GR2); case 5: return compareCheck(((Integer) ((Anime) GR1).getWatchedEpisodes()).compareTo(((Anime) GR2).getWatchedEpisodes()), GR1, GR2); case -5: return compareCheck(((Integer) ((Manga) GR1).getChaptersRead()).compareTo(((Manga) GR2).getChaptersRead()), GR1, GR2); default: return GR1.getTitle().toLowerCase().compareTo(GR2.getTitle().toLowerCase()); } } }); if (inverse) Collections.reverse(gl); refresh(); } /** * Used to sort records also on title if they are in the same x. * * @param x The x passed by compareTo * @param GR1 The first record to compare * @param GR2 The second record to compare * @return int X the x sorting value */ private int compareCheck(int x, GenericRecord GR1, GenericRecord GR2) { if (x != 0) return x; else return GR1.getTitle().toLowerCase().compareTo(GR2.getTitle().toLowerCase()); } /** * Show details on covers. */ public void details() { this.details = !details; if (details) resource = R.layout.record_igf_details; else resource = PrefManager.getTraditionalListEnabled() ? R.layout.record_igf_listview : R.layout.record_igf_gridview; refresh(); } /** * Get the details status. */ public boolean getDetails() { return details; } /** * Get the amount of columns. * * @param portrait The orientation of the screen. * @return int The amount of columns */ public static int getColumns(boolean portrait) { int screen; if (Theme.isPortrait() && portrait || !Theme.isPortrait() && !portrait) screen = Theme.convert(Theme.context.getResources().getConfiguration().screenWidthDp); else screen = Theme.convert(Theme.context.getResources().getConfiguration().screenHeightDp); return (int) Math.ceil(screen / Theme.floatConvert(225)); } /** * Get the max amount of columns before the design breaks. * * @param portrait The orientation of the screen. * @return int The amount of max columns */ public static int getMaxColumns(boolean portrait) { int screen; if (Theme.isPortrait() && portrait || !Theme.isPortrait() && !portrait) screen = Theme.convert(Theme.context.getResources().getConfiguration().screenWidthDp); else screen = Theme.convert(Theme.context.getResources().getConfiguration().screenHeightDp); return (int) Math.ceil(screen / Theme.convert(225)) + 2; } /** * Add +1 episode/volume/chapters to the anime/manga. * <p/> * Use null if the other record isn't available * * @param anime The Anime record that should increase by one * @param manga The manga record that should increase by one */ private void setProgressPlusOne(Anime anime, Manga manga) { if (isAnime()) { anime.setWatchedEpisodes(anime.getWatchedEpisodes() + 1); new WriteDetailTask(listType, activity).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, anime); } else { manga.setProgress(useSecondaryAmounts, manga.getProgress(useSecondaryAmounts) + 1); new WriteDetailTask(listType, activity).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, manga); } refresh(); } /** * Mark the anime/manga as completed. 
* <p/> * Use null if the other record isn't available * * @param anime The Anime record that should be marked as complete * @param manga The manga record that should be marked as complete */ private void setMarkAsComplete(Anime anime, Manga manga) { if (isAnime()) { anime.setWatchedStatus(GenericRecord.STATUS_COMPLETED); gl.remove(anime); new WriteDetailTask(listType, activity).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, anime); } else { manga.setReadStatus(GenericRecord.STATUS_COMPLETED); if (manga.getChapters() > 0) manga.setChaptersRead(manga.getChapters()); if (manga.getVolumes() > 0) manga.setVolumesRead(manga.getVolumes()); gl.remove(manga); new WriteDetailTask(listType, activity).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, manga); } refresh(); } /** * Handle the loading indicator. * * @param show If true then the IGF will show the indiacator */ private void toggleLoadingIndicator(boolean show) { if (viewflipper != null) viewflipper.setDisplayedChild(show ? 1 : 0); } /** * Handle the SwipeRefresh animantion. * * @param show If true then the IGF will show the animation */ public void toggleSwipeRefreshAnimation(boolean show) { if (swipeRefresh != null) swipeRefresh.setRefreshing(show); } /** * Handle the SwipeRefreshView. * * @param enabled If true then the SwipeRefreshView will be enabled */ public void setSwipeRefreshEnabled(boolean enabled) { swipeRefreshEnabled = enabled; if (swipeRefresh != null) swipeRefresh.setEnabled(enabled); } /** * Check of task contains any other taskjob. * * @param taskJob1 The first Taskjob to compare * @param taskJob2 The second Taskjob to compare * @return boolean True if they contain the taskjob */ private boolean containsTask(TaskJob taskJob1, TaskJob taskJob2) { return taskJob1.toString().contains(taskJob2.toString()); } /** * Browse trough the anime/manga lists. */ public void getBrowse(HashMap<String, String> query, boolean clear) { taskjob = TaskJob.BROWSE; isList = false; if (clear) { resetPage(); gl.clear(); if (ga == null) setAdapter(); ga.clear(); } boolean isEmpty = gl.isEmpty(); toggleLoadingIndicator((page == 1 && !isList()) || (taskjob.equals(TaskJob.FORCESYNC) && isEmpty)); toggleSwipeRefreshAnimation(page > 1 && !isList() || taskjob.equals(TaskJob.FORCESYNC)); loading = true; try { new NetworkTask(activity,listType, query, this).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR); } catch (Exception e) { Crashlytics.log(Log.ERROR, "Atarashii", "IGF.getBrowse(): " + e.getMessage()); Crashlytics.logException(e); } } /** * Get the anime/manga lists. * * @param clear If true then the whole list will be cleared and loaded * @param task Which list should be shown (top, popular, upcoming...) * @param list Which list type should be shown (completed, dropped, in progress...) */ public void getRecords(boolean clear, TaskJob task, int list) { if (task != null) taskjob = task; if (task != TaskJob.GETLIST && task != TaskJob.FORCESYNC && task != TaskJob.GETFRIENDLIST) { details = false; numberList = containsTask(taskjob, TaskJob.GETMOSTPOPULAR) || containsTask(taskjob, TaskJob.GETTOPRATED); resource = PrefManager.getTraditionalListEnabled() ? 
R.layout.record_igf_listview : R.layout.record_igf_gridview; isList = false; } else { isList = true; } if (list != this.list) this.list = list; /* only show loading indicator if * - is not own list and on page 1 * - force sync and list is empty (only show swipe refresh animation if not empty) * - clear is set */ boolean isEmpty = gl.isEmpty(); toggleLoadingIndicator((page == 1 && !isList()) || (taskjob.equals(TaskJob.FORCESYNC) && isEmpty) || clear); /* show swipe refresh animation if * - loading more pages * - forced update * - clear is unset */ toggleSwipeRefreshAnimation((page > 1 && !isList() || taskjob.equals(TaskJob.FORCESYNC)) && !clear); loading = true; try { if (clear) { resetPage(); gl.clear(); if (ga == null) setAdapter(); ga.clear(); } Bundle data = new Bundle(); data.putInt("page", page); networkTask = new NetworkTask(taskjob, listType, activity, data, this); ArrayList<String> args = new ArrayList<>(); if (taskjob.equals(TaskJob.GETFRIENDLIST)) { args.add(username); setSwipeRefreshEnabled(false); } else if (isList()) { setSwipeRefreshEnabled(true); args.add(ContentManager.listSortFromInt(list, listType)); args.add(String.valueOf(sortType)); args.add(String.valueOf(inverse)); } else { args.add(query); } networkTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, args.toArray(new String[args.size()])); } catch (Exception e) { Crashlytics.log(Log.ERROR, "Atarashii", "IGF.getRecords(): " + e.getMessage()); Crashlytics.logException(e); } } /** * Get the search results of the query. * * @param search The query that should be searched for */ public void searchRecords(String search) { if (search != null && !search.equals(query) && !search.isEmpty()) { // no need for searching the same again or empty string query = search; page = 1; setSwipeRefreshEnabled(false); getRecords(true, TaskJob.SEARCH, 0); } } /** * Reset the page number of anime/manga lists. */ private void resetPage() { page = 1; if (Gridview != null) { Gridview.requestFocusFromTouch(); Gridview.post(new Runnable() { @Override public void run() { Gridview.setSelection(0); } }); } } /** * Set the adapter anime/manga. */ private void setAdapter() { ga = new ListViewAdapter<>(context, resource); ga.setNotifyOnChange(true); } /** * Refresh all the covers. */ private void refresh() { try { if (ga == null) setAdapter(); ga.clear(); ga.supportAddAll(gl); if (Gridview.getAdapter() == null) Gridview.setAdapter(ga); } catch (Exception e) { if (APIHelper.isNetworkAvailable(context)) { Crashlytics.log(Log.ERROR, "Atarashii", "IGF.refresh(): " + e.getMessage()); Crashlytics.logException(e); if (taskjob.equals(TaskJob.SEARCH)) { Theme.Snackbar(activity, R.string.toast_error_Search); } else { if (isAnime()) Theme.Snackbar(activity, R.string.toast_error_Anime_Sync); else Theme.Snackbar(activity, R.string.toast_error_Manga_Sync); } } else { Theme.Snackbar(activity, R.string.toast_error_noConnectivity); } } loading = false; } /** * Inverse the list and refresh it. */ public void inverse() { this.inverse = !inverse; if (taskjob.equals(TaskJob.GETFRIENDLIST)) { if (gl != null && gl.size() > 0) Collections.reverse(gl); refresh(); } else { getRecords(true, taskjob, list); } } /** * Set the list with the new page/list. 
*/ @SuppressWarnings("unchecked") // Don't panic, we handle possible class cast exceptions @Override public void onNetworkTaskFinished(Object result, TaskJob job, ListType type) { ArrayList resultList; try { if (type == ListType.ANIME) resultList = (ArrayList<Anime>) result; else resultList = (ArrayList<Manga>) result; } catch (Exception e) { Crashlytics.log(Log.ERROR, "Atarashii", "IGF.onNetworkTaskFinished(): " + result.getClass().toString()); Crashlytics.logException(e); resultList = null; } if (resultList != null) { if (resultList.size() == 0 && taskjob.equals(TaskJob.SEARCH)) { if (this.page == 1) doRecordsLoadedCallback(job); } else { if (job.equals(TaskJob.FORCESYNC)) doRecordsLoadedCallback(job); if (clearAfterLoading || job.equals(TaskJob.FORCESYNC) || job.equals(TaskJob.GETFRIENDLIST)) { // a forced sync always reloads all data, so clear the list gl.clear(); clearAfterLoading = false; } hasmorepages = resultList.size() > 0; gl.addAll(resultList); if (taskjob.equals(TaskJob.GETFRIENDLIST)) { backGl.addAll(resultList); sortList(sortType); } else { refresh(); } } } else { doRecordsLoadedCallback(job); // no resultList ? something went wrong } networkTask = null; toggleSwipeRefreshAnimation(false); toggleLoadingIndicator(false); } @Override public void onNetworkTaskError(TaskJob job) { doRecordsLoadedCallback(job); activity.runOnUiThread(new Runnable() { @Override public void run() { toggleSwipeRefreshAnimation(false); toggleLoadingIndicator(false); } }); } /** * Trigger to the parent activity that the records are loaded. * * @param job Which list should be shown (top, popular, upcoming...) */ private void doRecordsLoadedCallback(TaskJob job) { if (callback != null) callback.onRecordsLoadingFinished(job); } @Override public void onScrollStateChanged(AbsListView view, int scrollState) { } /** * Load more pages if we are almost on the bottom. */ @Override public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { // don't do anything if there is nothing in the list if (!isList()) { if (firstVisibleItem == 0 && visibleItemCount == 0 && totalItemCount == 0) return; if (totalItemCount - firstVisibleItem <= (visibleItemCount * 2) && !loading && hasmorepages) { loading = true; page++; getRecords(false, null, list); } } } // user updated record on DetailsView, so update the list if necessary @Override public void onRecordStatusUpdated(ListType type) { // broadcast received if (type != null && type.equals(listType) && isList()) { clearAfterLoading = true; getRecords(false, TaskJob.GETLIST, list); } } /** * Handle the gridview click by navigating to the detailview. */ @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { Answers.getInstance().logContentView(new ContentViewEvent() .putContentName("DetailView") .putContentType(String.valueOf(listType)) .putContentId(String.valueOf(listType).charAt(0) + String.valueOf(gl.get(position).getId()))); callback.onItemClick(gl.get(position).getId(), listType, username); } static class ViewHolder { TextView label; TextView progressCount; TextView flavourText; ImageView cover; ImageView actionButton; TextView scoreCount; TextView typeCount; TextView statusCount; } /** * The custom adapter for the covers anime/manga. 
*/ public class ListViewAdapter<T> extends ArrayAdapter<T> { final LayoutInflater inflater; final boolean listView; final String StatusWatching; final String StatusReading; final String StatusCompleted; final String StatusOnHold; final String StatusDropped; final String StatusPlanningToWatch; final String StatusPlanningToRead; final String Number; final boolean isMAL; public ListViewAdapter(Context context, int resource) { super(context, resource); // Get the string to make the scrolling smoother StatusWatching = getString(R.string.cover_Watching); StatusReading = getString(R.string.cover_Reading); StatusCompleted = getString(R.string.cover_Completed); StatusOnHold = getString(R.string.cover_OnHold); StatusDropped = getString(R.string.cover_Dropped); StatusPlanningToWatch = getString(R.string.cover_PlanningToWatch); StatusPlanningToRead = getString(R.string.cover_PlanningToRead); Number = getString(R.string.label_Number); isMAL = AccountService.isMAL(); inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE); listView = resource != R.layout.record_igf_listview; } @SuppressWarnings("deprecation") public View getView(int position, View view, ViewGroup parent) { final GenericRecord record = gl.get(position); Anime animeRecord; Manga mangaRecord; ViewHolder viewHolder = null; String status; int progress; if (isAnime()) { animeRecord = (Anime) record; status = animeRecord.getWatchedStatus(); progress = animeRecord.getWatchedEpisodes(); } else { mangaRecord = (Manga) record; status = mangaRecord.getReadStatus(); progress = useSecondaryAmounts ? mangaRecord.getVolumesRead() : mangaRecord.getChaptersRead(); } if (view != null) viewHolder = (ViewHolder) view.getTag(); if (view == null || (details && viewHolder.scoreCount == null) || (!details && viewHolder.scoreCount != null)) { view = inflater.inflate(resource, parent, false); viewHolder = new ViewHolder(); viewHolder.label = (TextView) view.findViewById(R.id.animeName); viewHolder.progressCount = (TextView) view.findViewById(R.id.watchedCount); viewHolder.cover = (ImageView) view.findViewById(R.id.coverImage); viewHolder.actionButton = (ImageView) view.findViewById(R.id.popUpButton); viewHolder.flavourText = (TextView) view.findViewById(R.id.stringWatched); viewHolder.scoreCount = (TextView) view.findViewById(R.id.scoreCount); viewHolder.typeCount = (TextView) view.findViewById(R.id.typeCount); viewHolder.statusCount = (TextView) view.findViewById(R.id.statusCount); view.setTag(viewHolder); if (listView) view.getLayoutParams().height = height; } try { viewHolder.label.setText(record.getTitle()); if (details) { viewHolder.scoreCount.setText(String.valueOf(record.getScore())); viewHolder.typeCount.setText(record.getType()); viewHolder.statusCount.setText(record.getStatus()); } if (isList() && status != null) { viewHolder.progressCount.setText(String.valueOf(progress)); switch (status) { case "watching": viewHolder.flavourText.setText(StatusWatching); viewHolder.progressCount.setVisibility(View.VISIBLE); if (popupEnabled) { viewHolder.actionButton.setVisibility(View.VISIBLE); viewHolder.actionButton.setOnClickListener(new ABOnClickListener(record)); } else { viewHolder.actionButton.setVisibility(View.GONE); } break; case "reading": viewHolder.flavourText.setText(StatusReading); viewHolder.progressCount.setVisibility(View.VISIBLE); if (popupEnabled) { viewHolder.actionButton.setVisibility(View.VISIBLE); viewHolder.actionButton.setOnClickListener(new ABOnClickListener(record)); } else { 
viewHolder.actionButton.setVisibility(View.GONE); } break; case "completed": viewHolder.flavourText.setText(StatusCompleted); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; case "on-hold": viewHolder.flavourText.setText(StatusOnHold); viewHolder.progressCount.setVisibility(View.VISIBLE); viewHolder.actionButton.setVisibility(View.GONE); break; case "dropped": viewHolder.flavourText.setText(StatusDropped); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; case "plan to watch": viewHolder.flavourText.setText(StatusPlanningToWatch); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; case "plan to read": viewHolder.flavourText.setText(StatusPlanningToRead); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; default: viewHolder.flavourText.setText(""); viewHolder.actionButton.setVisibility(View.GONE); viewHolder.progressCount.setVisibility(View.GONE); break; } } else { viewHolder.actionButton.setVisibility(View.GONE); if (isMAL && numberList) { viewHolder.progressCount.setVisibility(View.VISIBLE); viewHolder.progressCount.setText(String.valueOf(position + 1)); viewHolder.flavourText.setText(Number); } else { viewHolder.progressCount.setVisibility(View.GONE); viewHolder.flavourText.setText(getString(R.string.unknown)); } } Picasso.with(context) .load(record.getImageUrl()) .error(R.drawable.cover_error) .placeholder(R.drawable.cover_loading) .into(viewHolder.cover); } catch (Exception e) { Theme.logTaskCrash("IGF", "ListViewAdapter()", e); } return view; } public void supportAddAll(Collection<? extends T> collection) { for (T record : collection) { this.add(record); } } /** * Custom grid clicker for passing the right record */ public class ABOnClickListener implements View.OnClickListener { final GenericRecord record; public ABOnClickListener(GenericRecord record) { this.record = record; } @Override public void onClick(View view) { PopupMenu popup = new PopupMenu(context, view); popup.getMenuInflater().inflate(R.menu.record_popup, popup.getMenu()); if (!isAnime()) popup.getMenu().findItem(R.id.plusOne).setTitle(R.string.action_PlusOneRead); popup.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { switch (item.getItemId()) { case R.id.plusOne: if (isAnime()) setProgressPlusOne((Anime) record, null); else setProgressPlusOne(null, (Manga) record); break; case R.id.markCompleted: if (isAnime()) setMarkAsComplete((Anime) record, null); else setMarkAsComplete(null, (Manga) record); break; } return true; } }); popup.show(); } } } public interface IGFCallbackListener { void onIGFReady(IGF igf); void onRecordsLoadingFinished(TaskJob job); void onItemClick(int id, ListType listType, String username); } }
Fix IGF backgroundcolor
Atarashii/src/net/somethingdreadful/MAL/IGF.java
Fix IGF backgroundcolor
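The ListViewAdapter.getView() code in the record above follows the standard convertView/ViewHolder recycling pattern: inflate a row once, cache its findViewById() lookups in the view tag, and reuse them when the row is recycled. The sketch below shows that pattern in isolation; the TitleAdapter class name and the use of the built-in simple_list_item_1 framework layout are illustrative assumptions and are not part of the Atarashii sources.

// Minimal sketch of the convertView/ViewHolder recycling pattern used by
// ListViewAdapter.getView() above. Only framework resources are used
// (android.R.layout.simple_list_item_1), so no app-specific R ids are needed.
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;

import java.util.List;

public class TitleAdapter extends ArrayAdapter<String> {
    // Holder caches the findViewById() results for one recycled row view.
    private static class Holder {
        TextView label;
    }

    public TitleAdapter(Context context, List<String> titles) {
        super(context, android.R.layout.simple_list_item_1, titles);
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        Holder holder;
        if (convertView == null) {
            // First use of this slot: inflate the row and cache its views.
            convertView = LayoutInflater.from(getContext())
                    .inflate(android.R.layout.simple_list_item_1, parent, false);
            holder = new Holder();
            holder.label = (TextView) convertView.findViewById(android.R.id.text1);
            convertView.setTag(holder);
        } else {
            // Recycled row: reuse the cached views instead of searching again.
            holder = (Holder) convertView.getTag();
        }
        holder.label.setText(getItem(position));
        return convertView;
    }
}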
Java
bsd-2-clause
35d5d932db47e9b2de51abdb9e573de39f073b40
0
MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2,MattDevo/edk2
/** @file Copyright (c) 2006, Intel Corporation All rights reserved. This program and the accompanying materials are licensed and made available under the terms and conditions of the BSD License which accompanies this distribution. The full text of the license may be found at http://opensource.org/licenses/bsd-license.php THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. **/ package org.tianocore.migration; import java.awt.*; import java.awt.event.*; import java.io.*; import java.util.*; import javax.swing.*; public final class FirstPanel extends JPanel implements ActionListener, ItemListener, UI { /** * Define class Serial Version UID */ private static final long serialVersionUID = 207759413522910399L; private String modulepath; private ModuleInfo mi; private JButton moduleButton, goButton, msaEditorButton, criticButton; private JTextField moduletext; private JTextArea log; private JFileChooser fc; private JCheckBox filebox, screenbox, mibox, criticbox, defaultpathbox; private boolean tofile = true, toscreen = true; private PrintWriter logfile; FirstPanel() throws Exception { GridBagLayout gridbag = new GridBagLayout(); setLayout(gridbag); GridBagConstraints cst = new GridBagConstraints(); goButton = new JButton("Go"); goButton.addActionListener(this); goButton.setActionCommand("go"); moduleButton = new JButton("Choose ModulePath"); moduleButton.addActionListener(this); msaEditorButton = new JButton("MsaEditor"); msaEditorButton.addActionListener(this); criticButton = new JButton("Critic"); criticButton.addActionListener(this); moduletext = new JTextField(30); filebox = new JCheckBox("Output to logfile", true); filebox.addItemListener(this); screenbox = new JCheckBox("Specify logfile", false); screenbox.addItemListener(this); mibox = new JCheckBox("Print ModuleInfo", false); mibox.addItemListener(this); ModuleInfo.printModuleInfo = false; criticbox = new JCheckBox("Run Critic", true); criticbox.addItemListener(this); ModuleInfo.doCritic = true; defaultpathbox = new JCheckBox("Use Default Output Path", true); defaultpathbox.addItemListener(this); JPanel modulePanel = new JPanel(); modulePanel.add(moduleButton); modulePanel.add(moduletext); modulePanel.add(goButton); //modulePanel.add(msaEditorButton); cst.gridx = 0; cst.gridy = 0; //cst.gridwidth = GridBagConstraints.REMAINDER; gridbag.setConstraints(modulePanel, cst); add(modulePanel); cst.gridx = 1; cst.gridy = 0; gridbag.setConstraints(criticButton, cst); //add(criticButton); JPanel checkboxPanel = new JPanel(); checkboxPanel.setLayout(new BoxLayout(checkboxPanel, BoxLayout.Y_AXIS)); checkboxPanel.add(filebox); checkboxPanel.add(screenbox); checkboxPanel.add(mibox); checkboxPanel.add(criticbox); checkboxPanel.add(defaultpathbox); cst.gridx = 1; cst.gridy = 1; //cst.gridheight = 2; gridbag.setConstraints(checkboxPanel, cst); add(checkboxPanel); log = new JTextArea(10,20); log.setMargin(new Insets(5,5,5,5)); log.setEditable(false); JScrollPane logScrollPane = new JScrollPane(log); cst.gridx = 0; cst.gridy = 1; cst.fill = GridBagConstraints.BOTH; gridbag.setConstraints(logScrollPane, cst); add(logScrollPane); fc = new JFileChooser(); fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); } //---------------------------------------------------------------------------------------// public boolean yesOrNo(String question) { return JOptionPane.showConfirmDialog(this, question, "Yes or No", JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION; } 
public void print(String message) { if (toscreen == true) { log.append(message); System.out.print(message); } if (tofile == true) { logfile.append(message); } } public void println(String message) { print(message + "\n"); } public void println(Set<String> hash) { if (toscreen == true) { log.append(hash + "\n"); System.out.println(hash); } if (tofile == true) { logfile.append(hash + "\n"); } } public String choose(String message, Object[] choicelist) { return (String)JOptionPane.showInputDialog(this, message,"Choose",JOptionPane.PLAIN_MESSAGE,null,choicelist,choicelist[0]); } public String getInput(String message) { return (String)JOptionPane.showInputDialog(message); } //---------------------------------------------------------------------------------------// public String getFilepath() { if (fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) { log.append(fc.getSelectedFile().getAbsolutePath() + "\n"); return fc.getSelectedFile().getAbsolutePath(); } return null; } //---------------------------------------------------------------------------------------// public void actionPerformed(ActionEvent e) { if ( e.getSource() == moduleButton ) { modulepath = getFilepath(); } if ( e.getSource() == goButton ) { try { logfile = new PrintWriter(new BufferedWriter(new FileWriter(modulepath + File.separator + "migration.log"))); ModuleInfo.triger(modulepath); logfile.flush(); } catch (Exception en) { println(en.getMessage()); } } if ( e.getSource() == msaEditorButton) { try { MsaTreeEditor.init(mi, this); } catch (Exception en) { println(en.getMessage()); } } if ( e.getSource() == criticButton) { try { Critic.fireAt(modulepath); } catch (Exception en) { println(en.getMessage()); } } } public void itemStateChanged(ItemEvent e) { if (e.getSource() == filebox) { if (e.getStateChange() == ItemEvent.DESELECTED) { System.out.println("filebox DESELECTED"); } else if (e.getStateChange() == ItemEvent.SELECTED) { System.out.println("filebox SELECTED"); } } else if (e.getSource() == screenbox) { if (e.getStateChange() == ItemEvent.DESELECTED) { System.out.println("screenbox DESELECTED"); } else if (e.getStateChange() == ItemEvent.SELECTED) { System.out.println("screenbox SELECTED"); } } else if (e.getSource() == mibox) { if (e.getStateChange() == ItemEvent.DESELECTED) { ModuleInfo.printModuleInfo = false; } else if (e.getStateChange() == ItemEvent.SELECTED) { ModuleInfo.printModuleInfo = true; } } else if (e.getSource() == criticbox) { if (e.getStateChange() == ItemEvent.DESELECTED) { ModuleInfo.doCritic = false; System.out.println("criticbox DESELECTED"); } else if (e.getStateChange() == ItemEvent.SELECTED) { ModuleInfo.doCritic = true; System.out.println("criticbox SELECTED"); } } else if (e.getSource() == defaultpathbox) { if (e.getStateChange() == ItemEvent.DESELECTED) { System.out.println("defaultpathbox DESELECTED"); } else if (e.getStateChange() == ItemEvent.SELECTED) { System.out.println("defaultpathbox SELECTED"); } } } //---------------------------------------------------------------------------------------// public static FirstPanel init() throws Exception { //UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName()); UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); //UIManager.setLookAndFeel("javax.swing.plaf.metal.MetalLookAndFeel"); //UIManager.setLookAndFeel("com.sun.java.swing.plaf.windows.WindowsLookAndFeel"); //UIManager.setLookAndFeel("com.sun.java.swing.plaf.gtk.GTKLookAndFeel"); 
//UIManager.setLookAndFeel("com.sun.java.swing.plaf.motif.MotifLookAndFeel"); JFrame frame = new JFrame("MigrationTools"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); FirstPanel fp = new FirstPanel(); //fp.setLayout(new GridBagLayout()); //fp.setLayout(new BoxLayout(fp, BoxLayout.Y_AXIS)); fp.setOpaque(true); frame.setContentPane(fp); frame.pack(); frame.setVisible(true); return fp; } }
Tools/Source/MigrationTools/org/tianocore/migration/FirstPanel.java
/** @file Copyright (c) 2006, Intel Corporation All rights reserved. This program and the accompanying materials are licensed and made available under the terms and conditions of the BSD License which accompanies this distribution. The full text of the license may be found at http://opensource.org/licenses/bsd-license.php THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. **/ package org.tianocore.migration; import java.awt.*; import java.awt.event.*; import java.io.*; import java.util.*; import javax.swing.*; public final class FirstPanel extends JPanel implements ActionListener, ItemListener, UI { /** * Define class Serial Version UID */ private static final long serialVersionUID = 207759413522910399L; private String modulepath; private ModuleInfo mi; private JButton moduleButton, goButton, msaEditorButton, criticButton; private JTextField moduletext; private JTextArea log; private JFileChooser fc; private JCheckBox filebox, screenbox, mibox, criticbox, defaultpathbox; private boolean tofile = true, toscreen = true; private PrintWriter logfile; FirstPanel() throws Exception { GridBagLayout gridbag = new GridBagLayout(); setLayout(gridbag); GridBagConstraints cst = new GridBagConstraints(); goButton = new JButton("Go"); goButton.addActionListener(this); goButton.setActionCommand("go"); moduleButton = new JButton("Choose ModulePath"); moduleButton.addActionListener(this); msaEditorButton = new JButton("MsaEditor"); msaEditorButton.addActionListener(this); criticButton = new JButton("Critic"); criticButton.addActionListener(this); moduletext = new JTextField(30); filebox = new JCheckBox("Output to logfile", true); filebox.addItemListener(this); screenbox = new JCheckBox("Specify logfile", false); screenbox.addItemListener(this); mibox = new JCheckBox("Print ModuleInfo", false); mibox.addItemListener(this); ModuleInfo.printModuleInfo = false; criticbox = new JCheckBox("Run Critic", true); criticbox.addItemListener(this); ModuleInfo.doCritic = true; defaultpathbox = new JCheckBox("Use Default Output Path", true); defaultpathbox.addItemListener(this); JPanel modulePanel = new JPanel(); modulePanel.add(moduleButton); modulePanel.add(moduletext); modulePanel.add(goButton); //modulePanel.add(msaEditorButton); cst.gridx = 0; cst.gridy = 0; //cst.gridwidth = GridBagConstraints.REMAINDER; gridbag.setConstraints(modulePanel, cst); add(modulePanel); cst.gridx = 1; cst.gridy = 0; gridbag.setConstraints(criticButton, cst); //add(criticButton); JPanel checkboxPanel = new JPanel(); checkboxPanel.setLayout(new BoxLayout(checkboxPanel, BoxLayout.Y_AXIS)); checkboxPanel.add(filebox); checkboxPanel.add(screenbox); checkboxPanel.add(mibox); checkboxPanel.add(criticbox); checkboxPanel.add(defaultpathbox); cst.gridx = 1; cst.gridy = 1; //cst.gridheight = 2; gridbag.setConstraints(checkboxPanel, cst); add(checkboxPanel); log = new JTextArea(10,20); log.setMargin(new Insets(5,5,5,5)); log.setEditable(false); JScrollPane logScrollPane = new JScrollPane(log); cst.gridx = 0; cst.gridy = 1; cst.fill = GridBagConstraints.BOTH; gridbag.setConstraints(logScrollPane, cst); add(logScrollPane); fc = new JFileChooser(); fc.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES); } //---------------------------------------------------------------------------------------// public boolean yesOrNo(String question) { return JOptionPane.showConfirmDialog(this, question, "Yes or No", JOptionPane.YES_NO_OPTION) == 
JOptionPane.YES_OPTION; } public void print(String message) { if (toscreen == true) { log.append(message); System.out.print(message); } if (tofile == true) { logfile.append(message); } } public void println(String message) { print(message + "\n"); } public void println(Set<String> hash) { if (toscreen == true) { log.append(hash + "\n"); System.out.println(hash); } if (tofile == true) { logfile.append(hash + "\n"); } } public String choose(String message, Object[] choicelist) { return (String)JOptionPane.showInputDialog(this, message,"Choose",JOptionPane.PLAIN_MESSAGE,null,choicelist,choicelist[0]); } public String getInput(String message) { return (String)JOptionPane.showInputDialog(message); } //---------------------------------------------------------------------------------------// public String getFilepath() { if (fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) { log.append(fc.getSelectedFile().getAbsolutePath() + "\n"); return fc.getSelectedFile().getAbsolutePath(); } return null; } //---------------------------------------------------------------------------------------// public void actionPerformed(ActionEvent e) { if ( e.getSource() == moduleButton ) { modulepath = getFilepath(); } if ( e.getSource() == goButton ) { try { logfile = new PrintWriter(new BufferedWriter(new FileWriter(modulepath + File.separator + "migration.log"))); ModuleInfo.triger(modulepath); logfile.flush(); } catch (Exception en) { println(en.getMessage()); } } if ( e.getSource() == msaEditorButton) { try { MsaTreeEditor.init(mi, this); } catch (Exception en) { println(en.getMessage()); } } if ( e.getSource() == criticButton) { try { Critic.fireAt(modulepath); } catch (Exception en) { println(en.getMessage()); } } } public void itemStateChanged(ItemEvent e) { if (e.getSource() == filebox) { if (e.getStateChange() == ItemEvent.DESELECTED) { System.out.println("filebox DESELECTED"); } else if (e.getStateChange() == ItemEvent.SELECTED) { System.out.println("filebox SELECTED"); } } else if (e.getSource() == screenbox) { if (e.getStateChange() == ItemEvent.DESELECTED) { System.out.println("screenbox DESELECTED"); } else if (e.getStateChange() == ItemEvent.SELECTED) { System.out.println("screenbox SELECTED"); } } else if (e.getSource() == mibox) { if (e.getStateChange() == ItemEvent.DESELECTED) { ModuleInfo.printModuleInfo = false; } else if (e.getStateChange() == ItemEvent.SELECTED) { ModuleInfo.printModuleInfo = true; } } else if (e.getSource() == criticbox) { if (e.getStateChange() == ItemEvent.DESELECTED) { ModuleInfo.doCritic = false; System.out.println("criticbox DESELECTED"); } else if (e.getStateChange() == ItemEvent.SELECTED) { ModuleInfo.doCritic = true; System.out.println("criticbox SELECTED"); } } else if (e.getSource() == defaultpathbox) { if (e.getStateChange() == ItemEvent.DESELECTED) { System.out.println("defaultpathbox DESELECTED"); } else if (e.getStateChange() == ItemEvent.SELECTED) { System.out.println("defaultpathbox SELECTED"); } } } //---------------------------------------------------------------------------------------// public static FirstPanel init() throws Exception { //UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName()); UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); //UIManager.setLookAndFeel("javax.swing.plaf.metal.MetalLookAndFeel"); //UIManager.setLookAndFeel("com.sun.java.swing.plaf.windows.WindowsLookAndFeel"); //UIManager.setLookAndFeel("com.sun.java.swing.plaf.gtk.GTKLookAndFeel"); 
//UIManager.setLookAndFeel("com.sun.java.swing.plaf.motif.MotifLookAndFeel"); JFrame frame = new JFrame("MigrationTools"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); FirstPanel fp = new FirstPanel(); //fp.setLayout(new GridBagLayout()); //fp.setLayout(new BoxLayout(fp, BoxLayout.Y_AXIS)); fp.setOpaque(true); frame.setContentPane(fp); frame.pack(); frame.setVisible(true); return fp; } }
Only allow to choose directory when choosing old code. git-svn-id: 5648d1bec6962b0a6d1d1b40eba8cf5cdb62da3d@1357 6f19259b-4bc3-4df7-8a09-765794883524
Tools/Source/MigrationTools/org/tianocore/migration/FirstPanel.java
Only allow to choose directory when choosing old code.
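The change recorded above swaps JFileChooser.FILES_AND_DIRECTORIES for JFileChooser.DIRECTORIES_ONLY so that only a module directory can be picked. Below is a minimal standalone sketch of that selection mode; the DirectoryPicker class name and the null parent passed to showOpenDialog() are assumptions for illustration only.

// Minimal sketch of directory-only selection with JFileChooser, matching the
// setFileSelectionMode() change in the commit above.
import javax.swing.JFileChooser;

public class DirectoryPicker {
    /** Returns the chosen directory path, or null if the dialog was cancelled. */
    public static String pickDirectory() {
        JFileChooser fc = new JFileChooser();
        // DIRECTORIES_ONLY hides plain files from the chooser; the old code
        // in the record used FILES_AND_DIRECTORIES instead.
        fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
        if (fc.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) {
            return fc.getSelectedFile().getAbsolutePath();
        }
        return null;
    }
}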
Java
bsd-2-clause
d82c3342642035763e0c316576128c8754a618ec
0
IMFTool/regxmllib,ethanchan747/regxmllib,sandflow/regxmllib,ethanchan747/regxmllib,IMFTool/regxmllib,sandflow/regxmllib
/* * Copyright (c) 2014, Pierre-Anthony Lemieux (pal@sandflow.com) * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package com.sandflow.smpte.klv; import com.sandflow.smpte.klv.exceptions.KLVException; import static com.sandflow.smpte.klv.exceptions.KLVException.MAX_LENGTH_EXCEEED; import com.sandflow.smpte.util.UL; import java.io.DataInput; import java.io.DataInputStream; import java.io.EOFException; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; /** * KLVInputStream allows KLV data structures to be read from an InputStream */ public class KLVInputStream extends InputStream implements DataInput { public enum ByteOrder { LITTLE_ENDIAN, BIG_ENDIAN } private DataInputStream dis; private ByteOrder byteorder; /** * Assumes big endian byte ordering. * * @param is InputStream to read from */ public KLVInputStream(InputStream is) { this(is, ByteOrder.BIG_ENDIAN); } /** * Allows the byte ordering to be specified. * * @param is InputStream to read from * @param byteorder Byte ordering of the file */ public KLVInputStream(InputStream is, ByteOrder byteorder) { if (is == null) throw new NullPointerException(); dis = new DataInputStream(is); this.byteorder = byteorder; } /** * Byte order of the stream. * * @return Byte order of the stream */ public ByteOrder getByteorder() { return byteorder; } /** * Reads a single UL. * * @return UL * @throws IOException * @throws EOFException */ public UL readUL() throws IOException, EOFException { byte[] ul = new byte[16]; readFully(ul); return new UL(ul); } /** * Reads a single BER-encoded length. The maximum length of the encoded length is 8 bytes. 
* * @return Length * @throws EOFException * @throws IOException * @throws KLVException */ public long readBERLength() throws EOFException, IOException, KLVException { long val = 0; int b = read(); if (b <= 0) { throw new EOFException(); } if ((b & 0x80) == 0) { return b; } int bersz = (b & 0x0f); if (bersz > 8) { throw new KLVException(MAX_LENGTH_EXCEEED); } byte[] octets = new byte[bersz]; if (read(octets) < bersz) { throw new EOFException(); } for (int i = 0; i < bersz; i++) { int tmp = (((int) octets[i]) & 0xFF); val = (val << 8) + tmp; if (val > Integer.MAX_VALUE) { throw new KLVException(MAX_LENGTH_EXCEEED); } } return val; } /** * Reads a single KLV triplet. * * @return KLV Triplet * @throws IOException * @throws EOFException * @throws KLVException */ public Triplet readTriplet() throws IOException, EOFException, KLVException { UL ul = readUL(); long len = readBERLength(); if (len > Integer.MAX_VALUE) { throw new KLVException(MAX_LENGTH_EXCEEED); } byte[] value = new byte[(int) len]; if (len != read(value)) { throw new EOFException("EOF reached while reading Value."); } return new MemoryTriplet(ul, value); } @Override public final int read(byte[] bytes) throws IOException { return dis.read(bytes); } @Override public final int read(byte[] bytes, int i, int i1) throws IOException { return dis.read(bytes, i, i1); } @Override public final void readFully(byte[] bytes) throws IOException { dis.readFully(bytes); } @Override public final void readFully(byte[] bytes, int i, int i1) throws IOException { dis.readFully(bytes, i, i1); } @Override public final int skipBytes(int i) throws IOException { return dis.skipBytes(i); } @Override public final boolean readBoolean() throws IOException { return dis.readBoolean(); } @Override public final byte readByte() throws IOException { return dis.readByte(); } @Override public final int readUnsignedByte() throws IOException { return dis.readUnsignedByte(); } @Override public final short readShort() throws IOException { if (byteorder == ByteOrder.BIG_ENDIAN) { return dis.readShort(); } else { int lo = readUnsignedByte(); int hi = readUnsignedByte(); return (short) (lo + (hi << 8)); } } @Override public final int readUnsignedShort() throws IOException { if (byteorder == ByteOrder.BIG_ENDIAN) { return dis.readUnsignedShort(); } else { int lo = readUnsignedByte(); int hi = readUnsignedByte(); return lo + hi << 8; } } @Override public final char readChar() throws IOException { return dis.readChar(); } @Override public final int readInt() throws IOException { if (byteorder == ByteOrder.BIG_ENDIAN) { return dis.readInt(); } else { int b0 = readUnsignedByte(); int b1 = readUnsignedByte(); int b2 = readUnsignedByte(); int b3 = readUnsignedByte(); return b0 + (b1 << 8) + (b2 << 16) + (b3 << 24); } } public long readUnsignedInt() throws IOException, EOFException { if (byteorder == ByteOrder.BIG_ENDIAN) { return ((long) dis.readInt()) & 0xFFFF; } else { int b0 = readUnsignedByte(); int b1 = readUnsignedByte(); int b2 = readUnsignedByte(); int b3 = readUnsignedByte(); return ((long) b0 + (b1 << 8) + (b2 << 16) + (b3 << 24)) & 0xFFFF; } } @Override public final long readLong() throws IOException { if (byteorder == ByteOrder.BIG_ENDIAN) { return dis.readLong(); } else { int b0 = readUnsignedByte(); int b1 = readUnsignedByte(); int b2 = readUnsignedByte(); int b3 = readUnsignedByte(); int b4 = readUnsignedByte(); int b5 = readUnsignedByte(); int b6 = readUnsignedByte(); int b7 = readUnsignedByte(); return b0 + (b1 << 8) + (b2 << 16) + (b3 << 24) + (b4 << 32) + (b5 << 
40) + (b6 << 48) + (b7 << 56); } } @Override public final float readFloat() throws IOException { return dis.readFloat(); } @Override public final double readDouble() throws IOException { return dis.readDouble(); } @Override public final String readLine() throws IOException { return dis.readLine(); } @Override public final String readUTF() throws IOException { return dis.readUTF(); } public static final String readUTF(DataInput di) throws IOException { return DataInputStream.readUTF(di); } @Override public int read() throws IOException { return dis.read(); } @Override public long skip(long l) throws IOException { return dis.skip(l); } @Override public int available() throws IOException { return dis.available(); } @Override public void close() throws IOException { dis.close(); } @Override public synchronized void mark(int i) { dis.mark(i); } @Override public synchronized void reset() throws IOException { dis.reset(); } @Override public boolean markSupported() { return dis.markSupported(); } protected static final void swap(byte[] array, int i, int j) { byte tmp = array[i]; array[i] = array[j]; array[j] = tmp; } protected static final void uuidLEtoBE(byte[] uuid) { /* swap the 32-bit word of the UUID */ swap(uuid, 0, 3); swap(uuid, 1, 2); /* swap the first 16-bit word of the UUID */ swap(uuid, 4, 5); /* swap the second 16-bit word of the UUID */ swap(uuid, 6, 7); } }
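readBERLength() in the new_contents above decodes a BER length: a first byte with the high bit clear is the length itself (short form); otherwise its low nibble gives the number of following length octets, capped at 8. The sketch below performs the same decoding over an in-memory buffer. The BerLength class and decode() signature are assumptions, and the 0x0F mask mirrors this particular implementation rather than the wider 0x7F mask used by general BER.

// Standalone sketch of the BER length decoding performed by readBERLength() above,
// operating on a byte array instead of a stream.
public class BerLength {
    /** Decodes a BER-encoded length starting at offset and returns the length value. */
    public static long decode(byte[] buf, int offset) {
        int first = buf[offset] & 0xFF;
        if ((first & 0x80) == 0) {
            return first;               // short form: the byte is the length
        }
        int count = first & 0x0F;       // long form: number of length octets, as in the code above
        long value = 0;
        for (int i = 1; i <= count; i++) {
            value = (value << 8) | (buf[offset + i] & 0xFF);
        }
        return value;
    }

    public static void main(String[] args) {
        // 0x05 -> short form, length 5
        System.out.println(decode(new byte[] {0x05}, 0));                    // 5
        // 0x82 0x01 0x00 -> long form, two octets, length 256
        System.out.println(decode(new byte[] {(byte) 0x82, 0x01, 0x00}, 0)); // 256
    }
}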
regxmllib/src/main/java/com/sandflow/smpte/klv/KLVInputStream.java
/* * Copyright (c) 2014, Pierre-Anthony Lemieux (pal@sandflow.com) * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package com.sandflow.smpte.klv; import com.sandflow.smpte.klv.exceptions.KLVException; import static com.sandflow.smpte.klv.exceptions.KLVException.MAX_LENGTH_EXCEEED; import com.sandflow.smpte.util.UL; import java.io.DataInput; import java.io.DataInputStream; import java.io.EOFException; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; /** * KLVInputStream allows KLV data structures to be read from an InputStream */ public class KLVInputStream extends InputStream implements DataInput { public enum ByteOrder { LITTLE_ENDIAN, BIG_ENDIAN } private DataInputStream dis; private ByteOrder byteorder; /** * Assumes big endian byte ordering. * * @param is InputStream to read from */ public KLVInputStream(InputStream is) { this(is, ByteOrder.BIG_ENDIAN); } /** * Allows the byte ordering to be specified. * * @param is InputStream to read from * @param byteorder Byte ordering of the file */ public KLVInputStream(InputStream is, ByteOrder byteorder) { dis = new DataInputStream(is); this.byteorder = byteorder; } /** * Byte order of the stream. * * @return Byte order of the stream */ public ByteOrder getByteorder() { return byteorder; } /** * Reads a single UL. * * @return UL * @throws IOException * @throws EOFException */ public UL readUL() throws IOException, EOFException { byte[] ul = new byte[16]; if (read(ul) < ul.length) { throw new EOFException(); } return new UL(ul); } /** * Reads a single BER-encoded length. The maximum length of the encoded length is 8 bytes. 
 * * @return Length * @throws EOFException * @throws IOException * @throws KLVException */ public long readBERLength() throws EOFException, IOException, KLVException { long val = 0; int b = read(); if (b <= 0) { throw new EOFException(); } if ((b & 0x80) == 0) { return b; } int bersz = (b & 0x0f); if (bersz > 8) { throw new KLVException(MAX_LENGTH_EXCEEED); } byte[] octets = new byte[bersz]; if (read(octets) < bersz) { throw new EOFException(); } for (int i = 0; i < bersz; i++) { int tmp = (((int) octets[i]) & 0xFF); val = (val << 8) + tmp; if (val > Integer.MAX_VALUE) { throw new KLVException(MAX_LENGTH_EXCEEED); } } return val; } /** * Reads a single KLV triplet. * * @return KLV Triplet * @throws IOException * @throws EOFException * @throws KLVException */ public Triplet readTriplet() throws IOException, EOFException, KLVException { UL ul = readUL(); long len = readBERLength(); if (len > Integer.MAX_VALUE) { throw new KLVException(MAX_LENGTH_EXCEEED); } byte[] value = new byte[(int) len]; if (len != read(value)) { throw new EOFException("EOF reached while reading Value."); } return new MemoryTriplet(ul, value); } @Override public final int read(byte[] bytes) throws IOException { return dis.read(bytes); } @Override public final int read(byte[] bytes, int i, int i1) throws IOException { return dis.read(bytes, i, i1); } @Override public final void readFully(byte[] bytes) throws IOException { dis.readFully(bytes); } @Override public final void readFully(byte[] bytes, int i, int i1) throws IOException { dis.readFully(bytes, i, i1); } @Override public final int skipBytes(int i) throws IOException { return dis.skipBytes(i); } @Override public final boolean readBoolean() throws IOException { return dis.readBoolean(); } @Override public final byte readByte() throws IOException { return dis.readByte(); } @Override public final int readUnsignedByte() throws IOException { return dis.readUnsignedByte(); } @Override public final short readShort() throws IOException { if (byteorder == ByteOrder.BIG_ENDIAN) { return dis.readShort(); } else { int lo = readUnsignedByte(); int hi = readUnsignedByte(); return (short) (lo + (hi << 8)); } } @Override public final int readUnsignedShort() throws IOException { if (byteorder == ByteOrder.BIG_ENDIAN) { return dis.readUnsignedShort(); } else { int lo = readUnsignedByte(); int hi = readUnsignedByte(); return lo + (hi << 8); } } @Override public final char readChar() throws IOException { return dis.readChar(); } @Override public final int readInt() throws IOException { if (byteorder == ByteOrder.BIG_ENDIAN) { return dis.readInt(); } else { int b0 = readUnsignedByte(); int b1 = readUnsignedByte(); int b2 = readUnsignedByte(); int b3 = readUnsignedByte(); return b0 + (b1 << 8) + (b2 << 16) + (b3 << 24); } } public long readUnsignedInt() throws IOException, EOFException { if (byteorder == ByteOrder.BIG_ENDIAN) { return ((long) dis.readInt()) & 0xFFFFFFFFL; } else { int b0 = readUnsignedByte(); int b1 = readUnsignedByte(); int b2 = readUnsignedByte(); int b3 = readUnsignedByte(); return ((long) b0 + (b1 << 8) + (b2 << 16) + (b3 << 24)) & 0xFFFFFFFFL; } } @Override public final long readLong() throws IOException { if (byteorder == ByteOrder.BIG_ENDIAN) { return dis.readLong(); } else { /* read into longs so the 32..56-bit shifts below are not truncated to int width */ long b0 = readUnsignedByte(); long b1 = readUnsignedByte(); long b2 = readUnsignedByte(); long b3 = readUnsignedByte(); long b4 = readUnsignedByte(); long b5 = readUnsignedByte(); long b6 = readUnsignedByte(); long b7 = readUnsignedByte(); return b0 + (b1 << 8) + (b2 << 16) + (b3 << 24) + (b4 << 32) + (b5 << 
40) + (b6 << 48) + (b7 << 56); } } @Override public final float readFloat() throws IOException { return dis.readFloat(); } @Override public final double readDouble() throws IOException { return dis.readDouble(); } @Override public final String readLine() throws IOException { return dis.readLine(); } @Override public final String readUTF() throws IOException { return dis.readUTF(); } public static final String readUTF(DataInput di) throws IOException { return DataInputStream.readUTF(di); } @Override public int read() throws IOException { return dis.read(); } @Override public long skip(long l) throws IOException { return dis.skip(l); } @Override public int available() throws IOException { return dis.available(); } @Override public void close() throws IOException { dis.close(); } @Override public synchronized void mark(int i) { dis.mark(i); } @Override public synchronized void reset() throws IOException { dis.reset(); } @Override public boolean markSupported() { return dis.markSupported(); } protected static final void swap(byte[] array, int i, int j) { byte tmp = array[i]; array[i] = array[j]; array[j] = tmp; } protected static final void uuidLEtoBE(byte[] uuid) { /* swap the 32-bit word of the UUID */ swap(uuid, 0, 3); swap(uuid, 1, 2); /* swap the first 16-bit word of the UUID */ swap(uuid, 4, 5); /* swap the second 16-bit word of the UUID */ swap(uuid, 6, 7); } }
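The readBERLength method above implements BER length decoding: if the high bit of the first byte is clear, that byte itself is the length (short form); otherwise the low nibble gives the number of length octets that follow. Below is a standalone sketch of the same decoding over an in-memory buffer, assuming well-formed input; the bounds checks of the original (at most 8 octets, values capped at Integer.MAX_VALUE) are omitted, and the class name is illustrative.

// Standalone sketch of BER length decoding as used by readBERLength above.
public class BerLengthDemo {

    /** Decodes a BER length starting at offset 0 of the given buffer. */
    static long decodeBerLength(byte[] buf) {
        int first = buf[0] & 0xFF;
        if ((first & 0x80) == 0) {
            // short form: the byte itself is the length (0..127)
            return first;
        }
        // long form: the low nibble gives the number of length octets
        int octets = first & 0x0F;
        long value = 0;
        for (int i = 0; i < octets; i++) {
            value = (value << 8) + (buf[1 + i] & 0xFF);
        }
        return value;
    }

    public static void main(String[] args) {
        System.out.println(decodeBerLength(new byte[] { 0x05 }));                          // prints 5
        System.out.println(decodeBerLength(new byte[] { (byte) 0x83, 0x01, 0x00, 0x00 })); // prints 65536
    }
}

The second call decodes the long-form encoding 0x83 0x01 0x00 0x00 to 65536, the same value readBERLength would return for that byte sequence.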
Fix null reference bug in KLVInputStream.java
regxmllib/src/main/java/com/sandflow/smpte/klv/KLVInputStream.java
Fix null reference bug in KLVInputStream.java
Java
epl-1.0
b95f3dd61170528f69f65d9b3c593d3980df20b5
0
edgarmueller/emfstore-rest
/******************************************************************************* * Copyright 2011 Chair for Applied Software Engineering, * Technische Universitaet Muenchen. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: ******************************************************************************/ package org.eclipse.emf.emfstore.client.test.api; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.util.ConcurrentModificationException; import java.util.Set; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.edit.command.MoveCommand; import org.eclipse.emf.edit.command.RemoveCommand; import org.eclipse.emf.edit.domain.AdapterFactoryEditingDomain; import org.eclipse.emf.edit.domain.EditingDomain; import org.eclipse.emf.emfstore.bowling.BowlingPackage; import org.eclipse.emf.emfstore.bowling.Game; import org.eclipse.emf.emfstore.bowling.League; import org.eclipse.emf.emfstore.bowling.Matchup; import org.eclipse.emf.emfstore.bowling.Player; import org.eclipse.emf.emfstore.bowling.Tournament; import org.eclipse.emf.emfstore.client.ESLocalProject; import org.eclipse.emf.emfstore.client.ESWorkspaceProvider; import org.eclipse.emf.emfstore.common.model.ESModelElementId; import org.eclipse.emf.emfstore.internal.client.model.util.EMFStoreCommand; import org.junit.Test; /** * ModelElementTest. * * @author Tobias Verhoeven */ public class ModelElementTest { /** * Tests adding model elements. 
*/ @Test public void testAddModelElementsWithoutCommands() { ESLocalProject localProject = ESWorkspaceProvider.INSTANCE.getWorkspace().createLocalProject("Testprojekt"); League leagueA = ProjectChangeUtil.createLeague("America"); League leagueB = ProjectChangeUtil.createLeague("Europe"); localProject.getModelElements().add(leagueA); localProject.getModelElements().add(leagueB); assertEquals(2, localProject.getAllModelElements().size()); Player playerA = ProjectChangeUtil.createPlayer("Hans"); Player playerB = ProjectChangeUtil.createPlayer("Anton"); leagueA.getPlayers().add(playerA); leagueA.getPlayers().add(playerB); assertEquals(4, localProject.getAllModelElements().size()); Player playerC = ProjectChangeUtil.createPlayer("Paul"); Player playerD = ProjectChangeUtil.createPlayer("Klaus"); leagueA.getPlayers().add(playerC); leagueA.getPlayers().add(playerD); assertEquals(6, localProject.getAllModelElements().size()); assertEquals(2, localProject.getModelElements().size()); Tournament tournamentA = ProjectChangeUtil.createTournament(false); localProject.getModelElements().add(tournamentA); Matchup matchupA = ProjectChangeUtil.createMatchup(null, null); Matchup matchupB = ProjectChangeUtil.createMatchup(null, null); Game gameA = ProjectChangeUtil.createGame(playerA); Game gameB = ProjectChangeUtil.createGame(playerB); Game gameC = ProjectChangeUtil.createGame(playerC); Game gameD = ProjectChangeUtil.createGame(playerD); tournamentA.getMatchups().add(matchupA); matchupA.getGames().add(gameA); matchupA.getGames().add(gameB); assertEquals(10, localProject.getAllModelElements().size()); tournamentA.getMatchups().add(matchupB); matchupB.getGames().add(gameC); matchupB.getGames().add(gameD); assertEquals(13, localProject.getAllModelElements().size()); assertEquals(3, localProject.getModelElements().size()); } /** * Tests adding model elements. 
*/ @Test public void testAddModelElementsWithCommands() { final ESLocalProject localProject = ESWorkspaceProvider.INSTANCE.getWorkspace().createLocalProject( "Testprojekt"); final League leagueA = ProjectChangeUtil.createLeague("America"); final League leagueB = ProjectChangeUtil.createLeague("Europe"); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(leagueA); localProject.getModelElements().add(leagueB); } }.execute(); assertEquals(2, localProject.getAllModelElements().size()); final Player playerA = ProjectChangeUtil.createPlayer("Hans"); final Player playerB = ProjectChangeUtil.createPlayer("Anton"); new EMFStoreCommand() { @Override protected void doRun() { leagueA.getPlayers().add(playerA); leagueA.getPlayers().add(playerB); } }.execute(); assertEquals(4, localProject.getAllModelElements().size()); final Player playerC = ProjectChangeUtil.createPlayer("Paul"); final Player playerD = ProjectChangeUtil.createPlayer("Klaus"); new EMFStoreCommand() { @Override protected void doRun() { leagueA.getPlayers().add(playerC); leagueA.getPlayers().add(playerD); } }.execute(); assertEquals(6, localProject.getAllModelElements().size()); assertEquals(2, localProject.getModelElements().size()); final Tournament tournamentA = ProjectChangeUtil.createTournament(false); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournamentA); } }.execute(); final Matchup matchupA = ProjectChangeUtil.createMatchup(null, null); final Matchup matchupB = ProjectChangeUtil.createMatchup(null, null); final Game gameA = ProjectChangeUtil.createGame(playerA); final Game gameB = ProjectChangeUtil.createGame(playerB); final Game gameC = ProjectChangeUtil.createGame(playerC); final Game gameD = ProjectChangeUtil.createGame(playerD); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().add(matchupA); matchupA.getGames().add(gameA); matchupA.getGames().add(gameB); } }.execute(); assertEquals(10, localProject.getAllModelElements().size()); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().add(matchupB); matchupB.getGames().add(gameC); matchupB.getGames().add(gameD); } }.execute(); assertEquals(13, localProject.getAllModelElements().size()); assertEquals(3, localProject.getModelElements().size()); } @Test(expected = ConcurrentModificationException.class) public void testDeleteAllModelElementsWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); new EMFStoreCommand() { @Override protected void doRun() { Set<EObject> elements = localProject.getAllModelElements(); for (EObject object : elements) { localProject.getModelElements().remove(object); } } }.execute(); assertEquals(0, localProject.getAllModelElements().size()); } @Test(expected = ConcurrentModificationException.class) public void testDeleteAllModelElementsWithoutCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); Set<EObject> elements = localProject.getAllModelElements(); for (EObject object : elements) { localProject.getModelElements().remove(object); } assertEquals(0, localProject.getAllModelElements().size()); } /** * adds and deletes model element and undos the deletion. 
*/ @Test public void testDeleteUndoWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Player player = ProjectChangeUtil.createPlayer("Heinrich"); final int SIZE = localProject.getAllModelElements().size(); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(player); } }.run(false); assertTrue(localProject.getAllModelElements().contains(player)); assertTrue(localProject.contains(player)); ESModelElementId id = localProject.getModelElementId(player); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().remove(player); } }.run(false); assertEquals(SIZE, localProject.getAllModelElements().size()); assertFalse(localProject.getAllModelElements().contains(player)); assertFalse(localProject.contains(player)); assertNull(localProject.getModelElement(id)); new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(SIZE + 1, localProject.getAllModelElements().size()); assertFalse(localProject.getAllModelElements().contains(player)); assertFalse(localProject.contains(player)); assertNotNull(localProject.getModelElement(id)); } /** * adds and deletes model element and undos the deletion. */ @Test public void testDeleteUndoWithoutCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Player player = ProjectChangeUtil.createPlayer("Heinrich"); final int SIZE = localProject.getAllModelElements().size(); localProject.getModelElements().add(player); assertTrue(localProject.getAllModelElements().contains(player)); assertTrue(localProject.contains(player)); ESModelElementId id = localProject.getModelElementId(player); localProject.getModelElements().remove(player); assertEquals(SIZE, localProject.getAllModelElements().size()); assertFalse(localProject.getAllModelElements().contains(player)); assertFalse(localProject.contains(player)); localProject.undoLastOperation(); assertEquals(SIZE + 1, localProject.getAllModelElements().size()); assertFalse(localProject.getAllModelElements().contains(player)); assertFalse(localProject.contains(player)); assertNotNull(localProject.getModelElement(id)); } @Test public void testReferenceDeletionWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); final Player player = ProjectChangeUtil.createPlayer("Heinrich"); final Player player2 = ProjectChangeUtil.createPlayer("Walter"); final Player player3 = ProjectChangeUtil.createPlayer("Wilhelm"); new EMFStoreCommand() { @Override protected void doRun() { tournament.getPlayers().add(player); tournament.getPlayers().add(player2); tournament.getPlayers().add(player3); } }.run(false); assertEquals(3, tournament.getPlayers().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().remove(player2); } }.execute(); assertEquals(2, tournament.getPlayers().size()); assertTrue(localProject.contains(player)); assertTrue(localProject.contains(player3)); assertFalse(localProject.contains(player2)); } @Test public void testReferenceDeletionWithoutCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); 
localProject.getModelElements().add(tournament); final Player player = ProjectChangeUtil.createPlayer("Heinrich"); final Player player2 = ProjectChangeUtil.createPlayer("Walter"); final Player player3 = ProjectChangeUtil.createPlayer("Wilhelm"); tournament.getPlayers().add(player); tournament.getPlayers().add(player2); tournament.getPlayers().add(player3); assertEquals(3, tournament.getPlayers().size()); tournament.getPlayers().remove(player2); assertEquals(2, tournament.getPlayers().size()); assertTrue(localProject.contains(player)); assertTrue(localProject.contains(player3)); // TODO: enable this assert once the operation recorder works without commands too // assertFalse(localProject.contains(player2)); } @Test public void testMultiReferenceRevertWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); final int numTrophies = 40; new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numTrophies; i++) tournament.getReceivesTrophy().add(false); } }.run(false); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.revert(); } }.run(false); assertEquals(0, tournament.getReceivesTrophy().size()); } @Test public void testMultiReferenceDeleteRevertWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); final int numTrophies = 40; final int numDeletes = 10; new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numTrophies; i++) tournament.getReceivesTrophy().add(false); } }.run(false); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numDeletes; i++) tournament.getReceivesTrophy().remove(i); } }.run(false); assertEquals(numTrophies - numDeletes, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.revert(); } }.run(false); assertEquals(0, tournament.getReceivesTrophy().size()); } @Test public void testMultiReferenceRemoveRevertWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); final int numTrophies = 40; final int numDeletes = 10; new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numTrophies; i++) tournament.getReceivesTrophy().add(false); } }.run(false); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); EditingDomain domain = AdapterFactoryEditingDomain.getEditingDomainFor(tournament); for (int i = 0; i < numDeletes; i++) domain.getCommandStack().execute( RemoveCommand.create(domain, tournament, BowlingPackage.eINSTANCE.getTournament_ReceivesTrophy(), Boolean.FALSE)); assertEquals(numTrophies - numDeletes, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.revert(); } }.run(false); assertEquals(0, 
tournament.getReceivesTrophy().size()); } @Test public void testMultiReferenceMoveRevertWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); final int numTrophies = 40; final int numDeletes = 10; new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numTrophies; i++) tournament.getReceivesTrophy().add(Boolean.FALSE); } }.run(false); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); EditingDomain domain = AdapterFactoryEditingDomain.getEditingDomainFor(tournament); for (int i = 10; i < numDeletes; i++) domain.getCommandStack().execute( MoveCommand.create(domain, tournament, BowlingPackage.eINSTANCE.getTournament_ReceivesTrophy(), Boolean.FALSE, i - 1)); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.revert(); } }.run(false); assertEquals(0, tournament.getReceivesTrophy().size()); } @Test public void testUndoAddOperation() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournamentA = ProjectChangeUtil.createTournament(true); final Tournament tournamentB = ProjectChangeUtil.createTournament(true); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournamentA); localProject.getModelElements().add(tournamentB); } }.run(false); final Matchup matchupA = ProjectChangeUtil.createMatchup(null, null); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().add(matchupA); } }.run(false); assertEquals(1, tournamentA.getMatchups().size()); assertTrue(tournamentA.getMatchups().contains(matchupA)); new EMFStoreCommand() { @Override protected void doRun() { tournamentB.getMatchups().add(matchupA); } }.run(false); assertEquals(1, tournamentB.getMatchups().size()); assertTrue(tournamentB.getMatchups().contains(matchupA)); assertEquals(0, tournamentA.getMatchups().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(0, tournamentB.getMatchups().size()); assertEquals(1, tournamentA.getMatchups().size()); assertTrue(tournamentA.getMatchups().contains(matchupA)); } @Test public void testUndoMoveOperation() { final ESLocalProject localProject = ESWorkspaceProvider.INSTANCE.getWorkspace().createLocalProject( "SimpleEmptyProject"); final Tournament tournamentA = ProjectChangeUtil.createTournament(true); final Tournament tournamentB = ProjectChangeUtil.createTournament(true); final Matchup matchupA = ProjectChangeUtil.createMatchup(null, null); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournamentA); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().add(matchupA); } }.run(false); ESModelElementId matchupID = localProject.getModelElementId(matchupA); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournamentB); } }.run(false); assertEquals(1, tournamentA.getMatchups().size()); assertTrue(tournamentA.getMatchups().contains(matchupA)); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().remove(matchupA); } }.run(false); assertEquals(2, 
localProject.getModelElements().size()); new EMFStoreCommand() { @Override protected void doRun() { tournamentB.getMatchups().add(matchupA); } }.run(false); assertEquals(1, tournamentB.getMatchups().size()); assertTrue(tournamentB.getMatchups().contains(matchupA)); assertEquals(0, tournamentA.getMatchups().size()); assertEquals(2, localProject.getModelElements().size()); // undos move from root to container new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(0, tournamentB.getMatchups().size()); assertEquals(0, tournamentA.getMatchups().size()); assertEquals(3, localProject.getModelElements().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(0, tournamentA.getMatchups().size()); assertEquals(2, localProject.getModelElements().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(1, tournamentA.getMatchups().size()); assertEquals(2, localProject.getModelElements().size()); assertEquals(matchupID, localProject.getModelElementId(tournamentA.getMatchups().get(0))); } }
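Every model change in the command-based tests above is wrapped in an anonymous EMFStoreCommand whose doRun() performs the mutation and which is then started with run(false) or execute(). A hedged sketch of a small helper that factors out that boilerplate follows; only EMFStoreCommand itself is real API here, the Commands class is illustrative, and the lambda at the call site assumes a Java 8+ compiler.

import org.eclipse.emf.emfstore.internal.client.model.util.EMFStoreCommand;

// Illustrative helper; EMFStoreCommand and its run(false) pattern come from the tests above.
final class Commands {

    private Commands() {
    }

    /** Runs the given body inside an EMFStoreCommand, mirroring the
     *  new EMFStoreCommand() { doRun() { ... } }.run(false) idiom above. */
    static void run(final Runnable body) {
        new EMFStoreCommand() {
            @Override
            protected void doRun() {
                body.run();
            }
        }.run(false);
    }
}

A call site would then read Commands.run(() -> tournamentA.getMatchups().add(matchupA)); instead of spelling out the anonymous subclass each time.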
org.eclipse.emf.emfstore.client.test/src/org/eclipse/emf/emfstore/client/test/api/ModelElementTest.java
/******************************************************************************* * Copyright 2011 Chair for Applied Software Engineering, * Technische Universitaet Muenchen. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: ******************************************************************************/ package org.eclipse.emf.emfstore.client.test.api; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.util.ConcurrentModificationException; import java.util.Set; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.edit.command.MoveCommand; import org.eclipse.emf.edit.command.RemoveCommand; import org.eclipse.emf.edit.domain.AdapterFactoryEditingDomain; import org.eclipse.emf.edit.domain.EditingDomain; import org.eclipse.emf.emfstore.bowling.BowlingPackage; import org.eclipse.emf.emfstore.bowling.Game; import org.eclipse.emf.emfstore.bowling.League; import org.eclipse.emf.emfstore.bowling.Matchup; import org.eclipse.emf.emfstore.bowling.Player; import org.eclipse.emf.emfstore.bowling.Tournament; import org.eclipse.emf.emfstore.client.ESLocalProject; import org.eclipse.emf.emfstore.client.ESWorkspaceProvider; import org.eclipse.emf.emfstore.common.model.ESModelElementId; import org.eclipse.emf.emfstore.internal.client.model.util.EMFStoreCommand; import org.junit.Test; /** * ModelElementTest. * * @author Tobias Verhoeven */ public class ModelElementTest { /** * Tests adding model elements. 
*/ @Test public void testAddModelElementsWithoutCommands() { ESLocalProject localProject = ESWorkspaceProvider.INSTANCE.getWorkspace().createLocalProject("Testprojekt"); League leagueA = ProjectChangeUtil.createLeague("America"); League leagueB = ProjectChangeUtil.createLeague("Europe"); localProject.getModelElements().add(leagueA); localProject.getModelElements().add(leagueB); assertEquals(2, localProject.getAllModelElements().size()); Player playerA = ProjectChangeUtil.createPlayer("Hans"); Player playerB = ProjectChangeUtil.createPlayer("Anton"); leagueA.getPlayers().add(playerA); leagueA.getPlayers().add(playerB); assertEquals(4, localProject.getAllModelElements().size()); Player playerC = ProjectChangeUtil.createPlayer("Paul"); Player playerD = ProjectChangeUtil.createPlayer("Klaus"); leagueA.getPlayers().add(playerC); leagueA.getPlayers().add(playerD); assertEquals(6, localProject.getAllModelElements().size()); assertEquals(2, localProject.getModelElements().size()); Tournament tournamentA = ProjectChangeUtil.createTournament(false); localProject.getModelElements().add(tournamentA); Matchup matchupA = ProjectChangeUtil.createMatchup(null, null); Matchup matchupB = ProjectChangeUtil.createMatchup(null, null); Game gameA = ProjectChangeUtil.createGame(playerA); Game gameB = ProjectChangeUtil.createGame(playerB); Game gameC = ProjectChangeUtil.createGame(playerC); Game gameD = ProjectChangeUtil.createGame(playerD); tournamentA.getMatchups().add(matchupA); matchupA.getGames().add(gameA); matchupA.getGames().add(gameB); assertEquals(10, localProject.getAllModelElements().size()); tournamentA.getMatchups().add(matchupB); matchupB.getGames().add(gameC); matchupB.getGames().add(gameD); assertEquals(13, localProject.getAllModelElements().size()); assertEquals(3, localProject.getModelElements().size()); } /** * Tests adding model elements. 
*/ @Test public void testAddModelElementsWithCommands() { final ESLocalProject localProject = ESWorkspaceProvider.INSTANCE.getWorkspace().createLocalProject( "Testprojekt"); final League leagueA = ProjectChangeUtil.createLeague("America"); final League leagueB = ProjectChangeUtil.createLeague("Europe"); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(leagueA); localProject.getModelElements().add(leagueB); } }.execute(); assertEquals(2, localProject.getAllModelElements().size()); final Player playerA = ProjectChangeUtil.createPlayer("Hans"); final Player playerB = ProjectChangeUtil.createPlayer("Anton"); new EMFStoreCommand() { @Override protected void doRun() { leagueA.getPlayers().add(playerA); leagueA.getPlayers().add(playerB); } }.execute(); assertEquals(4, localProject.getAllModelElements().size()); final Player playerC = ProjectChangeUtil.createPlayer("Paul"); final Player playerD = ProjectChangeUtil.createPlayer("Klaus"); new EMFStoreCommand() { @Override protected void doRun() { leagueA.getPlayers().add(playerC); leagueA.getPlayers().add(playerD); } }.execute(); assertEquals(6, localProject.getAllModelElements().size()); assertEquals(2, localProject.getModelElements().size()); final Tournament tournamentA = ProjectChangeUtil.createTournament(false); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournamentA); } }.execute(); final Matchup matchupA = ProjectChangeUtil.createMatchup(null, null); final Matchup matchupB = ProjectChangeUtil.createMatchup(null, null); final Game gameA = ProjectChangeUtil.createGame(playerA); final Game gameB = ProjectChangeUtil.createGame(playerB); final Game gameC = ProjectChangeUtil.createGame(playerC); final Game gameD = ProjectChangeUtil.createGame(playerD); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().add(matchupA); matchupA.getGames().add(gameA); matchupA.getGames().add(gameB); } }.execute(); assertEquals(10, localProject.getAllModelElements().size()); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().add(matchupB); matchupB.getGames().add(gameC); matchupB.getGames().add(gameD); } }.execute(); assertEquals(13, localProject.getAllModelElements().size()); assertEquals(3, localProject.getModelElements().size()); } @Test(expected = ConcurrentModificationException.class) public void testDeleteAllModelElementsWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); new EMFStoreCommand() { @Override protected void doRun() { Set<EObject> elements = localProject.getAllModelElements(); for (EObject object : elements) { localProject.getModelElements().remove(object); } } }.execute(); assertEquals(0, localProject.getAllModelElements().size()); } @Test(expected = ConcurrentModificationException.class) public void testDeleteAllModelElementsWithoutCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); Set<EObject> elements = localProject.getAllModelElements(); for (EObject object : elements) { localProject.getModelElements().remove(object); } assertEquals(0, localProject.getAllModelElements().size()); } /** * adds and deletes model element and undos the deletion. 
*/ @Test public void testDeleteUndoWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Player player = ProjectChangeUtil.createPlayer("Heinrich"); final int SIZE = localProject.getAllModelElements().size(); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(player); } }.run(false); assertTrue(localProject.getAllModelElements().contains(player)); assertTrue(localProject.contains(player)); ESModelElementId id = localProject.getModelElementId(player); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().remove(player); } }.run(false); assertEquals(SIZE, localProject.getAllModelElements().size()); assertFalse(localProject.getAllModelElements().contains(player)); assertFalse(localProject.contains(player)); assertNull(localProject.getModelElement(id)); new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(SIZE + 1, localProject.getAllModelElements().size()); assertFalse(localProject.getAllModelElements().contains(player)); assertFalse(localProject.contains(player)); assertNotNull(localProject.getModelElement(id)); } /** * adds and deletes model element and undos the deletion. */ @Test public void testDeleteUndoWithoutCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Player player = ProjectChangeUtil.createPlayer("Heinrich"); final int SIZE = localProject.getAllModelElements().size(); localProject.getModelElements().add(player); assertTrue(localProject.getAllModelElements().contains(player)); assertTrue(localProject.contains(player)); ESModelElementId id = localProject.getModelElementId(player); localProject.getModelElements().remove(player); assertEquals(SIZE, localProject.getAllModelElements().size()); assertFalse(localProject.getAllModelElements().contains(player)); assertFalse(localProject.contains(player)); localProject.undoLastOperation(); assertEquals(SIZE + 1, localProject.getAllModelElements().size()); assertFalse(localProject.getAllModelElements().contains(player)); assertFalse(localProject.contains(player)); assertNotNull(localProject.getModelElement(id)); } @Test public void testReferenceDeletionWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); final Player player = ProjectChangeUtil.createPlayer("Heinrich"); final Player player2 = ProjectChangeUtil.createPlayer("Walter"); final Player player3 = ProjectChangeUtil.createPlayer("Wilhelm"); new EMFStoreCommand() { @Override protected void doRun() { tournament.getPlayers().add(player); tournament.getPlayers().add(player2); tournament.getPlayers().add(player3); } }.run(false); assertEquals(3, tournament.getPlayers().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().remove(player2); } }.execute(); assertEquals(2, tournament.getPlayers().size()); assertTrue(localProject.contains(player)); assertTrue(localProject.contains(player3)); assertFalse(localProject.contains(player2)); } @Test public void testReferenceDeletionWithoutCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); 
localProject.getModelElements().add(tournament); final Player player = ProjectChangeUtil.createPlayer("Heinrich"); final Player player2 = ProjectChangeUtil.createPlayer("Walter"); final Player player3 = ProjectChangeUtil.createPlayer("Wilhelm"); tournament.getPlayers().add(player); tournament.getPlayers().add(player2); tournament.getPlayers().add(player3); assertEquals(3, tournament.getPlayers().size()); tournament.getPlayers().remove(player2); assertEquals(2, tournament.getPlayers().size()); assertTrue(localProject.contains(player)); assertTrue(localProject.contains(player3)); // TODO: enable this assert once the operation recorder works without commands too // assertFalse(localProject.contains(player2)); } @Test public void testMultiReferenceRevertWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); final int numTrophies = 40; new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numTrophies; i++) tournament.getReceivesTrophy().add(false); } }.run(false); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.revert(); } }.run(false); assertEquals(0, tournament.getReceivesTrophy().size()); } @Test public void testMultiReferenceDeleteRevertWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); final int numTrophies = 40; final int numDeletes = 10; new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numTrophies; i++) tournament.getReceivesTrophy().add(false); } }.run(false); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numDeletes; i++) tournament.getReceivesTrophy().remove(i); } }.run(false); assertEquals(numTrophies - numDeletes, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.revert(); } }.run(false); assertEquals(0, tournament.getReceivesTrophy().size()); } @Test public void testMultiReferenceRemoveRevertWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); final int numTrophies = 40; final int numDeletes = 10; new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numTrophies; i++) tournament.getReceivesTrophy().add(false); } }.run(false); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); EditingDomain domain = AdapterFactoryEditingDomain.getEditingDomainFor(tournament); for (int i = 0; i < numDeletes; i++) domain.getCommandStack().execute( RemoveCommand.create(domain, tournament, BowlingPackage.eINSTANCE.getTournament_ReceivesTrophy(), Boolean.FALSE)); assertEquals(numTrophies - numDeletes, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.revert(); } }.run(false); assertEquals(0, 
tournament.getReceivesTrophy().size()); } @Test public void testMultiReferenceMoveRevertWithCommand() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournament = ProjectChangeUtil.createTournament(true); final int numTrophies = 40; final int numDeletes = 10; new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournament); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { for (int i = 0; i < numTrophies; i++) tournament.getReceivesTrophy().add(Boolean.FALSE); } }.run(false); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); EditingDomain domain = AdapterFactoryEditingDomain.getEditingDomainFor(tournament); for (int i = 10; i < numDeletes; i++) domain.getCommandStack().execute( MoveCommand.create(domain, tournament, BowlingPackage.eINSTANCE.getTournament_ReceivesTrophy(), Boolean.FALSE, i - 1)); assertEquals(numTrophies, tournament.getReceivesTrophy().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.revert(); } }.run(false); assertEquals(0, tournament.getReceivesTrophy().size()); } @Test public void testUndoAddOperation() { final ESLocalProject localProject = ProjectChangeUtil.createBasicBowlingProject(); final Tournament tournamentA = ProjectChangeUtil.createTournament(true); final Tournament tournamentB = ProjectChangeUtil.createTournament(true); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournamentA); localProject.getModelElements().add(tournamentB); } }.run(false); final Matchup matchupA = ProjectChangeUtil.createMatchup(null, null); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().add(matchupA); } }.run(false); assertEquals(1, tournamentA.getMatchups().size()); assertTrue(tournamentA.getMatchups().contains(matchupA)); new EMFStoreCommand() { @Override protected void doRun() { tournamentB.getMatchups().add(matchupA); } }.run(false); assertEquals(1, tournamentB.getMatchups().size()); assertTrue(tournamentB.getMatchups().contains(matchupA)); assertEquals(0, tournamentA.getMatchups().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(0, tournamentB.getMatchups().size()); assertEquals(1, tournamentA.getMatchups().size()); assertTrue(tournamentA.getMatchups().contains(matchupA)); } @Test public void testUndoMoveOperation() { final ESLocalProject localProject = ESWorkspaceProvider.INSTANCE.getWorkspace().createLocalProject( "SimpleEmptyProject", ""); final Tournament tournamentA = ProjectChangeUtil.createTournament(true); final Tournament tournamentB = ProjectChangeUtil.createTournament(true); final Matchup matchupA = ProjectChangeUtil.createMatchup(null, null); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournamentA); } }.run(false); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().add(matchupA); } }.run(false); ESModelElementId matchupID = localProject.getModelElementId(matchupA); new EMFStoreCommand() { @Override protected void doRun() { localProject.getModelElements().add(tournamentB); } }.run(false); assertEquals(1, tournamentA.getMatchups().size()); assertTrue(tournamentA.getMatchups().contains(matchupA)); new EMFStoreCommand() { @Override protected void doRun() { tournamentA.getMatchups().remove(matchupA); } }.run(false); assertEquals(2, 
localProject.getModelElements().size()); new EMFStoreCommand() { @Override protected void doRun() { tournamentB.getMatchups().add(matchupA); } }.run(false); assertEquals(1, tournamentB.getMatchups().size()); assertTrue(tournamentB.getMatchups().contains(matchupA)); assertEquals(0, tournamentA.getMatchups().size()); assertEquals(2, localProject.getModelElements().size()); // undos move from root to container new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(0, tournamentB.getMatchups().size()); assertEquals(0, tournamentA.getMatchups().size()); assertEquals(3, localProject.getModelElements().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(0, tournamentA.getMatchups().size()); assertEquals(2, localProject.getModelElements().size()); new EMFStoreCommand() { @Override protected void doRun() { localProject.undoLastOperation(); } }.run(false); assertEquals(1, tournamentA.getMatchups().size()); assertEquals(2, localProject.getModelElements().size()); assertEquals(matchupID, localProject.getModelElementId(tournamentA.getMatchups().get(0))); } }
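The remove- and move-revert tests above drive list changes through the EMF command stack (RemoveCommand and MoveCommand obtained via the tournament's editing domain) rather than mutating the list directly, so the changes stay on the undoable command stack. The sketch below factors the remove step into a helper; it assumes the tournament is already contained in a project with an attached editing domain, as in the tests, and the TrophyCommands class name is illustrative.

import org.eclipse.emf.common.command.Command;
import org.eclipse.emf.edit.command.RemoveCommand;
import org.eclipse.emf.edit.domain.AdapterFactoryEditingDomain;
import org.eclipse.emf.edit.domain.EditingDomain;
import org.eclipse.emf.emfstore.bowling.BowlingPackage;
import org.eclipse.emf.emfstore.bowling.Tournament;

// Illustrative helper around the command-stack removal used in the revert tests above.
final class TrophyCommands {

    private TrophyCommands() {
    }

    /** Removes one FALSE entry from receivesTrophy via the command stack. */
    static void removeOneTrophyFlag(final Tournament tournament) {
        final EditingDomain domain = AdapterFactoryEditingDomain.getEditingDomainFor(tournament);
        final Command remove = RemoveCommand.create(domain, tournament,
            BowlingPackage.eINSTANCE.getTournament_ReceivesTrophy(), Boolean.FALSE);
        if (remove.canExecute()) {
            domain.getCommandStack().execute(remove);
        }
    }
}

Checking canExecute() first should make the helper a no-op when no FALSE entry is left to remove.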
fixed createLocalProject call in ModelElementTest
org.eclipse.emf.emfstore.client.test/src/org/eclipse/emf/emfstore/client/test/api/ModelElementTest.java
fixed createLocalProject call in ModelElementTest
Java
mpl-2.0
439800c40479051dd550e9da18c56cb91a9db179
0
Pixida/logtest
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Copyright (c) 2016 Pixida GmbH */ package de.pixida.logtest.buildserver; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.time.StopWatch; import org.apache.commons.lang3.tuple.Pair; import org.apache.log4j.Level; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import de.pixida.logtest.automatondefinitions.JsonAutomatonDefinition; import de.pixida.logtest.logreaders.GenericLogReader; import de.pixida.logtest.logreaders.ILogReader; import de.pixida.logtest.processing.EvaluationResult; import de.pixida.logtest.processing.Job; import de.pixida.logtest.processing.JobExecutor; import de.pixida.logtest.processing.LogSink; import de.pixida.logtest.reporting.ConsoleSummaryReportGenerator; import de.pixida.logtest.reporting.JUnitStyleXmlReportGenerator; import de.pixida.logtest.reporting.ReportsGenerator; public class RunIntegrationTests { private static final String AUTOMATON_DIRECTORY_SWITCH = "automatonDirectory"; private static final String TRACE_LOG_DIRECTORY_SWITCH = "traceLogDirectory"; private static final String VERBOSITY_SWITCH = "verbose"; private static final String REPORT_SWITCH = "reportFile"; private static final String LOG_READER_CONFIG_SWITCH = "logReaderConfig"; private static final String HELP_SWITCH = "help"; private static final Logger LOG = LoggerFactory.getLogger(RunIntegrationTests.class); static class ExitWithFailureException extends RuntimeException { private static final long serialVersionUID = 1L; ExitWithFailureException() { // Empty constructor needed by checkstyle } int getExitCode() { return 1; } } private boolean verbose = false; private File jUnitReportTarget = null; private Map<File, List<Pair<File, Map<String, String>>>> configuredExecutions; private List<List<EvaluationResult>> results; private List<Job> jobs; private final StopWatch stopWatch = new StopWatch(); private List<Long> jobExecutionTimesMs; private JSONObject logReaderConfigFromCommandLine; public RunIntegrationTests() { // Empty constructor needed by checkstyle } public static void main(final String[] args) { try { final RunIntegrationTests runner = new RunIntegrationTests(); final boolean run = runner.parseCommandLine(args); if (run) { runner.createAndRunJobs(); runner.printResults(); } } catch (final ExitWithFailureException ee) { LOG.debug("Finished with exit code: " + ee.getExitCode()); System.exit(ee.getExitCode()); } catch (final Exception re) { LOG.debug("Abording with errors", re); System.exit(1); // Exit with error } } boolean parseCommandLine(final String[] args) { final Options options = createOptions(); final CommandLineParser parser = new DefaultParser(); try { final CommandLine params = parser.parse(options, args); if (params.hasOption(HELP_SWITCH)) { printHelp(options); return false; } this.applyVerbositySwitch(params); this.configuredExecutions = groupAutomatonsByTraceFile(params); final 
String param = params.getOptionValue(REPORT_SWITCH); try { if (params.hasOption(LOG_READER_CONFIG_SWITCH)) { this.logReaderConfigFromCommandLine = new JSONObject(params.getOptionValue(LOG_READER_CONFIG_SWITCH)); } } catch (final JSONException jsonEx) { throw new ParseException("Failed to parse log reader configuration JSON data: " + jsonEx.getMessage()); } if (param != null) { this.jUnitReportTarget = new File(param); } } catch (final ParseException e) { System.err.println(e.getMessage()); System.err.println(); printHelp(options); // Abort with failure - build server job must not succeed if the calling convention is erroneous throw new ExitWithFailureException(); } return true; } boolean getIsVerbose() { return this.verbose; } void createAndRunJobs() { this.jobs = this.createJobs(this.configuredExecutions); LOG.info("Starting integration tests"); this.stopWatch.start(); final JobExecutor executor = new JobExecutor(this.jobs); this.results = executor.getResults(); this.jobExecutionTimesMs = executor.getJobExecutionTimesMs(); this.stopWatch.stop(); LOG.info("Integration tests finished"); } void printResults() { final ReportsGenerator reportsGenerator = new ReportsGenerator(); reportsGenerator.setJobs(this.jobs); reportsGenerator.setResults(this.results); reportsGenerator.addReportGenerator(new ConsoleSummaryReportGenerator(this.stopWatch.getTime())); reportsGenerator.setJobExecutionTimes(this.jobExecutionTimesMs); if (this.jUnitReportTarget != null) { reportsGenerator.addReportGenerator(new JUnitStyleXmlReportGenerator(this.jUnitReportTarget, this.stopWatch.getTime())); } reportsGenerator.generateReports(); final long numFailedExecutions = this.results.stream().mapToLong(result -> result.stream().filter(er -> !er.isSuccess()).count()) .sum(); if (numFailedExecutions == 1) { throw new ExitWithFailureException(); } } private List<Job> createJobs(final Map<File, List<Pair<File, Map<String, String>>>> pairsWithParams) { final List<Job> result = new ArrayList<>(); for (final Entry<File, List<Pair<File, Map<String, String>>>> pair : pairsWithParams.entrySet()) { final List<LogSink> sinks = new ArrayList<>(); for (final Pair<File, Map<String, String>> sinkDef : pair.getValue()) { final LogSink newSink = new LogSink(); newSink.setAutomaton(new JsonAutomatonDefinition(sinkDef.getLeft())); newSink.setParameters(sinkDef.getRight()); sinks.add(newSink); } final Job newJob = new Job(); newJob.setLogReader(this.createAndConfigureLogReader(pair.getKey())); newJob.setSinks(sinks); result.add(newJob); } return result; } private void applyVerbositySwitch(final CommandLine params) { if (params.hasOption(VERBOSITY_SWITCH)) { if (org.apache.log4j.Logger.getRootLogger().getLevel().isGreaterOrEqual(Level.DEBUG)) // Don't turn TRACE into DEBUG { org.apache.log4j.Logger.getRootLogger().setLevel(Level.DEBUG); this.verbose = true; LOG.debug("Verbose mode enabled"); } } } private static Options createOptions() { final Options options = new Options(); final Option traceLogDirectory = Option.builder("t") .longOpt(TRACE_LOG_DIRECTORY_SWITCH) .desc("Trace logs location") .hasArg() .argName("folder") .build(); final Option automatonDirectory = Option.builder("a") .longOpt(AUTOMATON_DIRECTORY_SWITCH) .desc("Automatons location") .hasArg() .argName("folder") .build(); final Option reportFile = Option.builder("r") .longOpt(REPORT_SWITCH) .desc("Generated JUnit report XML file target location") .hasArg() .argName("file") .build(); final Option verbosity = Option.builder("v") .longOpt(VERBOSITY_SWITCH) .desc("Enable debug 
output") .build(); final Option logReaderConfigSwitch = Option.builder("lrcfg") .longOpt(LOG_READER_CONFIG_SWITCH) .desc("Log reader configuration (JSON)") .hasArg() .argName("json-object") .build(); final Option helpSwitch = Option.builder("h") .longOpt(HELP_SWITCH) .desc("Show (this) help") .build(); options.addOption(traceLogDirectory); options.addOption(automatonDirectory); options.addOption(logReaderConfigSwitch); options.addOption(reportFile); options.addOption(verbosity); options.addOption(helpSwitch); return options; } private ILogReader createAndConfigureLogReader(final File logFile) { final GenericLogReader logReader = new GenericLogReader(logFile); if (this.logReaderConfigFromCommandLine != null) { logReader.overwriteCurrentSettingsWithSettingsInConfigurationFile(this.logReaderConfigFromCommandLine); } else { // Define default settings here for now logReader.setHeadlinePattern("^(.*?([0-9]+))"); logReader.setHeadlinePatternIndexOfTimestamp(1 + 1); } return logReader; } private static Map<File, List<Pair<File, Map<String, String>>>> groupAutomatonsByTraceFile(final CommandLine params) throws ParseException { final File logFolder = new File(commandLineParamOrCurrentDirectory(params, TRACE_LOG_DIRECTORY_SWITCH)); final File automatonsFolder = new File(commandLineParamOrCurrentDirectory(params, AUTOMATON_DIRECTORY_SWITCH)); LOG.debug("Using log folder: {}", logFolder.getAbsolutePath()); LOG.debug("Using automatons folder: {}", automatonsFolder.getAbsolutePath()); final Map<File, List<Pair<File, Map<String, String>>>> result = new HashMap<>(); for (final String arg : params.getArgList()) { final int numComponentsLogFileAndAutomaton = 2; final int numComponentsLogFileAndAutomatonAndParameter = 3; final String[] components = arg.split(":", numComponentsLogFileAndAutomatonAndParameter); if (components.length < numComponentsLogFileAndAutomaton || components.length > numComponentsLogFileAndAutomatonAndParameter) { throw new ParseException( "Invalid execution entry on command line. Format must be <logfile>:<automaton>[:<parameters>]: " + arg); } final File traceLog = new File(logFolder, components[0]); List<Pair<File, Map<String, String>>> automatons = result.get(traceLog); if (automatons == null) { automatons = new ArrayList<>(); result.put(traceLog, automatons); } Map<String, String> parameters = null; if (components.length >= numComponentsLogFileAndAutomatonAndParameter) { parameters = parseAutomatonParameters(components[numComponentsLogFileAndAutomatonAndParameter - 1]); } if (parameters == null) { parameters = new HashMap<>(); } automatons.add(Pair.of(new File(automatonsFolder, components[1]), parameters)); } return result; } private static String commandLineParamOrCurrentDirectory(final CommandLine params, final String paramName) { return params.hasOption(paramName) ? 
params.getOptionValue(paramName) : "."; } private static Map<String, String> parseAutomatonParameters(final String string) throws ParseException { final Map<String, String> result = new HashMap<>(); // Separate by ',' final String[] params = string.split(","); // Separate by '=' for (final String param : params) { final String[] kv = param.split("="); final int kvLen = 2; if (kv.length != kvLen) { throw new ParseException("A parameter entry must be a key=value pair."); } result.put(kv[0], kv[1]); } return result; } private static void printHelp(final Options options) { final HelpFormatter formatter = new HelpFormatter(); final int assumedConsoleWidth = 150; formatter.setWidth(assumedConsoleWidth); formatter.printHelp("java -jar logtest-buildserver-app.jar [OPTIONS]... [EXECUTIONS]...\n" + "An EXECUTION is a triple <scenario-filename>:<automaton-filename>[:<parameters, comma separated key=value pairs...>,<...>]" + " e.g. runFor120Minutes-trace.txt:checkEverythingShutsDownProperly.json:size=50,stop=yes", options); } Map<File, List<Pair<File, Map<String, String>>>> getConfiguredExecutions() { return this.configuredExecutions; } }
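RunIntegrationTests above accepts execution entries of the form <logfile>:<automaton>[:<key=value,...>] and splits them with a bounded String.split before resolving the files and parameters. A standalone sketch of just that parsing step follows, using the sample entry from the help text; the ExecutionSpecDemo class is illustrative and the original's ParseException handling is reduced to IllegalArgumentException.

import java.util.HashMap;
import java.util.Map;

// Standalone sketch of the execution-argument parsing done by
// groupAutomatonsByTraceFile and parseAutomatonParameters above.
public class ExecutionSpecDemo {

    /** Parses "<logfile>:<automaton>[:<key=value,...>]". */
    static void parse(String arg) {
        String[] components = arg.split(":", 3);
        if (components.length < 2) {
            throw new IllegalArgumentException(
                "Format must be <logfile>:<automaton>[:<parameters>]: " + arg);
        }
        Map<String, String> parameters = new HashMap<>();
        if (components.length == 3) {
            for (String param : components[2].split(",")) {
                String[] kv = param.split("=");
                if (kv.length != 2) {
                    throw new IllegalArgumentException("A parameter entry must be a key=value pair: " + param);
                }
                parameters.put(kv[0], kv[1]);
            }
        }
        System.out.println("log=" + components[0] + " automaton=" + components[1] + " params=" + parameters);
    }

    public static void main(String[] args) {
        parse("runFor120Minutes-trace.txt:checkEverythingShutsDownProperly.json:size=50,stop=yes");
    }
}

Parsing the sample entry yields the trace log name, the automaton file name, and the two key=value parameters.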
logtest-buildserver-app/src/main/java/de/pixida/logtest/buildserver/RunIntegrationTests.java
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * Copyright (c) 2016 Pixida GmbH */

package de.pixida.logtest.buildserver;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.log4j.Level;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import de.pixida.logtest.automatondefinitions.JsonAutomatonDefinition;
import de.pixida.logtest.logreaders.GenericLogReader;
import de.pixida.logtest.logreaders.ILogReader;
import de.pixida.logtest.processing.EvaluationResult;
import de.pixida.logtest.processing.Job;
import de.pixida.logtest.processing.JobExecutor;
import de.pixida.logtest.processing.LogSink;
import de.pixida.logtest.reporting.ConsoleSummaryReportGenerator;
import de.pixida.logtest.reporting.JUnitStyleXmlReportGenerator;
import de.pixida.logtest.reporting.ReportsGenerator;

public class RunIntegrationTests
{
    private static final String AUTOMATON_DIRECTORY_SWITCH = "automatonDirectory";
    private static final String TRACE_LOG_DIRECTORY_SWITCH = "traceLogDirectory";
    private static final String VERBOSITY_SWITCH = "verbose";
    private static final String REPORT_SWITCH = "reportFile";
    private static final String LOG_READER_CONFIG_SWITCH = "logReaderConfig";
    private static final String HELP_SWITCH = "help";

    private static final Logger LOG = LoggerFactory.getLogger(RunIntegrationTests.class);

    static class ExitWithFailureException extends RuntimeException
    {
        private static final long serialVersionUID = 1L;

        ExitWithFailureException()
        {
            // Empty constructor needed by checkstyle
        }

        int getExitCode()
        {
            return 1;
        }
    }

    private boolean verbose = false;
    private File jUnitReportTarget = null;
    private Map<File, List<Pair<File, Map<String, String>>>> configuredExecutions;
    private List<List<EvaluationResult>> results;
    private List<Job> jobs;
    private final StopWatch stopWatch = new StopWatch();
    private List<Long> jobExecutionTimesMs;
    private JSONObject logReaderConfigFromCommandLine;

    public RunIntegrationTests()
    {
        // Empty constructor needed by checkstyle
    }

    public static void main(final String[] args)
    {
        try
        {
            final RunIntegrationTests runner = new RunIntegrationTests();
            final boolean run = runner.parseCommandLine(args);
            if (run)
            {
                runner.createAndRunJobs();
                runner.printResults();
            }
        }
        catch (final ExitWithFailureException ee)
        {
            LOG.debug("Finished with exit code: " + ee.getExitCode());
            System.exit(ee.getExitCode());
        }
        catch (final Exception re)
        {
            LOG.debug("Aborting with errors", re);
            System.exit(1); // Exit with error
        }
    }

    boolean parseCommandLine(final String[] args)
    {
        final Options options = createOptions();
        final CommandLineParser parser = new DefaultParser();
        try
        {
            final CommandLine params = parser.parse(options, args);
            if (params.hasOption(HELP_SWITCH))
            {
                printHelp(options);
                return false;
            }
            this.applyVerbositySwitch(params);
            this.configuredExecutions = groupAutomatonsByTraceFile(params);
            final String param = params.getOptionValue(REPORT_SWITCH);
            try
            {
                if (params.hasOption(LOG_READER_CONFIG_SWITCH))
                {
                    this.logReaderConfigFromCommandLine = new JSONObject(params.getOptionValue(LOG_READER_CONFIG_SWITCH));
                }
            }
            catch (final JSONException jsonEx)
            {
                throw new ParseException("Failed to parse log reader configuration JSON data: " + jsonEx.getMessage());
            }
            if (param != null)
            {
                this.jUnitReportTarget = new File(param);
            }
        }
        catch (final ParseException e)
        {
            System.err.println(e.getMessage());
            System.err.println();
            printHelp(options);
            // Abort with failure - build server job must not succeed if the calling convention is erroneous
            throw new ExitWithFailureException();
        }
        return true;
    }

    boolean getIsVerbose()
    {
        return this.verbose;
    }

    void createAndRunJobs()
    {
        this.jobs = this.createJobs(this.configuredExecutions);
        LOG.info("Starting integration tests");
        this.stopWatch.start();
        final JobExecutor executor = new JobExecutor(this.jobs);
        this.results = executor.getResults();
        this.jobExecutionTimesMs = executor.getJobExecutionTimesMs();
        this.stopWatch.stop();
        LOG.info("Integration tests finished");
    }

    void printResults()
    {
        final ReportsGenerator reportsGenerator = new ReportsGenerator();
        reportsGenerator.setJobs(this.jobs);
        reportsGenerator.setResults(this.results);
        reportsGenerator.addReportGenerator(new ConsoleSummaryReportGenerator(this.stopWatch.getTime()));
        reportsGenerator.setJobExecutionTimes(this.jobExecutionTimesMs);
        if (this.jUnitReportTarget != null)
        {
            reportsGenerator.addReportGenerator(new JUnitStyleXmlReportGenerator(this.jUnitReportTarget, this.stopWatch.getTime()));
        }
        reportsGenerator.generateReports();
        final long numFailedExecutions = this.results.stream()
            .mapToLong(result -> result.stream().filter(er -> !er.isSuccess()).count())
            .sum();
        if (numFailedExecutions > 0)
        {
            throw new ExitWithFailureException();
        }
    }

    private List<Job> createJobs(final Map<File, List<Pair<File, Map<String, String>>>> pairsWithParams)
    {
        final List<Job> result = new ArrayList<>();
        for (final Entry<File, List<Pair<File, Map<String, String>>>> pair : pairsWithParams.entrySet())
        {
            final List<LogSink> sinks = new ArrayList<>();
            for (final Pair<File, Map<String, String>> sinkDef : pair.getValue())
            {
                final LogSink newSink = new LogSink();
                newSink.setAutomaton(new JsonAutomatonDefinition(sinkDef.getLeft()));
                newSink.setParameters(sinkDef.getRight());
                sinks.add(newSink);
            }
            final Job newJob = new Job();
            newJob.setLogReader(this.createAndConfigureLogReader(pair.getKey()));
            newJob.setSinks(sinks);
            result.add(newJob);
        }
        return result;
    }

    private void applyVerbositySwitch(final CommandLine params)
    {
        if (params.hasOption(VERBOSITY_SWITCH))
        {
            if (org.apache.log4j.Logger.getRootLogger().getLevel().isGreaterOrEqual(Level.DEBUG)) // Don't turn TRACE into DEBUG
            {
                org.apache.log4j.Logger.getRootLogger().setLevel(Level.DEBUG);
                this.verbose = true;
                LOG.debug("Verbose mode enabled");
            }
        }
    }

    private static Options createOptions()
    {
        final Options options = new Options();
        final Option traceLogDirectory = Option.builder("t")
            .longOpt(TRACE_LOG_DIRECTORY_SWITCH)
            .desc("Trace logs location")
            .hasArg()
            .argName("folder")
            .build();
        final Option automatonDirectory = Option.builder("a")
            .longOpt(AUTOMATON_DIRECTORY_SWITCH)
            .desc("Automatons location")
            .hasArg()
            .argName("folder")
            .build();
        final Option reportFile = Option.builder("r")
            .longOpt(REPORT_SWITCH)
            .desc("Generated JUnit report XML file target location")
            .hasArg()
            .argName("file")
            .build();
        final Option verbosity = Option.builder("v")
            .longOpt(VERBOSITY_SWITCH)
            .desc("Enable debug output")
            .build();
        final Option logReaderConfigSwitch = Option.builder("lrcfg")
            .longOpt(LOG_READER_CONFIG_SWITCH)
            .desc("Log reader configuration (JSON)")
            .hasArg()
            .argName("json-object")
            .build();
        final Option helpSwitch = Option.builder("h")
            .longOpt(HELP_SWITCH)
            .desc("Show (this) help")
            .build();
        options.addOption(traceLogDirectory);
        options.addOption(automatonDirectory);
        options.addOption(logReaderConfigSwitch);
        options.addOption(reportFile);
        options.addOption(verbosity);
        options.addOption(helpSwitch);
        return options;
    }

    private ILogReader createAndConfigureLogReader(final File logFile)
    {
        final GenericLogReader logReader = new GenericLogReader(logFile);
        if (this.logReaderConfigFromCommandLine != null)
        {
            logReader.overwriteCurrentSettingsWithSettingsInConfigurationFile(this.logReaderConfigFromCommandLine);
        }
        else
        {
            // Define default settings here for now
            logReader.setHeadlinePattern("^(.*?([0-9]+))");
            logReader.setHeadlinePatternIndexOfTimestamp(1 + 1);
        }
        return logReader;
    }

    private static Map<File, List<Pair<File, Map<String, String>>>> groupAutomatonsByTraceFile(final CommandLine params)
        throws ParseException
    {
        final File logFolder = new File(commandLineParamOrCurrentDirectory(params, TRACE_LOG_DIRECTORY_SWITCH));
        final File automatonsFolder = new File(commandLineParamOrCurrentDirectory(params, AUTOMATON_DIRECTORY_SWITCH));
        LOG.debug("Using log folder: {}", logFolder.getAbsolutePath());
        LOG.debug("Using automatons folder: {}", automatonsFolder.getAbsolutePath());
        final Map<File, List<Pair<File, Map<String, String>>>> result = new HashMap<>();
        for (final String arg : params.getArgList())
        {
            final int numComponentsLogFileAndAutomaton = 2;
            final int numComponentsLogFileAndAutomatonAndParameter = 3;
            final String[] components = arg.split(":", numComponentsLogFileAndAutomatonAndParameter);
            if (components.length < numComponentsLogFileAndAutomaton || components.length > numComponentsLogFileAndAutomatonAndParameter)
            {
                throw new ParseException(
                    "Invalid execution entry on command line. Format must be <logfile>:<automaton>[:<parameters>]: " + arg);
            }
            final File traceLog = new File(logFolder, components[0]);
            List<Pair<File, Map<String, String>>> automatons = result.get(traceLog);
            if (automatons == null)
            {
                automatons = new ArrayList<>();
                result.put(traceLog, automatons);
            }
            Map<String, String> parameters = null;
            if (components.length >= numComponentsLogFileAndAutomatonAndParameter)
            {
                parameters = parseAutomatonParameters(components[numComponentsLogFileAndAutomatonAndParameter - 1]);
            }
            if (parameters == null)
            {
                parameters = new HashMap<>();
            }
            automatons.add(Pair.of(new File(automatonsFolder, components[1]), parameters));
        }
        return result;
    }

    private static String commandLineParamOrCurrentDirectory(final CommandLine params, final String paramName)
    {
        return params.hasOption(paramName) ? params.getOptionValue(paramName) : ".";
    }

    private static Map<String, String> parseAutomatonParameters(final String string) throws ParseException
    {
        final Map<String, String> result = new HashMap<>();
        // Separate by ','
        final String[] params = string.split(",");
        // Separate by '='
        for (final String param : params)
        {
            final String[] kv = param.split("=");
            final int kvLen = 2;
            if (kv.length != kvLen)
            {
                throw new ParseException("A parameter entry must be a key=value pair.");
            }
            result.put(kv[0], kv[1]);
        }
        return result;
    }

    private static void printHelp(final Options options)
    {
        final HelpFormatter formatter = new HelpFormatter();
        final int assumedConsoleWidth = 150;
        formatter.setWidth(assumedConsoleWidth);
        formatter.printHelp("java -jar logtest-buildserver-app.jar [OPTIONS]... [EXECUTIONS]...\n"
            + "An EXECUTION is a triple <scenario-filename>:<automaton-filename>[:<comma separated key=value pairs...>,<...>] e.g. "
            + "runFor120Minutes-trace.txt:checkEverythingShutsDownProperly.json:size=50,stop=yes", options);
    }

    Map<File, List<Pair<File, Map<String, String>>>> getConfiguredExecutions()
    {
        return this.configuredExecutions;
    }
}
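// Illustrative invocation sketch, not part of the original sources: the switches and the
// EXECUTION format are taken from createOptions() and printHelp() above, while the "traces"
// and "automatons" folders and "report.xml" are hypothetical example names.
//
//   java -jar logtest-buildserver-app.jar -t traces -a automatons -r report.xml \
//       runFor120Minutes-trace.txt:checkEverythingShutsDownProperly.json:size=50,stop=yes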
Improved help message
logtest-buildserver-app/src/main/java/de/pixida/logtest/buildserver/RunIntegrationTests.java
Improved help message